diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
index d1deddb..0cc1dce 100644
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -2,45 +2,40 @@ name: CI
on:
push:
- branches:
- - main
+ branches-ignore:
+ - "wip*"
tags:
- "v[0-9].*"
pull_request:
- release:
- types: [published]
schedule:
# Daily at 5:33
- cron: "33 5 * * *"
workflow_dispatch:
-jobs:
- pre-commit:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v4
- - uses: actions/setup-python@v4
- with:
- python-version: "3.x"
- - uses: pre-commit/action@v3.0.0
+permissions: {}
+jobs:
list:
runs-on: ubuntu-latest
outputs:
noxenvs: ${{ steps.noxenvs-matrix.outputs.noxenvs }}
steps:
- uses: actions/checkout@v4
- - name: Set up nox
- uses: wntrblm/nox@2023.04.22
+ with:
+ persist-credentials: false
+ - uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb
+ with:
+ enable-cache: ${{ github.ref_type != 'tag' }} # zizmor: ignore[cache-poisoning]
- id: noxenvs-matrix
run: |
echo >>$GITHUB_OUTPUT noxenvs=$(
- nox --list-sessions --json | jq '[.[].session]'
+ uvx nox --list-sessions --json | jq '[.[].session]'
)
test:
needs: list
runs-on: ubuntu-latest
+
strategy:
fail-fast: false
matrix:
@@ -48,6 +43,8 @@ jobs:
steps:
- uses: actions/checkout@v4
+ with:
+ persist-credentials: false
- name: Install dependencies
run: sudo apt-get update && sudo apt-get install -y libenchant-2-dev
if: runner.os == 'Linux' && startsWith(matrix.noxenv, 'docs')
@@ -55,122 +52,281 @@ jobs:
run: brew install enchant
if: runner.os == 'macOS' && startsWith(matrix.noxenv, 'docs')
- name: Set up Python
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: |
- 3.8
3.9
3.10
3.11
3.12
+ 3.13
+ 3.13t
+ 3.14
+ 3.14t
+ pypy3.9
pypy3.10
+ pypy3.11
allow-prereleases: true
- - name: Set up nox
- uses: wntrblm/nox@2023.04.22
+
+ - uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb
+ with:
+ enable-cache: ${{ github.ref_type != 'tag' }} # zizmor: ignore[cache-poisoning]
- name: Run nox
- run: nox -s "${{ matrix.noxenv }}"
+ run: uvx nox -s "${{ matrix.noxenv }}" -- ${{ matrix.posargs }} # zizmor: ignore[template-injection]
manylinux:
needs: test
runs-on: ubuntu-latest
+
strategy:
+ fail-fast: false
matrix:
target: [x86_64, x86, aarch64, armv7, s390x, ppc64le]
+
steps:
- uses: actions/checkout@v4
- - uses: actions/setup-python@v4
with:
- python-version: "3.x"
+ persist-credentials: false
+ - uses: actions/setup-python@v5
+ with:
+ python-version: |
+ 3.9
+ 3.10
+ 3.11
+ 3.12
+ 3.13
+ 3.13t
+ 3.14
+ 3.14t
+ pypy3.9
+ pypy3.10
+ pypy3.11
+ allow-prereleases: true
- name: Build wheels
- uses: PyO3/maturin-action@v1
+ uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
with:
target: ${{ matrix.target }}
- args: --release --out dist --interpreter '3.8 3.9 3.10 3.11 3.12 pypy3.8 pypy3.9 pypy3.10'
- sccache: "true"
+ args: --release --out dist --interpreter '3.9 3.10 3.11 3.12 3.13 3.13t 3.14 3.14t pypy3.9 pypy3.10 pypy3.11'
+ sccache: ${{ github.ref_type != 'tag' }} # zizmor: ignore[cache-poisoning]
manylinux: auto
- name: Upload wheels
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
- name: wheels
+ name: dist-${{ github.job }}-${{ matrix.target }}
path: dist
musllinux:
needs: test
runs-on: ubuntu-latest
+
strategy:
+ fail-fast: false
matrix:
target:
- aarch64-unknown-linux-musl
- i686-unknown-linux-musl
- x86_64-unknown-linux-musl
+
steps:
- uses: actions/checkout@v4
- - uses: actions/setup-python@v4
with:
- python-version: "3.x"
+ persist-credentials: false
+ - uses: actions/setup-python@v5
+ with:
+ python-version: |
+ 3.9
+ 3.10
+ 3.11
+ 3.12
+ 3.13
+ 3.13t
+ 3.14
+ 3.14t
+ pypy3.9
+ pypy3.10
+ pypy3.11
+ allow-prereleases: true
- name: Build wheels
- uses: PyO3/maturin-action@v1
+ uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
with:
target: ${{ matrix.target }}
- args: --release --out dist --interpreter '3.8 3.9 3.10 3.11 3.12 pypy3.8 pypy3.9 pypy3.10'
+ args: --release --out dist --interpreter '3.9 3.10 3.11 3.12 3.13 3.13t 3.14 3.14t pypy3.9 pypy3.10 pypy3.11'
manylinux: musllinux_1_2
- sccache: "true"
+ sccache: ${{ github.ref_type != 'tag' }} # zizmor: ignore[cache-poisoning]
- name: Upload wheels
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
- name: wheels
+ name: dist-${{ github.job }}-${{ matrix.target }}
path: dist
windows:
needs: test
runs-on: windows-latest
+
strategy:
+ fail-fast: false
matrix:
- target: [x64, x86]
+ target: [x64, x86] # x86 is not supported by pypy
+
steps:
- uses: actions/checkout@v4
- - uses: actions/setup-python@v4
+ with:
+ persist-credentials: false
+ - uses: actions/setup-python@v5
with:
python-version: |
- 3.8
3.9
3.10
3.11
3.12
+ 3.13
+ 3.14
+ ${{ matrix.target == 'x64' && 'pypy3.9' || '' }}
+ ${{ matrix.target == 'x64' && 'pypy3.10' || '' }}
+ allow-prereleases: true
+ architecture: ${{ matrix.target }}
+ - name: Build wheels
+ uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
+ with:
+ target: ${{ matrix.target }}
+ args: --release --out dist --interpreter '3.9 3.10 3.11 3.12 3.13 3.14' --interpreter ${{ matrix.target == 'x64' && 'pypy3.9 pypy3.10' || '' }}
+ sccache: ${{ github.ref_type != 'tag' }} # zizmor: ignore[cache-poisoning]
+ - name: Upload wheels
+ uses: actions/upload-artifact@v4
+ with:
+ name: dist-${{ github.job }}-${{ matrix.target }}
+ path: dist
+
+ windows-arm:
+ needs: test
+ runs-on: windows-11-arm
+
+ strategy:
+ fail-fast: false
+ matrix:
+ target:
+ - aarch64-pc-windows-msvc
+
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ persist-credentials: false
+ # Install each python version separately so that the paths can be passed to maturin. (otherwise finds pre-installed x64 versions)
+ - uses: actions/setup-python@v5
+ id: cp311
+ with:
+ python-version: 3.11
+ allow-prereleases: true
+ architecture: arm64
+ - uses: actions/setup-python@v5
+ id: cp312
+ with:
+ python-version: 3.12
+ allow-prereleases: true
+ architecture: arm64
+ - uses: actions/setup-python@v5
+ id: cp313
+ with:
+ python-version: 3.13
+ allow-prereleases: true
+ architecture: arm64
+ - uses: actions/setup-python@v5
+ id: cp314
+ with:
+ python-version: 3.14
+ allow-prereleases: true
+ architecture: arm64
+ # rust toolchain is not currently installed on windows arm64 images: https://github.com/actions/partner-runner-images/issues/77
+ - name: Setup rust
+ id: setup-rust
+ run: |
+ Invoke-WebRequest https://static.rust-lang.org/rustup/dist/aarch64-pc-windows-msvc/rustup-init.exe -OutFile .\rustup-init.exe
+ .\rustup-init.exe -y
+ Add-Content $env:GITHUB_PATH "$env:USERPROFILE\.cargo\bin"
+ - name: Build wheels
+ uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
+ with:
+ target: ${{ matrix.target }}
+ args: --release --out dist --interpreter ${{ steps.cp311.outputs.python-path }} ${{ steps.cp312.outputs.python-path }} ${{ steps.cp313.outputs.python-path }} ${{ steps.cp314.outputs.python-path }}
+ sccache: ${{ github.ref_type != 'tag' }} # zizmor: ignore[cache-poisoning]
+ - name: Upload wheels
+ uses: actions/upload-artifact@v4
+ with:
+ name: dist-${{ github.job }}-${{ matrix.target }}
+ path: dist
+
+ # free-threaded and normal builds share a site-packages folder on Windows so
+ # we must build free-threaded separately
+ windows-free-threaded:
+ needs: test
+ runs-on: windows-latest
+
+ strategy:
+ fail-fast: false
+ matrix:
+ target: [x64, x86] # x86 is not supported by pypy
+
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ persist-credentials: false
+ - uses: actions/setup-python@v5
+ with:
+ python-version: |
+ 3.13t
+ 3.14t
+ allow-prereleases: true
architecture: ${{ matrix.target }}
- name: Build wheels
- uses: PyO3/maturin-action@v1
+ uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
with:
target: ${{ matrix.target }}
- args: --release --out dist --interpreter '3.8 3.9 3.10 3.11 3.12'
- sccache: "true"
+ args: --release --out dist --interpreter '3.13t 3.14t'
+ sccache: ${{ github.ref_type != 'tag' }} # zizmor: ignore[cache-poisoning]
- name: Upload wheels
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
- name: wheels
+ name: dist-${{ github.job }}-${{ matrix.target }}-free-threaded
path: dist
macos:
needs: test
runs-on: macos-latest
+
strategy:
+ fail-fast: false
matrix:
target: [x86_64, aarch64]
+
steps:
- uses: actions/checkout@v4
- - uses: actions/setup-python@v4
with:
- python-version: "3.x"
+ persist-credentials: false
+ - uses: actions/setup-python@v5
+ with:
+ python-version: |
+ 3.9
+ 3.10
+ 3.11
+ 3.12
+ 3.13
+ 3.13t
+ 3.14
+ 3.14t
+ pypy3.9
+ pypy3.10
+ pypy3.11
+ allow-prereleases: true
- name: Build wheels
- uses: PyO3/maturin-action@v1
+ uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
with:
target: ${{ matrix.target }}
- args: --release --out dist --interpreter '3.8 3.9 3.10 3.11 3.12 pypy3.8 pypy3.9 pypy3.10'
- sccache: "true"
+ args: --release --out dist --interpreter '3.9 3.10 3.11 3.12 3.13 3.13t 3.14 3.14t pypy3.9 pypy3.10 pypy3.11'
+ sccache: ${{ github.ref_type != 'tag' }} # zizmor: ignore[cache-poisoning]
- name: Upload wheels
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
- name: wheels
+ name: dist-${{ github.job }}-${{ matrix.target }}
path: dist
sdist:
@@ -178,19 +334,25 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
+ with:
+ persist-credentials: false
+ - uses: actions/setup-python@v5
+ with:
+ python-version: 3.13
- name: Build an sdist
- uses: PyO3/maturin-action@v1
+ uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
with:
command: sdist
args: --out dist
- name: Upload sdist
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
- name: wheels
+ name: dist-${{ github.job }}
path: dist
release:
- needs: [manylinux, musllinux, windows, macos]
+ needs:
+ [manylinux, musllinux, windows, windows-arm, windows-free-threaded, macos]
runs-on: ubuntu-latest
if: "startsWith(github.ref, 'refs/tags/')"
environment:
@@ -201,17 +363,18 @@ jobs:
id-token: write
steps:
- - uses: actions/download-artifact@v3
+ - uses: actions/download-artifact@v4
with:
- name: wheels
+ pattern: dist-*
+ merge-multiple: true
- name: Publish to PyPI
- uses: PyO3/maturin-action@v1
+ uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1
with:
command: upload
args: --non-interactive --skip-existing *
- name: Create a GitHub Release
if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags')
- uses: softprops/action-gh-release@v1
+ uses: softprops/action-gh-release@da05d552573ad5aba039eaac05058a918a7bf631
with:
files: |
*
diff --git a/.github/workflows/zizmor.yml b/.github/workflows/zizmor.yml
new file mode 100644
index 0000000..8488704
--- /dev/null
+++ b/.github/workflows/zizmor.yml
@@ -0,0 +1,34 @@
+name: GitHub Actions Security Analysis with zizmor 🌈
+
+on:
+ push:
+ branches: ["main"]
+ pull_request:
+ branches: ["**"]
+
+permissions: {}
+
+jobs:
+ zizmor:
+ runs-on: ubuntu-latest
+
+ permissions:
+ security-events: write
+
+ steps:
+ - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+ with:
+ persist-credentials: false
+
+ - uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
+ - name: Run zizmor 🌈
+ run: uvx zizmor --format=sarif . > results.sarif
+
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Upload SARIF file
+ uses: github/codeql-action/upload-sarif@fca7ace96b7d713c7035871441bd52efbe39e27e # v3.28.19
+ with:
+ sarif_file: results.sarif
+ category: zizmor
diff --git a/.github/zizmor.yml b/.github/zizmor.yml
new file mode 100644
index 0000000..a9729df
--- /dev/null
+++ b/.github/zizmor.yml
@@ -0,0 +1,5 @@
+rules:
+ template-injection:
+ ignore:
+ # our matrix is dynamically generated via `nox -l` but with no user input
+ - CI.yml:71:9
diff --git a/.gitignore b/.gitignore
index c8f0442..62e01a9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -70,3 +70,9 @@ docs/_build/
# Pyenv
.python-version
+
+# User defined
+/dirhtml
+_cache
+
+TODO
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index f387f35..70a87a4 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -3,10 +3,11 @@ ci:
# pre-commit.ci doesn't have Rust installed
- fmt
- clippy
+ - zizmor
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.5.0
+ rev: v5.0.0
hooks:
- id: check-ast
- id: check-docstring-first
@@ -23,19 +24,15 @@ repos:
hooks:
- id: fmt
- id: clippy
- - repo: https://github.com/PyCQA/isort
- rev: 5.12.0
- hooks:
- - id: isort
- - repo: https://github.com/asottile/pyupgrade
- rev: v3.15.0
- hooks:
- - id: pyupgrade
- repo: https://github.com/psf/black
- rev: 23.11.0
+ rev: 25.1.0
hooks:
- id: black
- repo: https://github.com/pre-commit/mirrors-prettier
- rev: "v3.1.0"
+ rev: "v4.0.0-alpha.8"
hooks:
- id: prettier
+ - repo: https://github.com/woodruffw/zizmor
+ rev: v0.8.0
+ hooks:
+ - id: zizmor
diff --git a/.readthedocs.yml b/.readthedocs.yml
new file mode 100644
index 0000000..a6caba1
--- /dev/null
+++ b/.readthedocs.yml
@@ -0,0 +1,18 @@
+version: 2
+
+build:
+ os: ubuntu-22.04
+ tools:
+ python: "3.11"
+ rust: "1.70"
+
+sphinx:
+ builder: dirhtml
+ configuration: docs/conf.py
+ fail_on_warning: true
+
+formats: all
+
+python:
+ install:
+ - requirements: docs/requirements.txt
diff --git a/Cargo.lock b/Cargo.lock
index 9e52c63..a1842f2 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1,121 +1,90 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
-version = 3
+version = 4
[[package]]
name = "archery"
-version = "1.1.0"
+version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "487955f60962765486ce000015a3492ca45c34a2ebbf12bc0aa2b5110ca6e7d2"
+checksum = "eae2ed21cd55021f05707a807a5fc85695dafb98832921f6cfa06db67ca5b869"
dependencies = [
- "static_assertions",
"triomphe",
]
[[package]]
name = "autocfg"
-version = "1.1.0"
+version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
+checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26"
[[package]]
-name = "bitflags"
-version = "1.3.2"
+name = "cc"
+version = "1.2.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
-
-[[package]]
-name = "cfg-if"
-version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+checksum = "d0fc897dc1e865cc67c0e05a836d9d3f1df3cbe442aa4a9473b18e12624a4951"
+dependencies = [
+ "shlex",
+]
[[package]]
name = "heck"
-version = "0.4.1"
+version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
+checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
[[package]]
name = "indoc"
-version = "2.0.4"
+version = "2.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e186cfbae8084e513daff4240b4797e342f988cecda4fb6c939150f96315fd8"
+checksum = "f4c7245a08504955605670dbf141fceab975f15ca21570696aebe9d2e71576bd"
[[package]]
name = "libc"
-version = "0.2.147"
+version = "0.2.172"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3"
-
-[[package]]
-name = "lock_api"
-version = "0.4.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16"
-dependencies = [
- "autocfg",
- "scopeguard",
-]
+checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa"
[[package]]
name = "memoffset"
-version = "0.9.0"
+version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c"
+checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a"
dependencies = [
"autocfg",
]
[[package]]
name = "once_cell"
-version = "1.18.0"
+version = "1.21.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
-
-[[package]]
-name = "parking_lot"
-version = "0.12.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
-dependencies = [
- "lock_api",
- "parking_lot_core",
-]
+checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
[[package]]
-name = "parking_lot_core"
-version = "0.9.8"
+name = "portable-atomic"
+version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447"
-dependencies = [
- "cfg-if",
- "libc",
- "redox_syscall",
- "smallvec",
- "windows-targets",
-]
+checksum = "350e9b48cbc6b0e028b0473b114454c6316e57336ee184ceab6e53f72c178b3e"
[[package]]
name = "proc-macro2"
-version = "1.0.66"
+version = "1.0.95"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9"
+checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778"
dependencies = [
"unicode-ident",
]
[[package]]
name = "pyo3"
-version = "0.20.0"
+version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "04e8453b658fe480c3e70c8ed4e3d3ec33eb74988bd186561b0cc66b85c3bc4b"
+checksum = "f239d656363bcee73afef85277f1b281e8ac6212a1d42aa90e55b90ed43c47a4"
dependencies = [
- "cfg-if",
"indoc",
"libc",
"memoffset",
- "parking_lot",
+ "once_cell",
+ "portable-atomic",
"pyo3-build-config",
"pyo3-ffi",
"pyo3-macros",
@@ -124,19 +93,20 @@ dependencies = [
[[package]]
name = "pyo3-build-config"
-version = "0.20.0"
+version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a96fe70b176a89cff78f2fa7b3c930081e163d5379b4dcdf993e3ae29ca662e5"
+checksum = "755ea671a1c34044fa165247aaf6f419ca39caa6003aee791a0df2713d8f1b6d"
dependencies = [
"once_cell",
+ "python3-dll-a",
"target-lexicon",
]
[[package]]
name = "pyo3-ffi"
-version = "0.20.0"
+version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "214929900fd25e6604661ed9cf349727c8920d47deff196c4e28165a6ef2a96b"
+checksum = "fc95a2e67091e44791d4ea300ff744be5293f394f1bafd9f78c080814d35956e"
dependencies = [
"libc",
"pyo3-build-config",
@@ -144,9 +114,9 @@ dependencies = [
[[package]]
name = "pyo3-macros"
-version = "0.20.0"
+version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dac53072f717aa1bfa4db832b39de8c875b7c7af4f4a6fe93cdbf9264cf8383b"
+checksum = "a179641d1b93920829a62f15e87c0ed791b6c8db2271ba0fd7c2686090510214"
dependencies = [
"proc-macro2",
"pyo3-macros-backend",
@@ -156,46 +126,47 @@ dependencies = [
[[package]]
name = "pyo3-macros-backend"
-version = "0.20.0"
+version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7774b5a8282bd4f25f803b1f0d945120be959a36c72e08e7cd031c792fdfd424"
+checksum = "9dff85ebcaab8c441b0e3f7ae40a6963ecea8a9f5e74f647e33fcf5ec9a1e89e"
dependencies = [
"heck",
"proc-macro2",
+ "pyo3-build-config",
"quote",
"syn",
]
[[package]]
-name = "quote"
-version = "1.0.31"
+name = "python3-dll-a"
+version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5fe8a65d69dd0808184ebb5f836ab526bb259db23c657efa38711b1072ee47f0"
+checksum = "d381ef313ae70b4da5f95f8a4de773c6aa5cd28f73adec4b4a31df70b66780d8"
dependencies = [
- "proc-macro2",
+ "cc",
]
[[package]]
-name = "redox_syscall"
-version = "0.3.5"
+name = "quote"
+version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29"
+checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d"
dependencies = [
- "bitflags",
+ "proc-macro2",
]
[[package]]
name = "rpds"
-version = "1.1.0"
+version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a0e15515d3ce3313324d842629ea4905c25a13f81953eadb88f85516f59290a4"
+checksum = "a7f89f654d51fffdd6026289d07d1fd523244d46ae0a8bc22caa6dd7f9e8cb0b"
dependencies = [
"archery",
]
[[package]]
name = "rpds-py"
-version = "0.13.1"
+version = "0.25.1"
dependencies = [
"archery",
"pyo3",
@@ -203,28 +174,16 @@ dependencies = [
]
[[package]]
-name = "scopeguard"
-version = "1.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
-
-[[package]]
-name = "smallvec"
-version = "1.11.0"
+name = "shlex"
+version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9"
-
-[[package]]
-name = "static_assertions"
-version = "1.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
+checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
[[package]]
name = "syn"
-version = "2.0.32"
+version = "2.0.101"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "239814284fd6f1a4ffe4ca893952cdd93c224b6a1571c9a9eadd670295c0c9e2"
+checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf"
dependencies = [
"proc-macro2",
"quote",
@@ -233,81 +192,24 @@ dependencies = [
[[package]]
name = "target-lexicon"
-version = "0.12.9"
+version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "df8e77cb757a61f51b947ec4a7e3646efd825b73561db1c232a8ccb639e611a0"
+checksum = "e502f78cdbb8ba4718f566c418c52bc729126ffd16baee5baa718cf25dd5a69a"
[[package]]
name = "triomphe"
-version = "0.1.9"
+version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0eee8098afad3fb0c54a9007aab6804558410503ad676d4633f9c2559a00ac0f"
+checksum = "ef8f7726da4807b58ea5c96fdc122f80702030edc33b35aff9190a51148ccc85"
[[package]]
name = "unicode-ident"
-version = "1.0.11"
+version = "1.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c"
+checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
[[package]]
name = "unindent"
-version = "0.2.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c7de7d73e1754487cb58364ee906a499937a0dfabd86bcb980fa99ec8c8fa2ce"
-
-[[package]]
-name = "windows-targets"
-version = "0.48.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f"
-dependencies = [
- "windows_aarch64_gnullvm",
- "windows_aarch64_msvc",
- "windows_i686_gnu",
- "windows_i686_msvc",
- "windows_x86_64_gnu",
- "windows_x86_64_gnullvm",
- "windows_x86_64_msvc",
-]
-
-[[package]]
-name = "windows_aarch64_gnullvm"
-version = "0.48.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc"
-
-[[package]]
-name = "windows_aarch64_msvc"
-version = "0.48.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3"
-
-[[package]]
-name = "windows_i686_gnu"
-version = "0.48.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241"
-
-[[package]]
-name = "windows_i686_msvc"
-version = "0.48.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00"
-
-[[package]]
-name = "windows_x86_64_gnu"
-version = "0.48.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1"
-
-[[package]]
-name = "windows_x86_64_gnullvm"
-version = "0.48.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953"
-
-[[package]]
-name = "windows_x86_64_msvc"
-version = "0.48.0"
+version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"
+checksum = "7264e107f553ccae879d21fbea1d6724ac785e8c3bfc762137959b5802826ef3"
diff --git a/Cargo.toml b/Cargo.toml
index 6d98e59..54ab9f2 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "rpds-py"
-version = "0.13.1"
+version = "0.25.1"
edition = "2021"
[lib]
@@ -8,9 +8,11 @@ name = "rpds"
crate-type = ["cdylib"]
[dependencies]
-rpds = "1.1.0"
-archery = "1.1.0"
+rpds = "1.1.1"
+archery = "1.2.1"
[dependencies.pyo3]
-version = "0.20.0"
-features = ["extension-module"]
+version = "0.25.0"
+# To build extension for PyPy on Windows, "generate-import-lib" is needed:
+# https://github.com/PyO3/maturin-action/issues/267#issuecomment-2106844429
+features = ["extension-module", "generate-import-lib"]
diff --git a/README.rst b/README.rst
index 13f0d9e..4f3e49b 100644
--- a/README.rst
+++ b/README.rst
@@ -16,6 +16,10 @@
:alt: Build status
:target: https://github.com/crate-py/rpds/actions?query=workflow%3ACI
+.. |ReadTheDocs| image:: https://readthedocs.org/projects/referencing/badge/?version=stable&style=flat
+ :alt: ReadTheDocs status
+ :target: https://referencing.readthedocs.io/en/stable/
+
Python bindings to the `Rust rpds crate `_ for persistent data structures.
diff --git a/docs/api.rst b/docs/api.rst
new file mode 100644
index 0000000..d978fca
--- /dev/null
+++ b/docs/api.rst
@@ -0,0 +1,8 @@
+API Reference
+=============
+
+.. automodule:: rpds
+ :members:
+ :undoc-members:
+ :imported-members:
+ :special-members: __iter__, __getitem__, __len__, __rmatmul__
diff --git a/docs/conf.py b/docs/conf.py
new file mode 100644
index 0000000..ef63ad2
--- /dev/null
+++ b/docs/conf.py
@@ -0,0 +1,86 @@
+import importlib.metadata
+import re
+
+from url import URL
+
+GITHUB = URL.parse("https://github.com/")
+HOMEPAGE = GITHUB / "crate-py/rpds"
+
+project = "rpds.py"
+author = "Julian Berman"
+copyright = f"2023, {author}"
+
+release = importlib.metadata.version("rpds.py")
+version = release.partition("-")[0]
+
+language = "en"
+default_role = "any"
+
+extensions = [
+ "sphinx.ext.autodoc",
+ "sphinx.ext.autosectionlabel",
+ "sphinx.ext.coverage",
+ "sphinx.ext.doctest",
+ "sphinx.ext.extlinks",
+ "sphinx.ext.intersphinx",
+ "sphinx.ext.napoleon",
+ "sphinx.ext.todo",
+ "sphinx.ext.viewcode",
+ "sphinx_copybutton",
+ "sphinxcontrib.spelling",
+ "sphinxext.opengraph",
+]
+
+pygments_style = "lovelace"
+pygments_dark_style = "one-dark"
+
+html_theme = "furo"
+
+
+def entire_domain(host):
+ return r"http.?://" + re.escape(host) + r"($|/.*)"
+
+
+linkcheck_ignore = [
+ entire_domain("img.shields.io"),
+ f"{GITHUB}.*#.*",
+ str(HOMEPAGE / "actions"),
+ str(HOMEPAGE / "workflows/CI/badge.svg"),
+]
+
+# = Extensions =
+
+# -- autodoc --
+
+autodoc_default_options = {
+ "members": True,
+ "member-order": "bysource",
+}
+
+# -- autosectionlabel --
+
+autosectionlabel_prefix_document = True
+
+# -- intersphinx --
+
+intersphinx_mapping = {
+ "python": ("https://docs.python.org/", None),
+}
+
+# -- extlinks --
+
+extlinks = {
+ "gh": (str(HOMEPAGE) + "/%s", None),
+ "github": (str(GITHUB) + "/%s", None),
+}
+extlinks_detect_hardcoded_links = True
+
+# -- sphinx-copybutton --
+
+copybutton_prompt_text = r">>> |\.\.\. |\$"
+copybutton_prompt_is_regexp = True
+
+# -- sphinxcontrib-spelling --
+
+spelling_word_list_filename = "spelling-wordlist.txt"
+spelling_show_suggestions = True
diff --git a/docs/index.rst b/docs/index.rst
new file mode 100644
index 0000000..741eb17
--- /dev/null
+++ b/docs/index.rst
@@ -0,0 +1,52 @@
+Python bindings to the `Rust rpds crate <https://crates.io/crates/rpds>`_ for persistent data structures.
+
+What's here is quite minimal (in transparency, it was written initially to support replacing ``pyrsistent`` in the `referencing library <https://github.com/python-jsonschema/referencing>`_).
+If you see something missing (which is very likely), a PR is definitely welcome to add it.
+
+Installation
+------------
+
+The distribution on PyPI is named ``rpds.py`` (equivalently ``rpds-py``), and thus can be installed via e.g.:
+
+.. code:: sh
+
+ $ pip install rpds-py
+
+Note that if you install ``rpds-py`` from source, you will need a Rust toolchain installed, as it is a build-time dependency.
+An example of how to do so in a ``Dockerfile`` can be found `here `_.
+
+If you believe you are on a common platform which should have wheels built (i.e. and not need to compile from source), feel free to file an issue or pull request modifying the GitHub action used here to build wheels via ``maturin``.
+
+Usage
+-----
+
+Methods in general are named similarly to their ``rpds`` counterparts (rather than ``pyrsistent``\ 's conventions, though probably a full drop-in ``pyrsistent``\ -compatible wrapper module is a good addition at some point).
+
+.. code:: python
+
+ >>> from rpds import HashTrieMap, HashTrieSet, List
+
+ >>> m = HashTrieMap({"foo": "bar", "baz": "quux"})
+ >>> m.insert("spam", 37) == HashTrieMap({"foo": "bar", "baz": "quux", "spam": 37})
+ True
+ >>> m.remove("foo") == HashTrieMap({"baz": "quux"})
+ True
+
+ >>> s = HashTrieSet({"foo", "bar", "baz", "quux"})
+ >>> s.insert("spam") == HashTrieSet({"foo", "bar", "baz", "quux", "spam"})
+ True
+ >>> s.remove("foo") == HashTrieSet({"bar", "baz", "quux"})
+ True
+
+ >>> L = List([1, 3, 5])
+ >>> L.push_front(-1) == List([-1, 1, 3, 5])
+ True
+ >>> L.rest == List([3, 5])
+ True
+
+
+.. toctree::
+ :glob:
+ :hidden:
+
+ api
diff --git a/docs/requirements.in b/docs/requirements.in
new file mode 100644
index 0000000..01f3186
--- /dev/null
+++ b/docs/requirements.in
@@ -0,0 +1,8 @@
+file:.#egg=rpds-py
+furo
+pygments-github-lexers
+sphinx-copybutton
+sphinx>5
+sphinxcontrib-spelling>5
+sphinxext-opengraph
+url.py
diff --git a/docs/requirements.txt b/docs/requirements.txt
new file mode 100644
index 0000000..3171c85
--- /dev/null
+++ b/docs/requirements.txt
@@ -0,0 +1,81 @@
+# This file was autogenerated by uv via the following command:
+# uv pip compile --output-file /Users/julian/Development/rpds.py/docs/requirements.txt docs/requirements.in
+alabaster==1.0.0
+ # via sphinx
+babel==2.17.0
+ # via sphinx
+beautifulsoup4==4.13.4
+ # via furo
+certifi==2025.4.26
+ # via requests
+charset-normalizer==3.4.2
+ # via requests
+docutils==0.21.2
+ # via sphinx
+furo==2024.8.6
+ # via -r docs/requirements.in
+idna==3.10
+ # via requests
+imagesize==1.4.1
+ # via sphinx
+jinja2==3.1.6
+ # via sphinx
+markupsafe==3.0.2
+ # via jinja2
+packaging==25.0
+ # via sphinx
+pyenchant==3.2.2
+ # via sphinxcontrib-spelling
+pygments==2.19.1
+ # via
+ # furo
+ # pygments-github-lexers
+ # sphinx
+pygments-github-lexers==0.0.5
+ # via -r docs/requirements.in
+requests==2.32.3
+ # via
+ # sphinx
+ # sphinxcontrib-spelling
+roman-numerals-py==3.1.0
+ # via sphinx
+rpds-py @ file:.#egg=rpds-py
+ # via -r docs/requirements.in
+snowballstemmer==3.0.1
+ # via sphinx
+soupsieve==2.7
+ # via beautifulsoup4
+sphinx==8.2.3
+ # via
+ # -r docs/requirements.in
+ # furo
+ # sphinx-basic-ng
+ # sphinx-copybutton
+ # sphinxcontrib-spelling
+ # sphinxext-opengraph
+sphinx-basic-ng==1.0.0b2
+ # via furo
+sphinx-copybutton==0.5.2
+ # via -r docs/requirements.in
+sphinxcontrib-applehelp==2.0.0
+ # via sphinx
+sphinxcontrib-devhelp==2.0.0
+ # via sphinx
+sphinxcontrib-htmlhelp==2.1.0
+ # via sphinx
+sphinxcontrib-jsmath==1.0.1
+ # via sphinx
+sphinxcontrib-qthelp==2.0.0
+ # via sphinx
+sphinxcontrib-serializinghtml==2.0.0
+ # via sphinx
+sphinxcontrib-spelling==8.0.1
+ # via -r docs/requirements.in
+sphinxext-opengraph==0.10.0
+ # via -r docs/requirements.in
+typing-extensions==4.13.2
+ # via beautifulsoup4
+url-py==0.14.1
+ # via -r docs/requirements.in
+urllib3==2.4.0
+ # via requests
diff --git a/docs/spelling-wordlist.txt b/docs/spelling-wordlist.txt
new file mode 100644
index 0000000..15c6650
--- /dev/null
+++ b/docs/spelling-wordlist.txt
@@ -0,0 +1,3 @@
+iter
+len
+toolchain
diff --git a/noxfile.py b/noxfile.py
index 30e9eb1..8b9d55d 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -1,52 +1,193 @@
from pathlib import Path
from tempfile import TemporaryDirectory
+import os
import nox
ROOT = Path(__file__).parent
-TESTS = ROOT / "tests"
PYPROJECT = ROOT / "pyproject.toml"
+DOCS = ROOT / "docs"
+TESTS = ROOT / "tests"
+
+REQUIREMENTS = dict(
+ docs=DOCS / "requirements.txt",
+ tests=TESTS / "requirements.txt",
+)
+REQUIREMENTS_IN = [ # this is actually ordered, as files depend on each other
+ (path.parent / f"{path.stem}.in", path) for path in REQUIREMENTS.values()
+]
+SUPPORTED = [
+ "3.9",
+ "3.10",
+ "pypy3.10",
+ "3.11",
+ "pypy3.11",
+ "3.12",
+ "3.13",
+ "3.13t",
+ "3.14",
+ "3.14t",
+]
+LATEST = "3.13"
+nox.options.default_venv_backend = "uv|virtualenv"
nox.options.sessions = []
-def session(default=True, **kwargs):
+def session(default=True, python=LATEST, **kwargs): # noqa: D103
def _session(fn):
if default:
nox.options.sessions.append(kwargs.get("name", fn.__name__))
- return nox.session(**kwargs)(fn)
+ return nox.session(python=python, **kwargs)(fn)
return _session
-@session(python=["3.8", "3.9", "3.10", "3.11", "3.12", "pypy3"])
+@session(python=SUPPORTED)
def tests(session):
- session.install(ROOT, "-r", TESTS / "requirements.txt")
- if session.posargs == ["coverage"]:
+ """
+ Run the test suite with a corresponding Python version.
+ """
+ # Really we want --profile=test here (for
+ # https://github.com/crate-py/rpds/pull/87#issuecomment-2291409297)
+ # but it produces strange symbol errors saying:
+ # dynamic module does not define module export function (PyInit_rpds)
+ # so OK, dev it is.
+ session.install(
+ "--config-settings",
+ "build-args=--profile=dev",
+ "--no-cache",
+ "-r",
+ REQUIREMENTS["tests"],
+ )
+
+ if session.posargs and session.posargs[0] == "coverage":
+ if len(session.posargs) > 1 and session.posargs[1] == "github":
+ github = Path(os.environ["GITHUB_STEP_SUMMARY"])
+ else:
+ github = None
+
session.install("coverage[toml]")
- session.run("coverage", "run", "-m", "pytest")
- session.run("coverage", "report")
+ session.run("coverage", "run", "-m", "pytest", TESTS)
+ if github is None:
+ session.run("coverage", "report")
+ else:
+ with github.open("a") as summary:
+ summary.write("### Coverage\n\n")
+ summary.flush() # without a flush, output seems out of order.
+ session.run(
+ "coverage",
+ "report",
+ "--format=markdown",
+ stdout=summary,
+ )
else:
- session.run("pytest", *session.posargs, TESTS)
+ session.run("pytest", "--parallel-threads=10", *session.posargs, TESTS)
+
+
+@session()
+def audit(session):
+ """
+ Audit dependencies for vulnerabilities.
+ """
+ session.install("pip-audit", ROOT)
+ session.run("python", "-m", "pip_audit")
@session(tags=["build"])
def build(session):
+ """
+ Build a distribution suitable for PyPI and check its validity.
+ """
session.install("build", "twine")
with TemporaryDirectory() as tmpdir:
session.run("python", "-m", "build", ROOT, "--outdir", tmpdir)
session.run("twine", "check", "--strict", tmpdir + "/*")
-@session(default=False)
-def requirements(session):
- session.install("pip-tools")
- for each in [TESTS / "requirements.in"]:
+@session(tags=["style"])
+def style(session):
+ """
+ Check Python code style.
+ """
+ session.install("ruff")
+ session.run("ruff", "check", TESTS, __file__)
+
+
+@session()
+def typing(session):
+ """
+ Check the codebase using pyright by type checking the test suite.
+ """
+ session.install("pyright", ROOT, "-r", REQUIREMENTS["tests"])
+ session.run("pyright", TESTS)
+
+
+@session(tags=["docs"])
+@nox.parametrize(
+ "builder",
+ [
+ nox.param(name, id=name)
+ for name in [
+ "dirhtml",
+ "doctest",
+ "linkcheck",
+ "man",
+ "spelling",
+ ]
+ ],
+)
+def docs(session, builder):
+ """
+ Build the documentation using a specific Sphinx builder.
+ """
+ session.install("-r", REQUIREMENTS["docs"])
+ with TemporaryDirectory() as tmpdir_str:
+ tmpdir = Path(tmpdir_str)
+ argv = ["-n", "-T", "-W"]
+ if builder != "spelling":
+ argv += ["-q"]
+ posargs = session.posargs or [tmpdir / builder]
session.run(
- "pip-compile",
- "--resolver",
- "backtracking",
- "-U",
- each.relative_to(ROOT),
+ "python",
+ "-m",
+ "sphinx",
+ "-b",
+ builder,
+ DOCS,
+ *argv,
+ *posargs,
)
+
+
+@session(tags=["docs", "style"], name="docs(style)")
+def docs_style(session):
+ """
+ Check the documentation style.
+ """
+ session.install(
+ "doc8",
+ "pygments",
+ "pygments-github-lexers",
+ )
+ session.run("python", "-m", "doc8", "--config", PYPROJECT, DOCS)
+
+
+@session(default=False)
+def requirements(session):
+ """
+ Update the project's pinned requirements.
+
+ You should commit the result afterwards.
+ """
+ if session.venv_backend == "uv":
+ cmd = ["uv", "pip", "compile"]
+ else:
+ session.install("pip-tools")
+ cmd = ["pip-compile", "--resolver", "backtracking", "--strip-extras"]
+
+ for each, out in REQUIREMENTS_IN:
+ # otherwise output files end up with silly absolute path comments...
+ relative = each.relative_to(ROOT)
+ session.run(*cmd, "--upgrade", "--output-file", out, relative)
diff --git a/pyproject.toml b/pyproject.toml
index e87ac9f..bb66591 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,29 +1,29 @@
[build-system]
-requires = ["maturin>=1.0,<2.0"]
+requires = ["maturin>=1.2,<2.0"]
build-backend = "maturin"
[project]
name = "rpds-py"
description = "Python bindings to Rust's persistent data structures (rpds)"
+requires-python = ">=3.9"
readme = "README.rst"
-license = {text = "MIT"}
-requires-python = ">=3.8"
+license = "MIT"
+license-files = ["LICENSE"]
keywords = ["data structures", "rust", "persistent"]
authors = [
- {email = "Julian+rpds@GrayVines.com"},
- {name = "Julian Berman"},
+ { name = "Julian Berman", email = "Julian+rpds@GrayVines.com" },
]
classifiers = [
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
- "License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Rust",
- "Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
+ "Programming Language :: Python :: 3.14",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
@@ -31,20 +31,113 @@ classifiers = [
dynamic = ["version"]
[project.urls]
+Documentation = "https://rpds.readthedocs.io/"
Homepage = "https://github.com/crate-py/rpds"
Issues = "https://github.com/crate-py/rpds/issues/"
Funding = "https://github.com/sponsors/Julian"
+Tidelift = "https://tidelift.com/subscription/pkg/pypi-rpds-py?utm_source=pypi-rpds-py&utm_medium=referral&utm_campaign=pypi-link"
Source = "https://github.com/crate-py/rpds"
+Upstream = "https://github.com/orium/rpds"
[tool.black]
line-length = 79
-[tool.isort]
-combine_as_imports = true
-from_first = true
-include_trailing_comma = true
-multi_line_output = 3
-known_first_party = ["rpds"]
+[tool.coverage.html]
+show_contexts = true
+skip_covered = false
+
+[tool.coverage.run]
+branch = true
+dynamic_context = "test_function"
+
+[tool.coverage.report]
+exclude_also = [
+ "if TYPE_CHECKING:",
+ "\\s*\\.\\.\\.\\s*",
+]
+fail_under = 100
+show_missing = true
+skip_covered = true
+
+[tool.doc8]
+ignore = [
+ "D000", # see PyCQA/doc8#125
+ "D001", # one sentence per line, so max length doesn't make sense
+]
[tool.maturin]
features = ["pyo3/extension-module"]
+
+[tool.pyright]
+reportUnnecessaryTypeIgnoreComment = true
+strict = ["**/*"]
+exclude = [
+ "**/tests/__init__.py",
+ "**/tests/test_*.py",
+]
+
+[tool.ruff]
+line-length = 79
+
+[tool.ruff.lint]
+select = ["ALL"]
+ignore = [
+ "A001", # It's fine to shadow builtins
+ "A002",
+ "A003",
+ "A005",
+ "ARG", # This is all wrong whenever an interface is involved
+ "ANN", # Just let the type checker do this
+ "B006", # Mutable arguments require care but are OK if you don't abuse them
+ "B008", # It's totally OK to call functions for default arguments.
+ "B904", # raise SomeException(...) is fine.
+ "B905", # No need for explicit strict, this is simply zip's default behavior
+ "C408", # Calling dict is fine when it saves quoting the keys
+ "C901", # Not really something to focus on
+ "D105", # It's fine to not have docstrings for magic methods.
+ "D107", # __init__ especially doesn't need a docstring
+ "D200", # This rule makes diffs uglier when expanding docstrings
+ "D203", # No blank lines before docstrings.
+ "D212", # Start docstrings on the second line.
+ "D400", # This rule misses sassy docstrings ending with ! or ?
+ "D401", # This rule is too flaky.
+ "D406", # Section headers should end with a colon not a newline
+ "D407", # Underlines aren't needed
+ "D412", # Plz spaces after section headers
+ "EM101", # These don't bother me, it's fine there's some duplication.
+ "EM102",
+ "FBT", # It's worth avoiding boolean args but I don't care to enforce it
+ "FIX", # Yes thanks, if I could it wouldn't be there
+ "N", # These naming rules are silly
+ "PLR0912", # These metrics are fine to be aware of but not to enforce
+ "PLR0913",
+ "PLR0915",
+ "PLW2901", # Shadowing for loop variables is occasionally fine.
+ "PT006", # pytest parametrize takes strings as well
+ "PYI025", # wat, I'm not confused, thanks.
+ "RET502", # Returning None implicitly is fine
+ "RET503",
+ "RET505", # These push you to use `if` instead of `elif`, but for no reason
+ "RET506",
+ "RSE102", # Ha, what, who even knew you could leave the parens off. But no.
+ "SIM300", # Not sure what heuristic this uses, but it's easily incorrect
+ "SLF001", # Private usage within this package itself is fine
+ "TD", # These TODO style rules are also silly
+ "UP007", # We support 3.9
+]
+
+[tool.ruff.lint.flake8-pytest-style]
+mark-parentheses = false
+
+[tool.ruff.lint.flake8-quotes]
+docstring-quotes = "double"
+
+[tool.ruff.lint.isort]
+combine-as-imports = true
+from-first = true
+known-first-party = ["rpds"]
+
+[tool.ruff.lint.per-file-ignores]
+"noxfile.py" = ["ANN", "D100", "S101", "T201"]
+"docs/*" = ["ANN", "D", "INP001"]
+"tests/*" = ["ANN", "B018", "D", "PLR", "RUF012", "S", "SIM", "TRY"]
diff --git a/rpds.pyi b/rpds.pyi
index b9b6555..5af0e32 100644
--- a/rpds.pyi
+++ b/rpds.pyi
@@ -1,5 +1,4 @@
from typing import (
- FrozenSet,
ItemsView,
Iterable,
Iterator,
@@ -9,44 +8,70 @@ from typing import (
ValuesView,
)
-T = TypeVar("T")
-KT = TypeVar("KT", covariant=True)
-VT = TypeVar("VT", covariant=True)
+_T = TypeVar("_T")
+_KT_co = TypeVar("_KT_co", covariant=True)
+_VT_co = TypeVar("_VT_co", covariant=True)
+_KU_co = TypeVar("_KU_co", covariant=True)
+_VU_co = TypeVar("_VU_co", covariant=True)
-class HashTrieMap(Mapping[KT, VT]):
+class HashTrieMap(Mapping[_KT_co, _VT_co]):
def __init__(
self,
- value: Mapping[KT, VT] | Iterable[tuple[KT, VT]] = {},
- **kwds: Mapping[KT, VT],
+ value: Mapping[_KT_co, _VT_co] | Iterable[tuple[_KT_co, _VT_co]] = {},
+ **kwds: Mapping[_KT_co, _VT_co],
): ...
- def __getitem__(self, key: KT) -> VT: ...
- def __iter__(self) -> Iterator[KT]: ...
+ def __getitem__(self, key: _KT_co) -> _VT_co: ...
+ def __iter__(self) -> Iterator[_KT_co]: ...
def __len__(self) -> int: ...
- def discard(self, key: KT) -> "HashTrieMap[KT, VT]": ...
- def items(self) -> ItemsView[KT, VT]: ...
- def keys(self) -> KeysView[KT]: ...
- def values(self) -> ValuesView[VT]: ...
- def remove(self, key: KT) -> "HashTrieMap[KT, VT]": ...
- def insert(self, key: KT, val: VT) -> "HashTrieMap[KT, VT]": ...
- def update(self, *args: Mapping): ...
+ def discard(self, key: _KT_co) -> HashTrieMap[_KT_co, _VT_co]: ...
+ def items(self) -> ItemsView[_KT_co, _VT_co]: ...
+ def keys(self) -> KeysView[_KT_co]: ...
+ def values(self) -> ValuesView[_VT_co]: ...
+ def remove(self, key: _KT_co) -> HashTrieMap[_KT_co, _VT_co]: ...
+ def insert(
+ self,
+ key: _KT_co,
+ val: _VT_co,
+ ) -> HashTrieMap[_KT_co, _VT_co]: ...
+ def update(
+ self,
+ *args: Mapping[_KU_co, _VU_co] | Iterable[tuple[_KU_co, _VU_co]],
+ ) -> HashTrieMap[_KT_co | _KU_co, _VT_co | _VU_co]: ...
@classmethod
def convert(
cls,
- value: Mapping[KT, VT] | Iterable[tuple[KT, VT]],
- ) -> "HashTrieMap[KT, VT]": ...
+ value: Mapping[_KT_co, _VT_co] | Iterable[tuple[_KT_co, _VT_co]],
+ ) -> HashTrieMap[_KT_co, _VT_co]: ...
+ @classmethod
+ def fromkeys(
+ cls,
+ keys: Iterable[_KT_co],
+ value: _VT_co = None,
+ ) -> HashTrieMap[_KT_co, _VT_co]: ...
+
+class HashTrieSet(frozenset[_T]):
+ def __init__(self, value: Iterable[_T] = ()): ...
+ def __iter__(self) -> Iterator[_T]: ...
+ def __len__(self) -> int: ...
+ def discard(self, value: _T) -> HashTrieSet[_T]: ...
+ def remove(self, value: _T) -> HashTrieSet[_T]: ...
+ def insert(self, value: _T) -> HashTrieSet[_T]: ...
+ def update(self, *args: Iterable[_T]) -> HashTrieSet[_T]: ...
-class HashTrieSet(FrozenSet[T]):
- def __init__(self, value: Iterable[T] = ()): ...
- def __iter__(self) -> Iterator[T]: ...
+class List(Iterable[_T]):
+ def __init__(self, value: Iterable[_T] = (), *more: _T): ...
+ def __iter__(self) -> Iterator[_T]: ...
def __len__(self) -> int: ...
- def discard(self, value: T) -> "HashTrieSet[T]": ...
- def remove(self, value: T) -> "HashTrieSet[T]": ...
- def insert(self, value: T) -> "HashTrieSet[T]": ...
- def update(self, *args: Iterable[T]) -> "HashTrieSet[T]": ...
+ def push_front(self, value: _T) -> List[_T]: ...
+ def drop_first(self) -> List[_T]: ...
-class List(Iterable[T]):
- def __init__(self, value: Iterable[T] = (), *more: T): ...
- def __iter__(self) -> Iterator[T]: ...
+class Queue(Iterable[_T]):
+ def __init__(self, value: Iterable[_T] = (), *more: _T): ...
+ def __iter__(self) -> Iterator[_T]: ...
def __len__(self) -> int: ...
- def push_front(self, value: T) -> "List[T]": ...
- def drop_first(self) -> "List[T]": ...
+ def enqueue(self, value: _T) -> Queue[_T]: ...
+ def dequeue(self, value: _T) -> Queue[_T]: ...
+ @property
+ def is_empty(self) -> _T: ...
+ @property
+ def peek(self) -> _T: ...
diff --git a/src/lib.rs b/src/lib.rs
index 127f7f3..9f266a8 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,19 +1,44 @@
-use std::hash::{Hash, Hasher};
-use std::vec::IntoIter;
-
-use pyo3::exceptions::PyIndexError;
+use pyo3::exceptions::{PyIndexError, PyTypeError};
use pyo3::pyclass::CompareOp;
use pyo3::types::{PyDict, PyIterator, PyTuple, PyType};
-use pyo3::{exceptions::PyKeyError, types::PyMapping};
-use pyo3::{prelude::*, AsPyPointer, PyTypeInfo};
-use rpds::{HashTrieMap, HashTrieMapSync, HashTrieSet, HashTrieSetSync, List, ListSync};
+use pyo3::{exceptions::PyKeyError, types::PyMapping, types::PyTupleMethods};
+use pyo3::{prelude::*, BoundObject, PyTypeInfo};
+use rpds::{
+ HashTrieMap, HashTrieMapSync, HashTrieSet, HashTrieSetSync, List, ListSync, Queue, QueueSync,
+};
+use std::collections::hash_map::DefaultHasher;
+use std::hash::{Hash, Hasher};
-#[derive(Clone, Debug)]
+fn hash_shuffle_bits(h: usize) -> usize {
+ ((h ^ 89869747) ^ (h << 16)).wrapping_mul(3644798167)
+}
+
+#[derive(Debug)]
struct Key {
hash: isize,
inner: PyObject,
}
+impl<'py> IntoPyObject<'py> for Key {
+ type Target = PyAny;
+ type Output = Bound<'py, Self::Target>;
+ type Error = std::convert::Infallible;
+
+ fn into_pyobject(self, py: Python<'py>) -> Result<Self::Output, Self::Error> {
+ Ok(self.inner.into_bound(py))
+ }
+}
+
+impl<'a, 'py> IntoPyObject<'py> for &'a Key {
+ type Target = PyAny;
+ type Output = Borrowed<'a, 'py, Self::Target>;
+ type Error = std::convert::Infallible;
+
+ fn into_pyobject(self, py: Python<'py>) -> Result<Self::Output, Self::Error> {
+ Ok(self.inner.bind_borrowed(py))
+ }
+}
+
impl Hash for Key {
fn hash<H: Hasher>(&self, state: &mut H) {
state.write_isize(self.hash);
@@ -33,23 +58,20 @@ impl PartialEq for Key {
}
}
-impl IntoPy for Key {
- fn into_py(self, py: Python<'_>) -> PyObject {
- self.inner.into_py(py)
- }
-}
-
-unsafe impl AsPyPointer for Key {
- fn as_ptr(&self) -> *mut pyo3::ffi::PyObject {
- self.inner.as_ptr()
+impl Key {
+ fn clone_ref(&self, py: Python<'_>) -> Self {
+ Key {
+ hash: self.hash,
+ inner: self.inner.clone_ref(py),
+ }
}
}
impl<'source> FromPyObject<'source> for Key {
- fn extract(ob: &'source PyAny) -> PyResult<Self> {
+ fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> {
Ok(Key {
hash: ob.hash()?,
- inner: ob.into(),
+ inner: ob.clone().unbind(),
})
}
}
@@ -67,16 +89,16 @@ impl From<HashTrieMapSync<Key, PyObject>> for HashTrieMapPy {
}
impl<'source> FromPyObject<'source> for HashTrieMapPy {
- fn extract(ob: &'source PyAny) -> PyResult<Self> {
+ fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> {
let mut ret = HashTrieMap::new_sync();
if let Ok(mapping) = ob.downcast::<PyMapping>() {
- for each in mapping.items()?.iter()? {
- let (k, v): (Key, PyObject) = each?.extract()?;
+ for each in mapping.items()?.iter() {
+ let (k, v): (Key, PyObject) = each.extract()?;
ret.insert_mut(k, v);
}
} else {
- for each in ob.iter()? {
- let (k, v): (Key, PyObject) = each?.extract()?;
+ for each in ob.try_iter()? {
+ let (k, v) = each?.extract()?;
ret.insert_mut(k, v);
}
}
@@ -87,8 +109,8 @@ impl<'source> FromPyObject<'source> for HashTrieMapPy {
#[pymethods]
impl HashTrieMapPy {
#[new]
- #[pyo3(signature = (value=None, **kwds))]
- fn init(value: Option<HashTrieMapPy>, kwds: Option<&PyDict>) -> PyResult<Self> {
+ #[pyo3(signature = (value=None, ** kwds))]
+ fn init(value: Option<HashTrieMapPy>, kwds: Option<&Bound<'_, PyDict>>) -> PyResult<Self> {
let mut map: HashTrieMapPy;
if let Some(value) = value {
map = value;
@@ -99,7 +121,7 @@ impl HashTrieMapPy {
}
if let Some(kwds) = kwds {
for (k, v) in kwds {
- map.inner.insert_mut(Key::extract(k)?, v.into());
+ map.inner.insert_mut(Key::extract_bound(&k)?, v.into());
}
}
Ok(map)
@@ -109,18 +131,15 @@ impl HashTrieMapPy {
self.inner.contains_key(&key)
}
- fn __iter__(slf: PyRef<'_, Self>) -> PyResult<Py<KeyIterator>> {
- Py::new(
- slf.py(),
- KeyIterator {
- inner: slf.keys().into_iter(),
- },
- )
+ fn __iter__(slf: PyRef<'_, Self>) -> KeysIterator {
+ KeysIterator {
+ inner: slf.inner.clone(),
+ }
}
- fn __getitem__(&self, key: Key) -> PyResult<PyObject> {
+ fn __getitem__(&self, key: Key, py: Python) -> PyResult<PyObject> {
match self.inner.get(&key) {
- Some(value) => Ok(value.to_owned()),
+ Some(value) => Ok(value.clone_ref(py)),
None => Err(PyKeyError::new_err(key)),
}
}
@@ -133,7 +152,10 @@ impl HashTrieMapPy {
let contents = self.inner.into_iter().map(|(k, v)| {
format!(
"{}: {}",
- k.clone().into_py(py),
+ k.inner
+ .call_method0(py, "__repr__")
+ .and_then(|r| r.extract(py))
+ .unwrap_or("".to_owned()),
v.call_method0(py, "__repr__")
.and_then(|r| r.extract(py))
.unwrap_or("".to_owned())
@@ -145,61 +167,148 @@ impl HashTrieMapPy {
)
}
- fn __richcmp__(&self, other: &Self, op: CompareOp, py: Python<'_>) -> PyResult<PyObject> {
+ fn __richcmp__<'py>(&self, other: &Self, op: CompareOp, py: Python<'py>) -> PyResult<PyObject> {
match op {
- CompareOp::Eq => Ok((self.inner.size() == other.inner.size()
+ CompareOp::Eq => (self.inner.size() == other.inner.size()
&& self
.inner
.iter()
.map(|(k1, v1)| (v1, other.inner.get(k1)))
- .map(|(v1, v2)| PyAny::eq(v1.extract(py)?, v2))
+ .map(|(v1, v2)| v1.bind(py).eq(v2))
.all(|r| r.unwrap_or(false)))
- .into_py(py)),
- CompareOp::Ne => Ok((self.inner.size() != other.inner.size()
+ .into_pyobject(py)
+ .map_err(Into::into)
+ .map(BoundObject::into_any)
+ .map(BoundObject::unbind),
+ CompareOp::Ne => (self.inner.size() != other.inner.size()
|| self
.inner
.iter()
.map(|(k1, v1)| (v1, other.inner.get(k1)))
- .map(|(v1, v2)| PyAny::ne(v1.extract(py)?, v2))
+ .map(|(v1, v2)| v1.bind(py).ne(v2))
.all(|r| r.unwrap_or(true)))
- .into_py(py)),
+ .into_pyobject(py)
+ .map_err(Into::into)
+ .map(BoundObject::into_any)
+ .map(BoundObject::unbind),
_ => Ok(py.NotImplemented()),
}
}
- fn __reduce__(slf: PyRef<Self>) -> (&PyType, (Vec<(Key, PyObject)>,)) {
+ fn __hash__(&self, py: Python) -> PyResult<isize> {
+ // modified from https://github.com/python/cpython/blob/d69529d31ccd1510843cfac1ab53bb8cb027541f/Objects/setobject.c#L715
+
+ let mut hash_val = self
+ .inner
+ .iter()
+ .map(|(key, val)| {
+ let mut hasher = DefaultHasher::new();
+ let val_bound = val.bind(py);
+
+ let key_hash = key.hash;
+ let val_hash = val_bound.hash().map_err(|_| {
+ PyTypeError::new_err(format!(
+ "Unhashable type in HashTrieMap of key {}: {}",
+ key.inner
+ .bind(py)
+ .repr()
+ .and_then(|r| r.extract())
+ .unwrap_or(" error".to_string()),
+ val_bound
+ .repr()
+ .and_then(|r| r.extract())
+ .unwrap_or(" error".to_string())
+ ))
+ })?;
+
+ hasher.write_isize(key_hash);
+ hasher.write_isize(val_hash);
+
+ Ok(hasher.finish() as usize)
+ })
+ .try_fold(0, |acc: usize, x: PyResult<usize>| {
+ PyResult::<usize>::Ok(acc ^ hash_shuffle_bits(x?))
+ })?;
+
+ // factor in the number of entries in the collection
+ hash_val ^= self.inner.size().wrapping_add(1).wrapping_mul(1927868237);
+
+ // dispense patterns in the hash value
+ hash_val ^= (hash_val >> 11) ^ (hash_val >> 25);
+ hash_val = hash_val.wrapping_mul(69069).wrapping_add(907133923);
+
+ Ok(hash_val as isize)
+ }
+
+ fn __reduce__(slf: PyRef<Self>) -> (Bound<'_, PyType>, (Vec<(Key, PyObject)>,)) {
(
HashTrieMapPy::type_object(slf.py()),
(slf.inner
.iter()
- .map(|(k, v)| (k.clone(), v.clone()))
+ .map(|(k, v)| (k.clone_ref(slf.py()), v.clone_ref(slf.py())))
.collect(),),
)
}
#[classmethod]
- fn convert(_cls: &PyType, value: &PyAny, py: Python) -> PyResult<PyObject> {
+ fn convert(
+ _cls: &Bound<'_, PyType>,
+ value: Bound<'_, PyAny>,
+ py: Python,
+ ) -> PyResult<PyObject> {
if value.is_instance_of::<HashTrieMapPy>() {
- Ok(value.into())
+ Ok(value.unbind())
} else {
- Ok(HashTrieMapPy::extract(value)?.into_py(py))
+ HashTrieMapPy::extract_bound(&value)?
+ .into_pyobject(py)
+ .map(BoundObject::into_any)
+ .map(BoundObject::unbind)
}
}
- fn get(&self, key: Key) -> Option<&PyObject> {
- self.inner.get(&key)
+ #[classmethod]
+ #[pyo3(signature = (keys, val=None))]
+ fn fromkeys(
+ _cls: &Bound<'_, PyType>,
+ keys: &Bound<'_, PyAny>,
+ val: Option<&Bound<'_, PyAny>>,
+ py: Python,
+ ) -> PyResult<HashTrieMapPy> {
+ let mut inner = HashTrieMap::new_sync();
+ let none = py.None().into_bound(py);
+ let value = val.unwrap_or(&none);
+ for each in keys.try_iter()? {
+ let key = Key::extract_bound(&each?)?;
+ inner.insert_mut(key, value.clone().unbind());
+ }
+ Ok(HashTrieMapPy { inner })
+ }
+
+ #[pyo3(signature = (key, default=None))]
+ fn get(&self, key: Key, default: Option<PyObject>, py: Python) -> Option<PyObject> {
+ if let Some(value) = self.inner.get(&key) {
+ Some(value.clone_ref(py))
+ } else {
+ default
+ }
}
- fn keys(&self) -> Vec<Key> {
- self.inner.keys().cloned().collect()
+ fn keys(&self) -> KeysView {
+ KeysView {
+ inner: self.inner.clone(),
+ }
}
- fn values(&self) -> Vec<&PyObject> {
- self.inner.values().collect::<Vec<&PyObject>>()
+ fn values(&self) -> ValuesView {
+ ValuesView {
+ inner: self.inner.clone(),
+ }
}
- fn items(&self) -> Vec<(Key, &PyObject)> {
- self.inner.iter().map(|(k, v)| (k.clone(), v)).collect()
+ fn items(&self) -> ItemsView {
+ ItemsView {
+ inner: self.inner.clone(),
+ }
}
fn discard(&self, key: Key) -> PyResult<HashTrieMapPy> {
@@ -213,9 +322,9 @@ impl HashTrieMapPy {
}
}
- fn insert(&self, key: Key, value: &PyAny) -> HashTrieMapPy {
+ fn insert(&self, key: Key, value: Bound<'_, PyAny>) -> HashTrieMapPy {
HashTrieMapPy {
- inner: self.inner.insert(key, value.into()),
+ inner: self.inner.insert(key, value.unbind()),
}
}
@@ -229,17 +338,21 @@ impl HashTrieMapPy {
}
#[pyo3(signature = (*maps, **kwds))]
- fn update(&self, maps: &PyTuple, kwds: Option<&PyDict>) -> PyResult<HashTrieMapPy> {
+ fn update(
+ &self,
+ maps: &Bound<'_, PyTuple>,
+ kwds: Option<&Bound<'_, PyDict>>,
+ ) -> PyResult<HashTrieMapPy> {
let mut inner = self.inner.clone();
for value in maps {
- let map = HashTrieMapPy::extract(value)?;
+ let map = HashTrieMapPy::extract_bound(&value)?;
for (k, v) in &map.inner {
- inner.insert_mut(k.to_owned(), v.to_owned());
+ inner.insert_mut(k.clone_ref(value.py()), v.clone_ref(value.py()));
}
}
if let Some(kwds) = kwds {
for (k, v) in kwds {
- inner.insert_mut(Key::extract(k)?, v.extract()?);
+ inner.insert_mut(Key::extract_bound(&k)?, v.extract()?);
}
}
Ok(HashTrieMapPy { inner })
@@ -247,18 +360,398 @@ impl HashTrieMapPy {
}
#[pyclass(module = "rpds")]
-struct KeyIterator {
- inner: IntoIter<Key>,
+struct KeysIterator {
+ inner: HashTrieMapSync,
}
#[pymethods]
-impl KeyIterator {
+impl KeysIterator {
fn __iter__(slf: PyRef<'_, Self>) -> PyRef<'_, Self> {
slf
}
fn __next__(mut slf: PyRefMut<'_, Self>) -> Option<Key> {
- slf.inner.next()
+ let first = slf.inner.keys().next()?.clone_ref(slf.py());
+ slf.inner = slf.inner.remove(&first);
+ Some(first)
+ }
+}
+
+#[pyclass(module = "rpds")]
+struct ValuesIterator {
+ inner: HashTrieMapSync,
+}
+
+#[pymethods]
+impl ValuesIterator {
+ fn __iter__(slf: PyRef<'_, Self>) -> PyRef<'_, Self> {
+ slf
+ }
+
+ fn __next__(mut slf: PyRefMut<'_, Self>) -> Option<PyObject> {
+ let kv = slf.inner.iter().next()?;
+ let value = kv.1.clone_ref(slf.py());
+ slf.inner = slf.inner.remove(kv.0);
+ Some(value)
+ }
+}
+
+#[pyclass(module = "rpds")]
+struct ItemsIterator {
+ inner: HashTrieMapSync,
+}
+
+#[pymethods]
+impl ItemsIterator {
+ fn __iter__(slf: PyRef<'_, Self>) -> PyRef<'_, Self> {
+ slf
+ }
+
+ fn __next__(mut slf: PyRefMut<'_, Self>) -> Option<(Key, PyObject)> {
+ let kv = slf.inner.iter().next()?;
+ let key = kv.0.clone_ref(slf.py());
+ let value = kv.1.clone_ref(slf.py());
+
+ slf.inner = slf.inner.remove(kv.0);
+
+ Some((key, value))
+ }
+}
+
+#[pyclass(module = "rpds")]
+struct KeysView {
+ inner: HashTrieMapSync,
+}
+
+#[pymethods]
+impl KeysView {
+ fn __contains__(&self, key: Key) -> bool {
+ self.inner.contains_key(&key)
+ }
+
+ fn __eq__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
+ let abc = PyModule::import(py, "collections.abc")?;
+ if !other.is_instance(&abc.getattr("Set")?)? || other.len()? != slf.inner.size() {
+ return Ok(false);
+ }
+ for each in other.try_iter()? {
+ if !slf.inner.contains_key(&Key::extract_bound(&each?)?) {
+ return Ok(false);
+ }
+ }
+ Ok(true)
+ }
+
+ fn __lt__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
+ let abc = PyModule::import(py, "collections.abc")?;
+ if !other.is_instance(&abc.getattr("Set")?)? || other.len()? <= slf.inner.size() {
+ return Ok(false);
+ }
+
+ for each in slf.inner.keys() {
+ if !other.contains(each.inner.clone_ref(slf.py()))? {
+ return Ok(false);
+ }
+ }
+ Ok(true)
+ }
+
+ fn __le__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
+ let abc = PyModule::import(py, "collections.abc")?;
+ if !other.is_instance(&abc.getattr("Set")?)? || other.len()? < slf.inner.size() {
+ return Ok(false);
+ }
+
+ for each in slf.inner.keys() {
+ if !other.contains(each.inner.clone_ref(slf.py()))? {
+ return Ok(false);
+ }
+ }
+ Ok(true)
+ }
+
+ fn __gt__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
+ let abc = PyModule::import(py, "collections.abc")?;
+ if !other.is_instance(&abc.getattr("Set")?)? || other.len()? >= slf.inner.size() {
+ return Ok(false);
+ }
+ for each in other.try_iter()? {
+ if !slf.inner.contains_key(&Key::extract_bound(&each?)?) {
+ return Ok(false);
+ }
+ }
+ Ok(true)
+ }
+
+ fn __ge__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
+ let abc = PyModule::import(py, "collections.abc")?;
+ if !other.is_instance(&abc.getattr("Set")?)? || other.len()? > slf.inner.size() {
+ return Ok(false);
+ }
+ for each in other.try_iter()? {
+ if !slf.inner.contains_key(&Key::extract_bound(&each?)?) {
+ return Ok(false);
+ }
+ }
+ Ok(true)
+ }
+
+ fn __iter__(slf: PyRef<'_, Self>) -> KeysIterator {
+ KeysIterator {
+ inner: slf.inner.clone(),
+ }
+ }
+
+ fn __len__(slf: PyRef<'_, Self>) -> usize {
+ slf.inner.size()
+ }
+
+ fn __and__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>) -> PyResult<HashTrieSetPy> {
+ KeysView::intersection(slf, other)
+ }
+
+ fn __or__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult<KeysView> {
+ KeysView::union(slf, other, py)
+ }
+
+ fn __repr__(&self, py: Python) -> PyResult<String> {
+ let contents = self.inner.into_iter().map(|(k, _)| {
+ Ok(k.clone_ref(py)
+ .inner
+ .into_pyobject(py)?
+ .call_method0("__repr__")
+ .and_then(|r| r.extract())
+ .unwrap_or("".to_owned()))
+ });
+ let contents = contents.collect::<Result<Vec<String>, PyErr>>()?;
+ Ok(format!("keys_view({{{}}})", contents.join(", ")))
+ }
+
+ fn intersection(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>) -> PyResult<HashTrieSetPy> {
+ // TODO: iterate over the shorter one if it's got a length
+ let mut inner = HashTrieSet::new_sync();
+ for each in other.try_iter()? {
+ let key = Key::extract_bound(&each?)?;
+ if slf.inner.contains_key(&key) {
+ inner.insert_mut(key);
+ }
+ }
+ Ok(HashTrieSetPy { inner })
+ }
+
+ fn union(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult<KeysView> {
+ // There doesn't seem to be a low-effort way to get a HashTrieSet out of a map,
+ // so we just keep our map and add values we'll ignore.
+ let mut inner = slf.inner.clone();
+ for each in other.try_iter()? {
+ inner.insert_mut(Key::extract_bound(&each?)?, py.None());
+ }
+ Ok(KeysView { inner })
+ }
+}
+
+#[pyclass(module = "rpds")]
+struct ValuesView {
+ inner: HashTrieMapSync,
+}
+
+#[pymethods]
+impl ValuesView {
+ fn __iter__(slf: PyRef<'_, Self>) -> ValuesIterator {
+ ValuesIterator {
+ inner: slf.inner.clone(),
+ }
+ }
+
+ fn __len__(slf: PyRef<'_, Self>) -> usize {
+ slf.inner.size()
+ }
+
+ fn __repr__(&self, py: Python) -> PyResult<String> {
+ let contents = self.inner.into_iter().map(|(_, v)| {
+ Ok(v.into_pyobject(py)?
+ .call_method0("__repr__")
+ .and_then(|r| r.extract())
+ .unwrap_or("".to_owned()))
+ });
+ let contents = contents.collect::<Result<Vec<String>, PyErr>>()?;
+ Ok(format!("values_view([{}])", contents.join(", ")))
+ }
+}
+
+#[pyclass(module = "rpds")]
+struct ItemsView {
+ inner: HashTrieMapSync,
+}
+
+#[derive(FromPyObject)]
+struct ItemViewQuery(Key, PyObject);
+
+#[pymethods]
+impl ItemsView {
+ fn __contains__(slf: PyRef<'_, Self>, item: ItemViewQuery) -> PyResult<bool> {
+ if let Some(value) = slf.inner.get(&item.0) {
+ return item.1.bind(slf.py()).eq(value);
+ }
+
+ Ok(false)
+ }
+
+ fn __iter__(slf: PyRef<'_, Self>) -> ItemsIterator {
+ ItemsIterator {
+ inner: slf.inner.clone(),
+ }
+ }
+
+ fn __len__(slf: PyRef<'_, Self>) -> usize {
+ slf.inner.size()
+ }
+
+ fn __eq__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
+ let abc = PyModule::import(py, "collections.abc")?;
+ if !other.is_instance(&abc.getattr("Set")?)? || other.len()? != slf.inner.size() {
+ return Ok(false);
+ }
+ for (k, v) in slf.inner.iter() {
+ if !other.contains((k.inner.clone_ref(slf.py()), v))? {
+ return Ok(false);
+ }
+ }
+ Ok(true)
+ }
+
+ fn __repr__(&self, py: Python) -> PyResult<String> {
+ let contents = self.inner.into_iter().map(|(k, v)| {
+ let tuple = PyTuple::new(py, [k.inner.clone_ref(py), v.clone_ref(py)])?;
+ Ok(format!("{:?}", tuple))
+ });
+ let contents = contents.collect::<Result<Vec<String>, PyErr>>()?;
+ Ok(format!("items_view([{}])", contents.join(", ")))
+ }
+
+ fn __lt__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
+ let abc = PyModule::import(py, "collections.abc")?;
+ if !other.is_instance(&abc.getattr("Set")?)? || other.len()? <= slf.inner.size() {
+ return Ok(false);
+ }
+ for (k, v) in slf.inner.iter() {
+ let pair = PyTuple::new(py, [k.inner.clone_ref(py), v.clone_ref(py)])?;
+ // FIXME: needs to compare
+ if !other.contains(pair)? {
+ return Ok(false);
+ }
+ }
+ Ok(true)
+ }
+
+ fn __le__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
+ let abc = PyModule::import(py, "collections.abc")?;
+ if !other.is_instance(&abc.getattr("Set")?)? || other.len()? < slf.inner.size() {
+ return Ok(false);
+ }
+ for (k, v) in slf.inner.iter() {
+ let pair = PyTuple::new(py, [k.inner.clone_ref(py), v.clone_ref(py)])?;
+ // FIXME: needs to compare
+ if !other.contains(pair)? {
+ return Ok(false);
+ }
+ }
+ Ok(true)
+ }
+
+ fn __gt__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
+ let abc = PyModule::import(py, "collections.abc")?;
+ if !other.is_instance(&abc.getattr("Set")?)? || other.len()? >= slf.inner.size() {
+ return Ok(false);
+ }
+ for each in other.try_iter()? {
+ let kv = each?;
+ let k = kv.get_item(0)?;
+ match slf.inner.get(&Key::extract_bound(&k)?) {
+ Some(value) => {
+ let pair = PyTuple::new(py, [k, value.bind(py).clone()])?;
+ if !pair.eq(kv)? {
+ return Ok(false);
+ }
+ }
+ None => return Ok(false),
+ }
+ }
+ Ok(true)
+ }
+
+ fn __ge__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
+ let abc = PyModule::import(py, "collections.abc")?;
+ if !other.is_instance(&abc.getattr("Set")?)? || other.len()? > slf.inner.size() {
+ return Ok(false);
+ }
+ for each in other.try_iter()? {
+ let kv = each?;
+ let k = kv.get_item(0)?;
+ match slf.inner.get(&Key::extract_bound(&k)?) {
+ Some(value) => {
+ let pair = PyTuple::new(py, [k, value.bind(py).clone()])?;
+ if !pair.eq(kv)? {
+ return Ok(false);
+ }
+ }
+ None => return Ok(false),
+ }
+ }
+ Ok(true)
+ }
+
+ fn __and__(
+ slf: PyRef<'_, Self>,
+ other: &Bound<'_, PyAny>,
+ py: Python,
+ ) -> PyResult<HashTrieSetPy> {
+ ItemsView::intersection(slf, other, py)
+ }
+
+ fn __or__(
+ slf: PyRef<'_, Self>,
+ other: &Bound<'_, PyAny>,
+ py: Python,
+ ) -> PyResult<HashTrieSetPy> {
+ ItemsView::union(slf, other, py)
+ }
+
+ fn intersection(
+ slf: PyRef<'_, Self>,
+ other: &Bound<'_, PyAny>,
+ py: Python,
+ ) -> PyResult<HashTrieSetPy> {
+ // TODO: iterate over the shorter one if it's got a length
+ let mut inner = HashTrieSet::new_sync();
+ for each in other.try_iter()? {
+ let kv = each?;
+ let k = kv.get_item(0)?;
+ if let Some(value) = slf.inner.get(&Key::extract_bound(&k)?) {
+ let pair = PyTuple::new(py, [k, value.bind(py).clone()])?;
+ if pair.eq(kv)? {
+ inner.insert_mut(Key::extract_bound(&pair)?);
+ }
+ }
+ }
+ Ok(HashTrieSetPy { inner })
+ }
+
+ fn union(
+ slf: PyRef<'_, Self>,
+ other: &Bound<'_, PyAny>,
+ py: Python,
+ ) -> PyResult<HashTrieSetPy> {
+ // TODO: this is very inefficient, but again can't seem to get a HashTrieSet out of ourself
+ let mut inner = HashTrieSet::new_sync();
+ for (k, v) in slf.inner.iter() {
+ let pair = PyTuple::new(py, [k.inner.clone_ref(py), v.clone_ref(py)])?;
+ inner.insert_mut(Key::extract_bound(&pair)?);
+ }
+ for each in other.try_iter()? {
+ inner.insert_mut(Key::extract_bound(&each?)?);
+ }
+ Ok(HashTrieSetPy { inner })
}
}
@@ -269,9 +762,9 @@ struct HashTrieSetPy {
}
impl<'source> FromPyObject<'source> for HashTrieSetPy {
- fn extract(ob: &'source PyAny) -> PyResult {
+ fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> {
let mut ret = HashTrieSet::new_sync();
- for each in ob.iter()? {
+ for each in ob.try_iter()? {
let k: Key = each?.extract()?;
ret.insert_mut(k);
}
@@ -279,13 +772,10 @@ impl<'source> FromPyObject<'source> for HashTrieSetPy {
}
}
-fn is_subset(one: &HashTrieSetSync, two: &HashTrieSetSync) -> bool {
- one.iter().all(|v| two.contains(v))
-}
-
#[pymethods]
impl HashTrieSetPy {
#[new]
+ #[pyo3(signature = (value=None))]
fn init(value: Option) -> Self {
if let Some(value) = value {
value
@@ -296,70 +786,136 @@ impl HashTrieSetPy {
}
}
- fn __and__(&self, other: &Self) -> Self {
- self.intersection(other)
+ fn __contains__(&self, key: Key) -> bool {
+ self.inner.contains(&key)
+ }
+
+ fn __and__(&self, other: &Self, py: Python) -> Self {
+ self.intersection(other, py)
}
- fn __or__(&self, other: &Self) -> Self {
- self.union(other)
+ fn __or__(&self, other: &Self, py: Python) -> Self {
+ self.union(other, py)
}
fn __sub__(&self, other: &Self) -> Self {
self.difference(other)
}
- fn __xor__(&self, other: &Self) -> Self {
- self.symmetric_difference(other)
+ fn __xor__(&self, other: &Self, py: Python) -> Self {
+ self.symmetric_difference(other, py)
}
- fn __iter__(slf: PyRef<'_, Self>) -> PyResult> {
- let iter = slf
- .inner
- .iter()
- .map(|k| k.to_owned())
- .collect::>()
- .into_iter();
- Py::new(slf.py(), KeyIterator { inner: iter })
+ fn __iter__(slf: PyRef<'_, Self>) -> SetIterator {
+ SetIterator {
+ inner: slf.inner.clone(),
+ }
}
fn __len__(&self) -> usize {
self.inner.size()
}
- fn __repr__(&self, py: Python) -> String {
+ fn __repr__(&self, py: Python) -> PyResult<String> {
let contents = self.inner.into_iter().map(|k| {
- k.clone()
- .into_py(py)
- .call_method0(py, "__repr__")
- .and_then(|r| r.extract(py))
- .unwrap_or("".to_owned())
+ Ok(k.clone_ref(py)
+ .into_pyobject(py)?
+ .call_method0("__repr__")
+ .and_then(|r| r.extract())
+ .unwrap_or("".to_owned()))
});
- format!(
- "HashTrieSet({{{}}})",
- contents.collect::>().join(", ")
- )
+ let contents = contents.collect::<Result<Vec<_>, PyErr>>()?;
+ Ok(format!("HashTrieSet({{{}}})", contents.join(", ")))
}
- fn __richcmp__(&self, other: &Self, op: CompareOp, py: Python<'_>) -> PyResult {
- match op {
- CompareOp::Eq => Ok((self.inner.size() == other.inner.size()
- && is_subset(&self.inner, &other.inner))
- .into_py(py)),
- CompareOp::Ne => Ok((self.inner.size() != other.inner.size()
- || self.inner.iter().any(|k| !other.inner.contains(k)))
- .into_py(py)),
- CompareOp::Lt => Ok((self.inner.size() < other.inner.size()
- && is_subset(&self.inner, &other.inner))
- .into_py(py)),
- CompareOp::Le => Ok(is_subset(&self.inner, &other.inner).into_py(py)),
- _ => Ok(py.NotImplemented()),
+ fn __eq__(slf: PyRef<'_, Self>, other: Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
+ let abc = PyModule::import(py, "collections.abc")?;
+ if !other.is_instance(&abc.getattr("Set")?)? || other.len()? != slf.inner.size() {
+ return Ok(false);
}
+ for each in other.try_iter()? {
+ if !slf.inner.contains(&Key::extract_bound(&each?)?) {
+ return Ok(false);
+ }
+ }
+ Ok(true)
}
- fn __reduce__(slf: PyRef) -> (&PyType, (Vec,)) {
+ fn __hash__(&self) -> PyResult<isize> {
+ // modified from https://github.com/python/cpython/blob/d69529d31ccd1510843cfac1ab53bb8cb027541f/Objects/setobject.c#L715
+
+ let mut hash_val = self
+ .inner
+ .iter()
+ .map(|k| k.hash as usize)
+ .fold(0, |acc: usize, x: usize| acc ^ hash_shuffle_bits(x));
+
+ // factor in the number of entries in the collection
+ hash_val ^= self.inner.size().wrapping_add(1).wrapping_mul(1927868237);
+
+ // dispense patterns in the hash value
+ hash_val ^= (hash_val >> 11) ^ (hash_val >> 25);
+ hash_val = hash_val.wrapping_mul(69069).wrapping_add(907133923);
+
+ Ok(hash_val as isize)
+ }
+
+ fn __lt__(slf: PyRef<'_, Self>, other: Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
+ let abc = PyModule::import(py, "collections.abc")?;
+ if !other.is_instance(&abc.getattr("Set")?)? || other.len()? <= slf.inner.size() {
+ return Ok(false);
+ }
+ for each in slf.inner.iter() {
+ if !other.contains(each.inner.clone_ref(py))? {
+ return Ok(false);
+ }
+ }
+ Ok(true)
+ }
+
+ fn __le__(slf: PyRef<'_, Self>, other: Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
+ let abc = PyModule::import(py, "collections.abc")?;
+ if !other.is_instance(&abc.getattr("Set")?)? || other.len()? < slf.inner.size() {
+ return Ok(false);
+ }
+ for each in slf.inner.iter() {
+ if !other.contains(each.inner.clone_ref(slf.py()))? {
+ return Ok(false);
+ }
+ }
+ Ok(true)
+ }
+
+ fn __gt__(slf: PyRef<'_, Self>, other: Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
+ let abc = PyModule::import(py, "collections.abc")?;
+ if !other.is_instance(&abc.getattr("Set")?)? || other.len()? >= slf.inner.size() {
+ return Ok(false);
+ }
+ for each in other.try_iter()? {
+ if !slf.inner.contains(&Key::extract_bound(&each?)?) {
+ return Ok(false);
+ }
+ }
+ Ok(true)
+ }
+
+ fn __ge__(slf: PyRef<'_, Self>, other: Bound<'_, PyAny>, py: Python) -> PyResult<bool> {
+ let abc = PyModule::import(py, "collections.abc")?;
+ if !other.is_instance(&abc.getattr("Set")?)? || other.len()? > slf.inner.size() {
+ return Ok(false);
+ }
+ for each in other.try_iter()? {
+ if !slf.inner.contains(&Key::extract_bound(&each?)?) {
+ return Ok(false);
+ }
+ }
+ Ok(true)
+ }
+
+ fn __reduce__(slf: PyRef<Self>) -> (Bound<'_, PyType>, (Vec<Key>,)) {
(
HashTrieSetPy::type_object(slf.py()),
- (slf.inner.iter().cloned().collect(),),
+ (slf.inner.iter().map(|e| e.clone_ref(slf.py())).collect(),),
)
}
@@ -397,7 +953,7 @@ impl HashTrieSetPy {
HashTrieSetPy { inner }
}
- fn intersection(&self, other: &Self) -> HashTrieSetPy {
+ fn intersection(&self, other: &Self, py: Python) -> HashTrieSetPy {
let mut inner: HashTrieSetSync = HashTrieSet::new_sync();
let larger: &HashTrieSetSync;
let iter;
@@ -410,13 +966,13 @@ impl HashTrieSetPy {
}
for value in iter {
if larger.contains(value) {
- inner.insert_mut(value.to_owned());
+ inner.insert_mut(value.clone_ref(py));
}
}
HashTrieSetPy { inner }
}
- fn symmetric_difference(&self, other: &Self) -> HashTrieSetPy {
+ fn symmetric_difference(&self, other: &Self, py: Python) -> HashTrieSetPy {
let mut inner: HashTrieSetSync;
let iter;
if self.inner.size() > other.inner.size() {
@@ -430,13 +986,13 @@ impl HashTrieSetPy {
if inner.contains(value) {
inner.remove_mut(value);
} else {
- inner.insert_mut(value.to_owned());
+ inner.insert_mut(value.clone_ref(py));
}
}
HashTrieSetPy { inner }
}
- fn union(&self, other: &Self) -> HashTrieSetPy {
+ fn union(&self, other: &Self, py: Python) -> HashTrieSetPy {
let mut inner: HashTrieSetSync;
let iter;
if self.inner.size() > other.inner.size() {
@@ -447,24 +1003,42 @@ impl HashTrieSetPy {
iter = self.inner.iter();
}
for value in iter {
- inner.insert_mut(value.to_owned());
+ inner.insert_mut(value.clone_ref(py));
}
HashTrieSetPy { inner }
}
#[pyo3(signature = (*iterables))]
- fn update(&self, iterables: &PyTuple) -> PyResult {
+ fn update(&self, iterables: Bound<'_, PyTuple>) -> PyResult<HashTrieSetPy> {
let mut inner = self.inner.clone();
for each in iterables {
- let iter = each.iter()?;
+ let iter = each.try_iter()?;
for value in iter {
- inner.insert_mut(Key::extract(value?)?.to_owned());
+ inner.insert_mut(Key::extract_bound(&value?)?);
}
}
Ok(HashTrieSetPy { inner })
}
}
+#[pyclass(module = "rpds")]
+struct SetIterator {
+ inner: HashTrieSetSync,
+}
+
+#[pymethods]
+impl SetIterator {
+ fn __iter__(slf: PyRef<'_, Self>) -> PyRef<'_, Self> {
+ slf
+ }
+
+ fn __next__(mut slf: PyRefMut<'_, Self>) -> Option<Key> {
+ let first = slf.inner.iter().next()?.clone_ref(slf.py());
+ slf.inner = slf.inner.remove(&first);
+ Some(first)
+ }
+}
+
#[repr(transparent)]
#[pyclass(name = "List", module = "rpds", frozen, sequence)]
struct ListPy {
@@ -478,10 +1052,10 @@ impl From> for ListPy {
}
impl<'source> FromPyObject<'source> for ListPy {
- fn extract(ob: &'source PyAny) -> PyResult {
+ fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> {
let mut ret = List::new_sync();
let reversed = PyModule::import(ob.py(), "builtins")?.getattr("reversed")?;
- let rob: &PyIterator = reversed.call1((ob,))?.iter()?;
+ let rob: Bound<'_, PyIterator> = reversed.call1((ob,))?.try_iter()?;
for each in rob {
ret.push_front_mut(each?.extract()?);
}
@@ -493,7 +1067,7 @@ impl<'source> FromPyObject<'source> for ListPy {
impl ListPy {
#[new]
#[pyo3(signature = (*elements))]
- fn init(elements: &PyTuple) -> PyResult {
+ fn init(elements: &Bound<'_, PyTuple>) -> PyResult<Self> {
let mut ret: ListPy;
if elements.len() == 1 {
ret = elements.get_item(0)?.extract()?;
@@ -515,46 +1089,74 @@ impl ListPy {
self.inner.len()
}
- fn __repr__(&self, py: Python) -> String {
+ fn __repr__(&self, py: Python) -> PyResult<String> {
let contents = self.inner.into_iter().map(|k| {
- k.into_py(py)
- .call_method0(py, "__repr__")
- .and_then(|r| r.extract(py))
- .unwrap_or("".to_owned())
+ Ok(k.into_pyobject(py)?
+ .call_method0("__repr__")
+ .and_then(|r| r.extract())
+ .unwrap_or("".to_owned()))
});
- format!("List([{}])", contents.collect::>().join(", "))
+ let contents = contents.collect::<Result<Vec<_>, PyErr>>()?;
+ Ok(format!("List([{}])", contents.join(", ")))
}
fn __richcmp__(&self, other: &Self, op: CompareOp, py: Python<'_>) -> PyResult {
match op {
- CompareOp::Eq => Ok((self.inner.len() == other.inner.len()
+ CompareOp::Eq => (self.inner.len() == other.inner.len()
&& self
.inner
.iter()
.zip(other.inner.iter())
- .map(|(e1, e2)| PyAny::eq(e1.extract(py)?, e2))
+ .map(|(e1, e2)| e1.bind(py).eq(e2))
.all(|r| r.unwrap_or(false)))
- .into_py(py)),
- CompareOp::Ne => Ok((self.inner.len() != other.inner.len()
+ .into_pyobject(py)
+ .map_err(Into::into)
+ .map(BoundObject::into_any)
+ .map(BoundObject::unbind),
+ CompareOp::Ne => (self.inner.len() != other.inner.len()
|| self
.inner
.iter()
.zip(other.inner.iter())
- .map(|(e1, e2)| PyAny::ne(e1.extract(py)?, e2))
+ .map(|(e1, e2)| e1.bind(py).ne(e2))
.any(|r| r.unwrap_or(true)))
- .into_py(py)),
+ .into_pyobject(py)
+ .map_err(Into::into)
+ .map(BoundObject::into_any)
+ .map(BoundObject::unbind),
_ => Ok(py.NotImplemented()),
}
}
- fn __iter__(slf: PyRef<'_, Self>) -> PyResult> {
- let iter = slf
- .inner
+ fn __hash__(&self, py: Python) -> PyResult<u64> {
+ let mut hasher = DefaultHasher::new();
+
+ self.inner
.iter()
- .map(|k| k.to_owned())
- .collect::>()
- .into_iter();
- Py::new(slf.py(), ListIterator { inner: iter })
+ .enumerate()
+ .try_for_each(|(index, each)| {
+ each.bind(py)
+ .hash()
+ .map_err(|_| {
+ PyTypeError::new_err(format!(
+ "Unhashable type at {} element in List: {}",
+ index,
+ each.bind(py)
+ .repr()
+ .and_then(|r| r.extract())
+ .unwrap_or(" error".to_string())
+ ))
+ })
+ .map(|x| hasher.write_isize(x))
+ })?;
+
+ Ok(hasher.finish())
+ }
+
+ fn __iter__(slf: PyRef<'_, Self>) -> ListIterator {
+ ListIterator {
+ inner: slf.inner.clone(),
+ }
}
fn __reversed__(&self) -> ListPy {
@@ -563,10 +1165,10 @@ impl ListPy {
}
}
- fn __reduce__(slf: PyRef) -> (&PyType, (Vec,)) {
+ fn __reduce__(slf: PyRef<Self>) -> (Bound<'_, PyType>, (Vec<PyObject>,)) {
(
ListPy::type_object(slf.py()),
- (slf.inner.iter().cloned().collect(),),
+ (slf.inner.iter().map(|e| e.clone_ref(slf.py())).collect(),),
)
}
@@ -601,7 +1203,7 @@ impl ListPy {
#[pyclass(module = "rpds")]
struct ListIterator {
- inner: IntoIter,
+ inner: ListSync,
}
#[pymethods]
@@ -611,16 +1213,200 @@ impl ListIterator {
}
fn __next__(mut slf: PyRefMut<'_, Self>) -> Option {
- slf.inner.next()
+ let first_op = slf.inner.first()?;
+ let first = first_op.clone_ref(slf.py());
+
+ slf.inner = slf.inner.drop_first()?;
+
+ Some(first)
+ }
+}
+
+#[pyclass(module = "rpds")]
+struct QueueIterator {
+ inner: QueueSync,
+}
+
+#[pymethods]
+impl QueueIterator {
+ fn __iter__(slf: PyRef<'_, Self>) -> PyRef<'_, Self> {
+ slf
+ }
+
+ fn __next__(mut slf: PyRefMut<'_, Self>) -> Option<PyObject> {
+ let first_op = slf.inner.peek()?;
+ let first = first_op.clone_ref(slf.py());
+ slf.inner = slf.inner.dequeue()?;
+ Some(first)
+ }
+}
+
+#[repr(transparent)]
+#[pyclass(name = "Queue", module = "rpds", frozen, sequence)]
+struct QueuePy {
+ inner: QueueSync,
+}
+
+impl From<QueueSync> for QueuePy {
+ fn from(elements: QueueSync) -> Self {
+ QueuePy { inner: elements }
+ }
+}
+
+impl<'source> FromPyObject<'source> for QueuePy {
+ fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> {
+ let mut ret = Queue::new_sync();
+ for each in ob.try_iter()? {
+ ret.enqueue_mut(each?.extract()?);
+ }
+ Ok(QueuePy { inner: ret })
+ }
+}
+
+#[pymethods]
+impl QueuePy {
+ #[new]
+ #[pyo3(signature = (*elements))]
+ fn init(elements: &Bound<'_, PyTuple>, py: Python<'_>) -> PyResult<Self> {
+ let mut ret: QueuePy;
+ if elements.len() == 1 {
+ ret = elements.get_item(0)?.extract()?;
+ } else {
+ ret = QueuePy {
+ inner: Queue::new_sync(),
+ };
+ if elements.len() > 1 {
+ for each in elements {
+ ret.inner.enqueue_mut(each.into_pyobject(py)?.unbind());
+ }
+ }
+ }
+ Ok(ret)
+ }
+
+ fn __eq__(&self, other: &Self, py: Python<'_>) -> bool {
+ (self.inner.len() == other.inner.len())
+ && self
+ .inner
+ .iter()
+ .zip(other.inner.iter())
+ .map(|(e1, e2)| e1.bind(py).eq(e2))
+ .all(|r| r.unwrap_or(false))
+ }
+
+ fn __hash__(&self, py: Python<'_>) -> PyResult<u64> {
+ let mut hasher = DefaultHasher::new();
+
+ self.inner
+ .iter()
+ .enumerate()
+ .try_for_each(|(index, each)| {
+ each.bind(py)
+ .hash()
+ .map_err(|_| {
+ PyTypeError::new_err(format!(
+ "Unhashable type at {} element in Queue: {}",
+ index,
+ each.bind(py)
+ .repr()
+ .and_then(|r| r.extract())
+ .unwrap_or(" error".to_string())
+ ))
+ })
+ .map(|x| hasher.write_isize(x))
+ })?;
+
+ Ok(hasher.finish())
+ }
+
+ fn __ne__(&self, other: &Self, py: Python<'_>) -> bool {
+ (self.inner.len() != other.inner.len())
+ || self
+ .inner
+ .iter()
+ .zip(other.inner.iter())
+ .map(|(e1, e2)| e1.bind(py).ne(e2))
+ .any(|r| r.unwrap_or(true))
+ }
+
+ fn __iter__(slf: PyRef<'_, Self>) -> QueueIterator {
+ QueueIterator {
+ inner: slf.inner.clone(),
+ }
+ }
+
+ fn __len__(&self) -> usize {
+ self.inner.len()
+ }
+
+ fn __repr__(&self, py: Python) -> PyResult<String> {
+ let contents = self.inner.into_iter().map(|k| {
+ Ok(k.into_pyobject(py)?
+ .call_method0("__repr__")
+ .and_then(|r| r.extract())
+ .unwrap_or("".to_owned()))
+ });
+ let contents = contents.collect::<Result<Vec<_>, PyErr>>()?;
+ Ok(format!("Queue([{}])", contents.join(", ")))
+ }
+
+ #[getter]
+ fn peek(&self, py: Python) -> PyResult<PyObject> {
+ if let Some(peeked) = self.inner.peek() {
+ Ok(peeked.clone_ref(py))
+ } else {
+ Err(PyIndexError::new_err("peeked an empty queue"))
+ }
+ }
+
+ #[getter]
+ fn is_empty(&self) -> bool {
+ self.inner.is_empty()
+ }
+
+ fn enqueue(&self, value: Bound<'_, PyAny>) -> Self {
+ QueuePy {
+ inner: self.inner.enqueue(value.into()),
+ }
+ }
+
+ fn dequeue(&self) -> PyResult<QueuePy> {
+ if let Some(inner) = self.inner.dequeue() {
+ Ok(QueuePy { inner })
+ } else {
+ Err(PyIndexError::new_err("dequeued an empty queue"))
+ }
}
}
-#[pymodule]
+#[pymodule(gil_used = false)]
#[pyo3(name = "rpds")]
-fn rpds_py(py: Python, m: &PyModule) -> PyResult<()> {
+fn rpds_py(py: Python, m: &Bound<'_, PyModule>) -> PyResult<()> {
m.add_class::()?;
- PyMapping::register::(py)?;
m.add_class::()?;
m.add_class::()?;
+ m.add_class::<QueuePy>()?;
+
+ PyMapping::register::<HashTrieMapPy>(py)?;
+
+ let abc = PyModule::import(py, "collections.abc")?;
+
+ abc.getattr("Set")?
+ .call_method1("register", (HashTrieSetPy::type_object(py),))?;
+
+ abc.getattr("MappingView")?
+ .call_method1("register", (KeysView::type_object(py),))?;
+ abc.getattr("MappingView")?
+ .call_method1("register", (ValuesView::type_object(py),))?;
+ abc.getattr("MappingView")?
+ .call_method1("register", (ItemsView::type_object(py),))?;
+
+ abc.getattr("KeysView")?
+ .call_method1("register", (KeysView::type_object(py),))?;
+ abc.getattr("ValuesView")?
+ .call_method1("register", (ValuesView::type_object(py),))?;
+ abc.getattr("ItemsView")?
+ .call_method1("register", (ItemsView::type_object(py),))?;
+
Ok(())
}
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/requirements.in b/tests/requirements.in
index e079f8a..8dc90bd 100644
--- a/tests/requirements.in
+++ b/tests/requirements.in
@@ -1 +1,3 @@
+file:.#egg=rpds-py
pytest
+pytest-run-parallel
diff --git a/tests/requirements.txt b/tests/requirements.txt
index 8b2acf1..6d2db2e 100644
--- a/tests/requirements.txt
+++ b/tests/requirements.txt
@@ -1,14 +1,16 @@
-#
-# This file is autogenerated by pip-compile with Python 3.11
-# by the following command:
-#
-# pip-compile tests/requirements.in
-#
-iniconfig==2.0.0
+# This file was autogenerated by uv via the following command:
+# uv pip compile --output-file /Users/julian/Development/rpds.py/tests/requirements.txt tests/requirements.in
+iniconfig==2.1.0
# via pytest
-packaging==23.1
+packaging==25.0
# via pytest
-pluggy==1.3.0
+pluggy==1.6.0
# via pytest
-pytest==7.4.2
+pytest==8.3.5
+ # via
+ # -r tests/requirements.in
+ # pytest-run-parallel
+pytest-run-parallel==0.4.2
+ # via -r tests/requirements.in
+rpds-py @ file:.#egg=rpds-py
# via -r tests/requirements.in
diff --git a/tests/test_hash_trie_map.py b/tests/test_hash_trie_map.py
index 2f3db07..3dec414 100644
--- a/tests/test_hash_trie_map.py
+++ b/tests/test_hash_trie_map.py
@@ -26,23 +26,33 @@
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
-from collections.abc import Hashable, Mapping
+
+from collections import abc
+from operator import methodcaller
import pickle
+import sysconfig
import pytest
from rpds import HashTrieMap
-HASH_MSG = "Not sure HashTrieMap implements Hash, it has mutable methods"
+# see https://github.com/python/cpython/issues/127065,
+# remove this when the CPython bug is fixed in a released version
+if bool(sysconfig.get_config_var("Py_GIL_DISABLED")):
+
+ def methodcaller(name, /, *args, **kwargs):
+ def caller(obj):
+ return getattr(obj, name)(*args, **kwargs)
+
+ return caller
-@pytest.mark.xfail(reason=HASH_MSG)
def test_instance_of_hashable():
- assert isinstance(HashTrieMap(), Hashable)
+ assert isinstance(HashTrieMap(), abc.Hashable)
def test_instance_of_map():
- assert isinstance(HashTrieMap(), Mapping)
+ assert isinstance(HashTrieMap(), abc.Mapping)
def test_literalish_works():
@@ -66,7 +76,7 @@ def test_initialization_with_one_element():
assert "a" not in empty_map
-def test_get_non_existing_raises_key_error():
+def test_index_non_existing_raises_key_error():
m1 = HashTrieMap()
with pytest.raises(KeyError) as error:
m1["foo"]
@@ -152,12 +162,22 @@ def test_overwrite_existing_element():
assert map2["a"] == 3
-@pytest.mark.xfail(reason=HASH_MSG)
-def test_hash():
- x = HashTrieMap(a=1, b=2, c=3)
- y = HashTrieMap(a=1, b=2, c=3)
+def test_hashing():
+ o = object()
+
+ assert hash(HashTrieMap([(o, o), (1, o)])) == hash(
+ HashTrieMap([(o, o), (1, o)]),
+ )
+ assert hash(HashTrieMap([(o, o), (1, o)])) == hash(
+ HashTrieMap([(1, o), (o, o)]),
+ )
+ assert hash(HashTrieMap([(o, "foo")])) == hash(HashTrieMap([(o, "foo")]))
+ assert hash(HashTrieMap()) == hash(HashTrieMap([]))
- assert hash(x) == hash(y)
+ assert hash(HashTrieMap({1: 2})) != hash(HashTrieMap({1: 3}))
+ assert hash(HashTrieMap({o: 1})) != hash(HashTrieMap({o: o}))
+ assert hash(HashTrieMap([])) != hash(HashTrieMap([(o, 1)]))
+ assert hash(HashTrieMap({1: 2, 3: 4})) != hash(HashTrieMap({1: 3, 2: 4}))
def test_same_hash_when_content_the_same_but_underlying_vector_size_differs():
@@ -169,7 +189,7 @@ def test_same_hash_when_content_the_same_but_underlying_vector_size_differs():
x = x.remove(z)
assert x == y
- # assert hash(x) == hash(y)
+ # assert hash(x) == hash(y) # noqa: ERA001
class HashabilityControlled:
@@ -181,13 +201,17 @@ def __hash__(self):
raise ValueError("I am not currently hashable.")
-@pytest.mark.xfail(reason=HASH_MSG)
def test_map_does_not_hash_values_on_second_hash_invocation():
hashable = HashabilityControlled()
x = HashTrieMap(dict(el=hashable))
hash(x)
+
hashable.hashable = False
- hash(x)
+ with pytest.raises(
+ TypeError,
+ match=r"Unhashable type in HashTrieMap of key 'el'",
+ ):
+ hash(x)
def test_equal():
@@ -263,7 +287,7 @@ def __eq__(self, other):
def test_iteration_with_many_elements():
- values = list(range(0, 2000))
+ values = list(range(2000))
keys = [str(x) for x in values]
init_dict = dict(zip(keys, values))
@@ -283,8 +307,16 @@ def test_iteration_with_many_elements():
actual_values.add(v)
actual_keys.add(k)
- assert actual_keys == set(keys + [hash_dummy1, hash_dummy2])
- assert actual_values == set(values + [12345, 54321])
+ assert actual_keys == {*keys, hash_dummy1, hash_dummy2}
+ assert actual_values == {*values, 12345, 54321}
+
+
+def test_repr():
+ rep = repr(HashTrieMap({"foo": "12", "": 37}))
+ assert rep in {
+ "HashTrieMap({'foo': '12', '': 37})",
+ "HashTrieMap({'': 37, 'foo': '12'})",
+ }
def test_str():
@@ -312,8 +344,10 @@ def test_fast_convert_hashtriemap():
assert HashTrieMap.convert(m) is m
+# Non-pyrsistent-test-suite tests
+
+
def test_more_eq():
- # Non-pyrsistent-test-suite test
o = object()
assert HashTrieMap([(o, o), (1, o)]) == HashTrieMap([(o, o), (1, o)])
@@ -327,5 +361,209 @@ def test_more_eq():
def test_pickle():
assert pickle.loads(
- pickle.dumps(HashTrieMap([(1, 2), (3, 4)]))
+ pickle.dumps(HashTrieMap([(1, 2), (3, 4)])),
) == HashTrieMap([(1, 2), (3, 4)])
+
+
+def test_get():
+ m1 = HashTrieMap({"foo": "bar"})
+ assert m1.get("foo") == "bar"
+ assert m1.get("baz") is None
+ assert m1.get("spam", "eggs") == "eggs"
+
+
+@pytest.mark.parametrize(
+ "view",
+ [pytest.param(methodcaller(p), id=p) for p in ["keys", "values", "items"]],
+)
+@pytest.mark.parametrize(
+ "cls",
+ [
+ abc.Set,
+ abc.MappingView,
+ abc.KeysView,
+ abc.ValuesView,
+ abc.ItemsView,
+ ],
+)
+def test_views_abc(view, cls):
+ m, d = HashTrieMap(), {}
+ assert isinstance(view(m), cls) == isinstance(view(d), cls)
+
+
+def test_keys():
+ d = HashTrieMap({1: 2, 3: 4})
+ k = d.keys()
+
+ assert 1 in k
+ assert 2 not in k
+ assert object() not in k
+
+ assert len(k) == 2
+
+ assert k == d.keys()
+ assert k == HashTrieMap({1: 2, 3: 4}).keys()
+ assert k == {1, 3}
+
+ assert k != iter({1, 3})
+ assert k != {1, 2, 3}
+ assert k != {1, 4}
+ assert not k == {1, 4}
+
+ assert k != object()
+
+
+def test_keys_setlike():
+ assert {1: 2, 3: 4}.keys() & HashTrieMap({1: 2}).keys() == {1}
+ assert {1: 2, 3: 4}.keys() & HashTrieMap({1: 2}).keys() != {1, 2}
+ assert HashTrieMap({1: 2}).keys() & {1: 2, 3: 4}.keys() == {1}
+ assert HashTrieMap({1: 2}).keys() & {1: 2, 3: 4}.keys() != {2}
+ assert not HashTrieMap({1: 2}).keys() & {}.keys()
+ assert HashTrieMap({1: 2}).keys() & {1} == {1}
+ assert HashTrieMap({1: 2}).keys() & [1] == {1}
+
+ assert HashTrieMap({1: 2}).keys() | {3} == {1, 3}
+ assert HashTrieMap({1: 2}).keys() | [3] == {1, 3}
+
+ # these don't really exist on the KeysView protocol but it's nice to have
+ s = (1, "foo")
+ assert HashTrieMap({1: 2, "foo": 7}).keys().intersection(s) == set(s)
+ assert not HashTrieMap({1: 2}).keys().intersection({})
+ assert HashTrieMap({1: 2}).keys().union({3}) == {1, 3}
+
+ assert HashTrieMap({1: 2, 3: 4}).keys() < {1, 2, 3}
+ assert HashTrieMap({1: 2, 3: 4}).keys() <= {1, 2, 3}
+ assert not HashTrieMap({1: 2}).keys() < {1}
+ assert HashTrieMap({1: 2}).keys() > set()
+ assert HashTrieMap({1: 2}).keys() >= set()
+
+
+def test_keys_repr():
+ m = HashTrieMap({"foo": 3, 37: "bar"})
+ assert repr(m.keys()) in {
+ "keys_view({'foo', 37})",
+ "keys_view({37, 'foo'})",
+ }
+
+
+def test_values():
+ d = HashTrieMap({1: 2, 3: 4})
+ v = d.values()
+
+ assert 2 in v
+ assert 3 not in v
+ assert object() not in v
+
+ assert len(v) == 2
+
+ assert v == v
+ # https://bugs.python.org/issue12445 which was WONTFIXed
+ assert v != HashTrieMap({1: 2, 3: 4}).values()
+ assert v != [2, 4]
+
+ assert set(v) == {2, 4}
+
+
+def test_values_repr():
+ m = HashTrieMap({"foo": 3, 37: "bar", "baz": 3})
+ assert repr(m.values()) in {
+ "values_view(['bar', 3, 3])",
+ "values_view([3, 'bar', 3])",
+ "values_view([3, 3, 'bar'])",
+ }
+
+
+def test_items():
+ d = HashTrieMap({1: 2, 3: 4})
+ i = d.items()
+
+ assert (1, 2) in i
+ assert (1, 4) not in i
+
+ assert len(i) == 2
+
+ assert i == d.items()
+ assert i == HashTrieMap({1: 2, 3: 4}).items()
+ assert i == {(1, 2), (3, 4)}
+
+ assert i != iter({(1, 2), (3, 4)})
+ assert i != {(1, 2, 3), (3, 4, 5)}
+ assert i == {1: 2, 3: 4}.items()
+ assert i != {(1, 2), (3, 4), (5, 6)}
+ assert i != {(1, 2)}
+ assert not i == {1, 4}
+
+ assert i != object()
+
+
+def test_items_setlike():
+ assert {1: 2, 3: 4}.items() & HashTrieMap({1: 2}).items() == {(1, 2)}
+ assert {1: 2, 3: 4}.items() & HashTrieMap({1: 2}).items() != {(1, 2), 3}
+
+ assert HashTrieMap({1: 2}).items() & {1: 2, 3: 4}.items() == {(1, 2)}
+ assert HashTrieMap({1: 2}).items() & {1: 2, 3: 4}.items() != {(3, 4)}
+ assert not HashTrieMap({1: 2}).items() & {}.items()
+
+ assert HashTrieMap({1: 2}).items() & [(1, 2)] == {(1, 2)}
+ assert HashTrieMap({1: 2}).items() & [[1, 2]] == set()
+
+ assert HashTrieMap({1: 2}).items() | {(3, 4)} == {(1, 2), (3, 4)}
+ assert HashTrieMap({1: 2}).items() | [7] == {(1, 2), 7}
+
+ s = ((1, 2), ("foo", 37))
+ assert HashTrieMap({1: 2, "foo": 7}).items().intersection(s) == {(1, 2)}
+ assert not HashTrieMap({1: 2}).items().intersection({})
+
+ assert HashTrieMap({1: 2}).items().union({3}) == {(1, 2), 3}
+
+ assert HashTrieMap({1: 2, 3: 4}).items() < {(1, 2), (3, 4), ("foo", "bar")}
+ assert HashTrieMap({1: 2, 3: 4}).items() <= {(1, 2), (3, 4)}
+ assert not HashTrieMap({1: 2}).keys() < {1}
+ assert HashTrieMap({1: 2}).items() > set()
+ assert HashTrieMap({1: 2}).items() >= set()
+
+
+def test_items_repr():
+ m = HashTrieMap({"foo": 3, 37: "bar", "baz": 3})
+ assert repr(m.items()) in {
+ "items_view([('foo', 3), (37, 'bar'), ('baz', 3)])",
+ "items_view([('foo', 3), ('baz', 3), (37, 'bar')])",
+ "items_view([(37, 'bar'), ('foo', 3), ('baz', 3)])",
+ "items_view([(37, 'bar'), ('baz', 3), ('foo', 3)])",
+ "items_view([('baz', 3), (37, 'bar'), ('foo', 3)])",
+ "items_view([('baz', 3), ('foo', 3), (37, 'bar')])",
+ }
+
+
+def test_fromkeys():
+ keys = list(range(10))
+ got = HashTrieMap.fromkeys(keys)
+ expected = HashTrieMap((i, None) for i in keys)
+ assert got == HashTrieMap(dict.fromkeys(keys)) == expected
+
+
+def test_fromkeys_explicit_value():
+ keys = list(range(10))
+ expected = HashTrieMap((i, "foo") for i in keys)
+ got = HashTrieMap.fromkeys(keys, "foo")
+ expected = HashTrieMap((i, "foo") for i in keys)
+ assert got == HashTrieMap(dict.fromkeys(keys, "foo")) == expected
+
+
+def test_fromkeys_explicit_value_not_copied():
+ keys = list(range(5))
+
+ got = HashTrieMap.fromkeys(keys, [])
+ got[3].append(1)
+
+ assert got == HashTrieMap((i, [1]) for i in keys)
+
+
+def test_update_with_iterable_of_kvs():
+ assert HashTrieMap({1: 2}).update(iter([(3, 4), ("5", 6)])) == HashTrieMap(
+ {
+ 1: 2,
+ 3: 4,
+ "5": 6,
+ },
+ )
diff --git a/tests/test_hash_trie_set.py b/tests/test_hash_trie_set.py
index 8048251..1dd21f4 100644
--- a/tests/test_hash_trie_set.py
+++ b/tests/test_hash_trie_set.py
@@ -26,14 +26,14 @@
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
+
+from collections import abc
import pickle
import pytest
from rpds import HashTrieSet
-HASH_MSG = "Not sure HashTrieSet implements Hash, it has mutable methods"
-
def test_key_is_tuple():
with pytest.raises(KeyError):
@@ -45,9 +45,23 @@ def test_key_is_not_tuple():
HashTrieSet().remove("asdf")
-@pytest.mark.xfail(reason=HASH_MSG)
-def test_supports_hash():
- assert hash(HashTrieSet((1, 2))) == hash(HashTrieSet(1, 2))
+def test_hashing():
+ o = object()
+
+ assert hash(HashTrieSet([o])) == hash(HashTrieSet([o]))
+ assert hash(HashTrieSet([o, o])) == hash(HashTrieSet([o, o]))
+ assert hash(HashTrieSet([])) == hash(HashTrieSet([]))
+ assert hash(HashTrieSet([1, 2])) == hash(HashTrieSet([1, 2]))
+ assert hash(HashTrieSet([1, 2])) == hash(HashTrieSet([2, 1]))
+ assert not (HashTrieSet([1, 2]) == HashTrieSet([1, 3]))
+ assert not (HashTrieSet([]) == HashTrieSet([o]))
+
+ assert hash(HashTrieSet([1, 2])) != hash(HashTrieSet([1, 3]))
+ assert hash(HashTrieSet([1, o])) != hash(HashTrieSet([1, 2]))
+ assert hash(HashTrieSet([1, 2])) != hash(HashTrieSet([2, 1, 3]))
+ assert not (HashTrieSet([o]) != HashTrieSet([o, o]))
+ assert not (HashTrieSet([o, o]) != HashTrieSet([o, o]))
+ assert not (HashTrieSet() != HashTrieSet([]))
def test_empty_truthiness():
@@ -142,7 +156,7 @@ def test_repr():
def test_update():
assert HashTrieSet([1, 2, 3]).update([3, 4, 4, 5]) == HashTrieSet(
- [1, 2, 3, 4, 5]
+ [1, 2, 3, 4, 5],
)
@@ -174,6 +188,10 @@ def test_more_eq():
assert not (HashTrieSet([o]) != HashTrieSet([o, o]))
assert not (HashTrieSet() != HashTrieSet([]))
+ assert HashTrieSet([1, 2]) == {1, 2}
+ assert HashTrieSet([1, 2]) != {1, 2, 3}
+ assert HashTrieSet([1, 2]) != [1, 2]
+
def test_more_set_comparisons():
s = HashTrieSet([1, 2, 3])
@@ -187,5 +205,16 @@ def test_more_set_comparisons():
def test_pickle():
assert pickle.loads(
- pickle.dumps(HashTrieSet([1, 2, 3, 4]))
+ pickle.dumps(HashTrieSet([1, 2, 3, 4])),
) == HashTrieSet([1, 2, 3, 4])
+
+
+def test_instance_of_set():
+ assert isinstance(HashTrieSet(), abc.Set)
+
+
+def test_lt_le_gt_ge():
+ assert HashTrieSet({}) < {1}
+ assert HashTrieSet({}) <= {1}
+ assert HashTrieSet({1}) > set()
+ assert HashTrieSet({1}) >= set()
diff --git a/tests/test_list.py b/tests/test_list.py
index 09ac5cc..6f00b30 100644
--- a/tests/test_list.py
+++ b/tests/test_list.py
@@ -26,14 +26,13 @@
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
+
import pickle
import pytest
from rpds import List
-HASH_MSG = "Not sure List implements Hash, it has mutable methods"
-
def test_literalish_works():
assert List(1, 2, 3) == List([1, 2, 3])
@@ -99,10 +98,24 @@ def test_repr():
assert str(List([1, 2, 3])) in "List([1, 2, 3])"
-@pytest.mark.xfail(reason=HASH_MSG)
def test_hashing():
- assert hash(List([1, 2])) == hash(List([1, 2]))
+ o = object()
+
+ assert hash(List([o, o])) == hash(List([o, o]))
+ assert hash(List([o])) == hash(List([o]))
+ assert hash(List()) == hash(List([]))
+ assert not (hash(List([1, 2])) == hash(List([1, 3])))
+ assert not (hash(List([1, 2])) == hash(List([2, 1])))
+ assert not (hash(List([o])) == hash(List([o, o])))
+ assert not (hash(List([])) == hash(List([o])))
+
+ assert hash(List([1, 2])) != hash(List([1, 3]))
assert hash(List([1, 2])) != hash(List([2, 1]))
+ assert hash(List([o])) != hash(List([o, o]))
+ assert hash(List([])) != hash(List([o]))
+ assert not (hash(List([o, o])) != hash(List([o, o])))
+ assert not (hash(List([o])) != hash(List([o])))
+ assert not (hash(List([])) != hash(List([])))
def test_sequence():
diff --git a/tests/test_queue.py b/tests/test_queue.py
new file mode 100644
index 0000000..e2dadab
--- /dev/null
+++ b/tests/test_queue.py
@@ -0,0 +1,138 @@
+"""
+Modified from the pyrsistent test suite.
+
+Pre-modification, these were MIT licensed, and are copyright:
+
+ Copyright (c) 2022 Tobias Gustafsson
+
+ Permission is hereby granted, free of charge, to any person
+ obtaining a copy of this software and associated documentation
+ files (the "Software"), to deal in the Software without
+ restriction, including without limitation the rights to use,
+ copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the
+ Software is furnished to do so, subject to the following
+ conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ OTHER DEALINGS IN THE SOFTWARE.
+"""
+
+import pytest
+
+from rpds import Queue
+
+
+def test_literalish_works():
+ assert Queue(1, 2, 3) == Queue([1, 2, 3])  # varargs and single-iterable forms agree
+
+
+def test_peek_dequeue():
+ pl = Queue([1, 2])
+ assert pl.peek == 1  # peek is a property, not a method
+ assert pl.dequeue().peek == 2  # dequeue returns a new Queue (persistent)
+ assert pl.dequeue().dequeue().is_empty  # is_empty is also a property
+ with pytest.raises(IndexError):
+ pl.dequeue().dequeue().dequeue()
+
+
+def test_instantiate_large_list():
+ assert Queue(range(1000)).peek == 0
+
+
+def test_iteration():
+ assert list(Queue()) == []
+ assert list(Queue([1, 2, 3])) == [1, 2, 3]  # iteration is FIFO order
+
+
+def test_enqueue():
+ assert Queue([1, 2, 3]).enqueue(4) == Queue([1, 2, 3, 4])  # enqueues at the back
+
+
+def test_enqueue_empty_list():
+ assert Queue().enqueue(0) == Queue([0])
+
+
+def test_truthiness():
+ assert Queue([1])
+ assert not Queue()  # empty queue is falsy
+
+
+def test_len():
+ assert len(Queue([1, 2, 3])) == 3
+ assert len(Queue()) == 0
+
+
+def test_peek_illegal_on_empty_list():
+ with pytest.raises(IndexError):
+ Queue().peek  # property access itself raises; no call needed
+
+
+def test_inequality():
+ assert Queue([1, 2]) != Queue([1, 3])
+ assert Queue([1, 2]) != Queue([1, 2, 3])  # differing lengths compare unequal
+ assert Queue() != Queue([1, 2, 3])
+
+def test_repr():
+ assert str(Queue()) == "Queue([])"
+ assert str(Queue([1, 2, 3])) == "Queue([1, 2, 3])"  # was `in`: a substring check passes even for a truncated repr
+
+
+def test_sequence():
+ m = Queue("asdf")  # a string is consumed as an iterable of characters
+ assert m == Queue(["a", "s", "d", "f"])
+
+
+# Non-pyrsistent-test-suite tests
+
+
+def test_dequeue():
+ assert Queue([1, 2, 3]).dequeue() == Queue([2, 3])  # removes from the front
+
+
+def test_dequeue_empty():
+ """
+ rpds itself returns an Option here but we try IndexError instead.
+ """
+ with pytest.raises(IndexError):
+ Queue([]).dequeue()
+
+
+def test_more_eq():
+ o = object()
+
+ assert Queue([o, o]) == Queue([o, o])  # element identity suffices for equality
+ assert Queue([o]) == Queue([o])
+ assert Queue() == Queue([])
+ assert not (Queue([1, 2]) == Queue([1, 3]))
+ assert not (Queue([o]) == Queue([o, o]))
+ assert not (Queue([]) == Queue([o]))
+
+ assert Queue([1, 2]) != Queue([1, 3])
+ assert Queue([o]) != Queue([o, o])
+ assert Queue([]) != Queue([o])
+ assert not (Queue([o, o]) != Queue([o, o]))  # != is the exact negation of ==
+ assert not (Queue([o]) != Queue([o]))
+ assert not (Queue() != Queue([]))
+
+
+def test_hashing():
+ assert hash(Queue([1, 2])) == hash(Queue([1, 2]))
+ assert hash(Queue([1, 2])) != hash(Queue([2, 1]))  # NOTE(review): assumes no collision for these values
+ assert len({Queue([1, 2]), Queue([1, 2])}) == 1  # hash/eq consistency: equal queues collapse in a set
+
+
+def test_unhashable_contents():
+ q = Queue([1, {1}])  # construction succeeds; only hashing the unhashable element fails
+ with pytest.raises(TypeError):
+ hash(q)