From b56194f45221e8a0c6ea836724c6f79f6ed7ce2d Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Tue, 28 Oct 2025 15:08:04 +1000 Subject: [PATCH 01/54] feat: add RDF4JClient, RepositoryManager, and initial Repository implementation Also set up and add tests --- poetry.lock | 332 +++++++++++++++++- pyproject.toml | 6 +- rdflib/rdf4j/__init__.py | 3 + rdflib/rdf4j/client.py | 225 ++++++++++++ test/test_rdf4j/conftest.py | 40 +++ test/test_rdf4j/docker/Dockerfile | 4 + test/test_rdf4j/docker/settings.txt | 6 + test/test_rdf4j/docker/users.txt | 42 +++ .../repo-configs/test-repo-config.ttl | 16 + test/test_rdf4j/test_repo_management.py | 45 +++ 10 files changed, 708 insertions(+), 11 deletions(-) create mode 100644 rdflib/rdf4j/__init__.py create mode 100644 rdflib/rdf4j/client.py create mode 100644 test/test_rdf4j/conftest.py create mode 100644 test/test_rdf4j/docker/Dockerfile create mode 100644 test/test_rdf4j/docker/settings.txt create mode 100644 test/test_rdf4j/docker/users.txt create mode 100644 test/test_rdf4j/repo-configs/test-repo-config.ttl create mode 100644 test/test_rdf4j/test_repo_management.py diff --git a/poetry.lock b/poetry.lock index 8622bb56c..b9be33438 100644 --- a/poetry.lock +++ b/poetry.lock @@ -11,6 +11,26 @@ files = [ {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, ] +[[package]] +name = "anyio" +version = "4.11.0" +description = "High-level concurrency and networking framework on top of asyncio or Trio" +optional = true +python-versions = ">=3.9" +files = [ + {file = "anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc"}, + {file = "anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing_extensions = {version = ">=4.5", 
markers = "python_version < \"3.13\""} + +[package.extras] +trio = ["trio (>=0.31.0)"] + [[package]] name = "babel" version = "2.12.1" @@ -22,9 +42,6 @@ files = [ {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"}, ] -[package.dependencies] -pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} - [[package]] name = "berkeleydb" version = "18.1.10" @@ -313,6 +330,28 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1 [package.extras] toml = ["tomli"] +[[package]] +name = "docker" +version = "7.1.0" +description = "A Python library for the Docker Engine API." +optional = false +python-versions = ">=3.8" +files = [ + {file = "docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"}, + {file = "docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c"}, +] + +[package.dependencies] +pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} +requests = ">=2.26.0" +urllib3 = ">=1.26.0" + +[package.extras] +dev = ["coverage (==7.2.7)", "pytest (==7.4.2)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.1.0)", "ruff (==0.1.8)"] +docs = ["myst-parser (==0.18.0)", "sphinx (==5.1.1)"] +ssh = ["paramiko (>=2.4.3)"] +websockets = ["websocket-client (>=1.3.0)"] + [[package]] name = "docutils" version = "0.20.1" @@ -338,6 +377,17 @@ files = [ [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "h11" +version = "0.16.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = true +python-versions = ">=3.8" +files = [ + {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, + {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, +] + [[package]] name = "html5rdf" version = "1.2.1" @@ -349,6 +399,51 @@ 
files = [ {file = "html5rdf-1.2.1.tar.gz", hash = "sha256:ace9b420ce52995bb4f05e7425eedf19e433c981dfe7a831ab391e2fa2e1a195"}, ] +[[package]] +name = "httpcore" +version = "1.0.9" +description = "A minimal low-level HTTP client." +optional = true +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"}, + {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.16" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<1.0)"] + +[[package]] +name = "httpx" +version = "0.28.1" +description = "The next generation HTTP client." +optional = true +python-versions = ">=3.8" +files = [ + {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, + {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] + [[package]] name = "idna" version = "3.4" @@ -1088,14 +1183,46 @@ pytest = ">=4.6" testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] [[package]] -name = "pytz" -version = "2023.3" -description = "World timezone definitions, modern and historical" +name = "python-dotenv" +version = "1.2.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.9" +files = [ + {file = "python_dotenv-1.2.1-py3-none-any.whl", hash = 
"sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61"}, + {file = "python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "pywin32" +version = "311" +description = "Python for Window Extensions" optional = false python-versions = "*" files = [ - {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, - {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, + {file = "pywin32-311-cp310-cp310-win32.whl", hash = "sha256:d03ff496d2a0cd4a5893504789d4a15399133fe82517455e78bad62efbb7f0a3"}, + {file = "pywin32-311-cp310-cp310-win_amd64.whl", hash = "sha256:797c2772017851984b97180b0bebe4b620bb86328e8a884bb626156295a63b3b"}, + {file = "pywin32-311-cp310-cp310-win_arm64.whl", hash = "sha256:0502d1facf1fed4839a9a51ccbcc63d952cf318f78ffc00a7e78528ac27d7a2b"}, + {file = "pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151"}, + {file = "pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503"}, + {file = "pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2"}, + {file = "pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31"}, + {file = "pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067"}, + {file = "pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852"}, + {file = "pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d"}, + {file = 
"pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d"}, + {file = "pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a"}, + {file = "pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee"}, + {file = "pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87"}, + {file = "pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42"}, + {file = "pywin32-311-cp38-cp38-win32.whl", hash = "sha256:6c6f2969607b5023b0d9ce2541f8d2cbb01c4f46bc87456017cf63b73f1e2d8c"}, + {file = "pywin32-311-cp38-cp38-win_amd64.whl", hash = "sha256:c8015b09fb9a5e188f83b7b04de91ddca4658cee2ae6f3bc483f0b21a77ef6cd"}, + {file = "pywin32-311-cp39-cp39-win32.whl", hash = "sha256:aba8f82d551a942cb20d4a83413ccbac30790b50efb89a75e4f586ac0bb8056b"}, + {file = "pywin32-311-cp39-cp39-win_amd64.whl", hash = "sha256:e0c4cfb0621281fe40387df582097fd796e80430597cb9944f0ae70447bacd91"}, + {file = "pywin32-311-cp39-cp39-win_arm64.whl", hash = "sha256:62ea666235135fee79bb154e695f3ff67370afefd71bd7fea7512fc70ef31e3d"}, ] [[package]] @@ -1222,6 +1349,17 @@ core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.te doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", 
"pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = true +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + [[package]] name = "snowballstemmer" version = "2.2.0" @@ -1391,6 +1529,60 @@ files = [ lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] +[[package]] +name = "testcontainers" +version = "4.13.2" +description = "Python library for throwaway instances of anything that can run in a Docker container" +optional = false +python-versions = "<4.0,>=3.9.2" +files = [ + {file = "testcontainers-4.13.2-py3-none-any.whl", hash = "sha256:0209baf8f4274b568cde95bef2cadf7b1d33b375321f793790462e235cd684ee"}, + {file = "testcontainers-4.13.2.tar.gz", hash = "sha256:2315f1e21b059427a9d11e8921f85fef322fbe0d50749bcca4eaa11271708ba4"}, +] + +[package.dependencies] +docker = "*" +python-dotenv = "*" +typing-extensions = "*" +urllib3 = "*" +wrapt = "*" + +[package.extras] +arangodb = ["python-arango (>=7.8,<8.0)"] +aws = ["boto3", "httpx"] +azurite = ["azure-storage-blob (>=12.19,<13.0)"] +chroma = ["chromadb-client (>=1.0.0,<2.0.0)"] +clickhouse = ["clickhouse-driver"] +cosmosdb = ["azure-cosmos"] +db2 = ["ibm_db_sa", "sqlalchemy"] +generic = ["httpx", "redis"] +google = ["google-cloud-datastore (>=2)", "google-cloud-pubsub (>=2)"] +influxdb = ["influxdb", "influxdb-client"] +k3s = 
["kubernetes", "pyyaml"] +keycloak = ["python-keycloak"] +localstack = ["boto3"] +mailpit = ["cryptography"] +minio = ["minio"] +mongodb = ["pymongo"] +mssql = ["pymssql", "sqlalchemy"] +mysql = ["pymysql[rsa]", "sqlalchemy"] +nats = ["nats-py"] +neo4j = ["neo4j"] +openfga = ["openfga-sdk"] +opensearch = ["opensearch-py"] +oracle = ["oracledb", "sqlalchemy"] +oracle-free = ["oracledb", "sqlalchemy"] +qdrant = ["qdrant-client"] +rabbitmq = ["pika"] +redis = ["redis"] +registry = ["bcrypt"] +scylla = ["cassandra-driver (==3.29.1)"] +selenium = ["selenium"] +sftp = ["cryptography"] +test-module-import = ["httpx"] +trino = ["trino"] +weaviate = ["weaviate-client (>=4.5.4,<5.0.0)"] + [[package]] name = "tomli" version = "2.0.1" @@ -1455,6 +1647,125 @@ files = [ [package.extras] test = ["pytest (>=6.0.0)", "setuptools (>=65)"] +[[package]] +name = "wrapt" +version = "2.0.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.8" +files = [ + {file = "wrapt-2.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a7cebcee61f21b1e46aa32db8d9d93826d0fbf1ad85defc2ccfb93b4adef1435"}, + {file = "wrapt-2.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:827e6e3a3a560f6ec1f5ee92d4319c21a0549384f896ec692f3201eda31ebd11"}, + {file = "wrapt-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a91075a5383a7cbfe46aed1845ef7c3f027e8e20e7d9a8a75e36ebc9b0dd15e"}, + {file = "wrapt-2.0.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b6a18c813196e18146b8d041e20875bdb0cb09b94ac1d1e1146e0fa87b2deb0d"}, + {file = "wrapt-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec5028d26011a53c76bd91bb6198b30b438c6e0f7adb45f2ad84fe2655b6a104"}, + {file = "wrapt-2.0.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bed9b04900204721a24bcefc652ca267b01c1e8ad8bc8c0cff81558a45a3aadc"}, + {file = 
"wrapt-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:03442f2b45fa3f2b98a94a1917f52fb34670de8f96c0a009c02dbd512d855a3d"}, + {file = "wrapt-2.0.0-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:17d0b5c42495ba142a1cee52b76414f9210591c84aae94dffda70240753bfb3c"}, + {file = "wrapt-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ee44215e7d13e112a8fc74e12ed1a1f41cab2bc07b11cc703f2398cd114b261c"}, + {file = "wrapt-2.0.0-cp310-cp310-win32.whl", hash = "sha256:fe6eafac3bc3c957ab6597a0c0654a0a308868458d00d218743e5b5fae51951c"}, + {file = "wrapt-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:9e070c3491397fba0445b8977900271eca9656570cca7c900d9b9352186703a0"}, + {file = "wrapt-2.0.0-cp310-cp310-win_arm64.whl", hash = "sha256:806e2e73186eb5e3546f39fb5d0405040e0088db0fc8b2f667fd1863de2b3c99"}, + {file = "wrapt-2.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b7e221abb6c5387819db9323dac3c875b459695057449634f1111955d753c621"}, + {file = "wrapt-2.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1147a84c8fc852426580af8b6e33138461ddbc65aa459a25ea539374d32069fa"}, + {file = "wrapt-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5d6691d4a711504a0bc10de789842ad6ac627bed22937b10f37a1211a8ab7bb3"}, + {file = "wrapt-2.0.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f460e1eb8e75a17c3918c8e35ba57625721eef2439ef0bcf05304ac278a65e1d"}, + {file = "wrapt-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:12c37784b77bf043bf65cc96c7195a5db474b8e54173208af076bdbb61df7b3e"}, + {file = "wrapt-2.0.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:75e5c049eb583835f7a0e0e311d9dde9bfbaac723a6dd89d052540f9b2809977"}, + {file = "wrapt-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e50bcbd5b65dac21b82319fcf18486e6ac439947e9305034b00704eb7405f553"}, + {file = 
"wrapt-2.0.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:06b78cb6b9320f57737a52fede882640d93cface98332d1a3df0c5696ec9ae9f"}, + {file = "wrapt-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8c8349ebfc3cd98bc9105e0112dd8c8ac1f3c7cb5601f9d02248cae83a63f748"}, + {file = "wrapt-2.0.0-cp311-cp311-win32.whl", hash = "sha256:028f19ec29e204fe725139d4a8b09f77ecfb64f8f02b7ab5ee822c85e330b68b"}, + {file = "wrapt-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:c6961f05e58d919153ba311b397b7b904b907132b7b8344dde47865d4bb5ec89"}, + {file = "wrapt-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:be7e316c2accd5a31dbcc230de19e2a846a325f8967fdea72704d00e38e6af06"}, + {file = "wrapt-2.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73c6f734aecb1a030d9a265c13a425897e1ea821b73249bb14471445467ca71c"}, + {file = "wrapt-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b4a7f8023b8ce8a36370154733c747f8d65c8697cb977d8b6efeb89291fff23e"}, + {file = "wrapt-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a1cb62f686c50e9dab5983c68f6c8e9cbf14a6007935e683662898a7d892fa69"}, + {file = "wrapt-2.0.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:43dc0550ae15e33e6bb45a82a5e1b5495be2587fbaa996244b509921810ee49f"}, + {file = "wrapt-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39c5b45b056d630545e40674d1f5e1b51864b3546f25ab6a4a331943de96262e"}, + {file = "wrapt-2.0.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:804e88f824b76240a1b670330637ccfd2d18b9efa3bb4f02eb20b2f64880b324"}, + {file = "wrapt-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c2c476aa3fc2b9899c3f7b20963fac4f952e7edb74a31fc92f7745389a2e3618"}, + {file = "wrapt-2.0.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:8d851e526891216f89fcb7a1820dad9bd503ba3468fb9635ee28e93c781aa98e"}, + {file = 
"wrapt-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b95733c2360c4a8656ee93c7af78e84c0bd617da04a236d7a456c8faa34e7a2d"}, + {file = "wrapt-2.0.0-cp312-cp312-win32.whl", hash = "sha256:ea56817176834edf143df1109ae8fdaa087be82fdad3492648de0baa8ae82bf2"}, + {file = "wrapt-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:3c7d3bee7be7a2665286103f4d1f15405c8074e6e1f89dac5774f9357c9a3809"}, + {file = "wrapt-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:680f707e1d26acbc60926659799b15659f077df5897a6791c7c598a5d4a211c4"}, + {file = "wrapt-2.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e2ea096db28d5eb64d381af0e93464621ace38a7003a364b6b5ffb7dd713aabe"}, + {file = "wrapt-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c92b5a82d28491e3f14f037e1aae99a27a5e6e0bb161e65f52c0445a3fa7c940"}, + {file = "wrapt-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:81d234718aabe632d179fac52c7f69f0f99fbaac4d4bcd670e62462bbcbfcad7"}, + {file = "wrapt-2.0.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:db2eea83c43f84e4e41dbbb4c1de371a53166e55f900a6b130c3ef51c6345c1a"}, + {file = "wrapt-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:65f50e356c425c061e1e17fe687ff30e294fed9bf3441dc1f13ef73859c2a817"}, + {file = "wrapt-2.0.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:887f2a667e3cbfb19e204032d42ad7dedaa43972e4861dc7a3d51ae951d9b578"}, + {file = "wrapt-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9054829da4be461e3ad3192e4b6bbf1fc18af64c9975ce613aec191924e004dc"}, + {file = "wrapt-2.0.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:b952ffd77133a5a2798ee3feb18e51b0a299d2f440961e5bb7737dbb02e57289"}, + {file = "wrapt-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e25fde03c480061b8234d8ee4863eb5f40a9be4fb258ce105b364de38fc6bcf9"}, + {file = "wrapt-2.0.0-cp313-cp313-win32.whl", 
hash = "sha256:49e982b7860d325094978292a49e0418833fc7fc42c0dc7cd0b7524d7d06ee74"}, + {file = "wrapt-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:6e5c86389d9964050ce50babe247d172a5e3911d59a64023b90db2b4fa00ae7c"}, + {file = "wrapt-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:b96fdaa4611e05c7231937930567d3c16782be9dbcf03eb9f60d83e57dd2f129"}, + {file = "wrapt-2.0.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:f2c7b7fead096dbf1dcc455b7f59facb05de3f5bfb04f60a69f98cdfe6049e5f"}, + {file = "wrapt-2.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:04c7c8393f25b11c0faa5d907dd9eb462e87e4e7ba55e308a046d7ed37f4bbe2"}, + {file = "wrapt-2.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a93e0f8b376c0735b2f4daf58018b4823614d2b896cb72b6641c4d3dbdca1d75"}, + {file = "wrapt-2.0.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b42d13603da4416c43c430dbc6313c8d7ff745c40942f146ed4f6dd02c7d2547"}, + {file = "wrapt-2.0.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c8bbd2472abf8c33480ad2314b1f8fac45d592aba6cc093e8839a7b2045660e6"}, + {file = "wrapt-2.0.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e64a3a1fd9a308ab9b815a2ad7a65b679730629dbf85f8fc3f7f970d634ee5df"}, + {file = "wrapt-2.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d61214525eaf88e0d0edf3d1ad5b5889863c6f88e588c6cdc6aa4ee5d1f10a4a"}, + {file = "wrapt-2.0.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:04f7a5f92c5f7324a1735043cc467b1295a1c5b4e0c1395472b7c44706e3dc61"}, + {file = "wrapt-2.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2356f76cb99b3de5b4e5b8210367fbbb81c7309fe39b622f5d199dd88eb7f765"}, + {file = "wrapt-2.0.0-cp313-cp313t-win32.whl", hash = "sha256:0a921b657a224e40e4bc161b5d33934583b34f0c9c5bdda4e6ac66f9d2fcb849"}, + {file = "wrapt-2.0.0-cp313-cp313t-win_amd64.whl", hash = 
"sha256:c16f6d4eea98080f6659a8a7fc559d4a0a337ee66960659265cad2c8a40f7c0f"}, + {file = "wrapt-2.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:52878edc13dc151c58a9966621d67163a80654bc6cff4b2e1c79fa62d0352b26"}, + {file = "wrapt-2.0.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:79a53d86c2aff7b32cc77267e3a308365d1fcb881e74bc9cbe26f63ee90e37f0"}, + {file = "wrapt-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d731a4f22ed6ffa4cb551b4d2b0c24ff940c27a88edaf8e3490a5ee3a05aef71"}, + {file = "wrapt-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:3e02ab8c0ac766a5a6e81cd3b6cc39200c69051826243182175555872522bd5a"}, + {file = "wrapt-2.0.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:895870602d65d7338edb3b6a717d856632ad9f14f7ff566214e4fb11f0816649"}, + {file = "wrapt-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0b9ad4fab76a0086dc364c4f17f39ad289600e73ef5c6e9ab529aff22cac1ac3"}, + {file = "wrapt-2.0.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e7ca0562606d7bad2736b2c18f61295d61f50cd3f4bfc51753df13614dbcce1b"}, + {file = "wrapt-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fe089d9f5a4a3dea0108a8ae34bced114d0c4cca417bada1c5e8f42d98af9050"}, + {file = "wrapt-2.0.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e761f2d2f8dbc80384af3d547b522a80e67db3e319c7b02e7fd97aded0a8a678"}, + {file = "wrapt-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:17ba1bdc52d0c783481850996aa26cea5237720769197335abea2ae6b4c23bc0"}, + {file = "wrapt-2.0.0-cp314-cp314-win32.whl", hash = "sha256:f73318741b141223a4674ba96992aa2291b1b3f7a5e85cb3c2c964f86171eb45"}, + {file = "wrapt-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:8e08d4edb13cafe7b3260f31d4de033f73d3205774540cf583bffaa4bec97db9"}, + {file = "wrapt-2.0.0-cp314-cp314-win_arm64.whl", hash = 
"sha256:af01695c2b7bbd8d67b869d8e3de2b123a7bfbee0185bdd138c2775f75373b83"}, + {file = "wrapt-2.0.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:057f02c13cce7b26c79624c06a3e1c2353e6dc9708525232232f6768118042ca"}, + {file = "wrapt-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:79bdd84570267f3f43d609c892ae2d30b91ee4b8614c2cbfd311a2965f1c9bdb"}, + {file = "wrapt-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:93c8b4f4d54fd401a817abbfc9bf482aa72fd447f8adf19ce81d035b3f5c762c"}, + {file = "wrapt-2.0.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5e09ffd31001dce71c2c2a4fc201bdba9a2f9f62b23700cf24af42266e784741"}, + {file = "wrapt-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d87c285ff04e26083c4b03546e7b74df7ba4f1f32f1dcb92e9ac13c2dbb4c379"}, + {file = "wrapt-2.0.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e52e50ea0a72ea48d1291cf8b8aaedcc99072d9dc5baba6b820486dcf4c67da8"}, + {file = "wrapt-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1fd4c95536975895f32571073446e614d5e2810b666b64955586dcddfd438fd3"}, + {file = "wrapt-2.0.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:d6ebfe9283209220ed9de80a3e9442aab8fc2be5a9bbf8491b99e02ca9349a89"}, + {file = "wrapt-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5d3ebd784804f146b7ea55359beb138e23cc18e5a5cc2cf26ad438723c00ce3a"}, + {file = "wrapt-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:9b15940ae9debc8b40b15dc57e1ce4433f7fb9d3f8761c7fab1ddd94cb999d99"}, + {file = "wrapt-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:7a0efbbc06d3e2077476a04f55859819d23206600b4c33f791359a8e6fa3c362"}, + {file = "wrapt-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:7fec8a9455c029c8cf4ff143a53b6e7c463268d42be6c17efa847ebd2f809965"}, + {file = "wrapt-2.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:ac3d8beac68e4863c703b844fcc82693f83f933b37d2a54e9d513b2aab9c76aa"}, + {file = "wrapt-2.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f4b8f8644602803add6848c81b7d214cfd397b1ebab2130dc8530570d888155c"}, + {file = "wrapt-2.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93cb5bff1fcd89b75f869e4f69566a91ab2c9f13e8edf0241fd5777b2fa6d48e"}, + {file = "wrapt-2.0.0-cp38-cp38-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e0eb6d155d02c7525b7ec09856cda5e611fc6eb9ab40d140e1f35f27ac7d5eae"}, + {file = "wrapt-2.0.0-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:309dd467a94ee38a7aa5752bda64e660aeab5723b26200d0b65a375dad9add09"}, + {file = "wrapt-2.0.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a55e8edd08e2eece131d90d82cd1521962d9152829b22c56e68539526d605825"}, + {file = "wrapt-2.0.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:1724dd7b84d419c80ba839da81ad78b02ac30df626e5aefcb18e94632a965f13"}, + {file = "wrapt-2.0.0-cp38-cp38-win32.whl", hash = "sha256:f8255c380a79f6752d0b920e69a5d656d863675d9c433eeb5548518ee2c8d9da"}, + {file = "wrapt-2.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:829c8d46465dbae49dba91516f11200a2b5ea91eae8afaccbc035f0b651eb9c4"}, + {file = "wrapt-2.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:094d348ce7e6ce37bf6ed9a6ecc11886c96f447b3ffebc7539ca197daa9a997e"}, + {file = "wrapt-2.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:98223acaa25b1449d993a3f4ffc8b5a03535e4041b37bf6a25459a0c74ee4cfc"}, + {file = "wrapt-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2b79bf04c722035b1c474980dc1a64369feab7b703d6fe67da2d8664ed0bc980"}, + {file = "wrapt-2.0.0-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:623242959cb0c53f76baeb929be79f5f6a9a1673ef51628072b91bf299af2212"}, + {file = "wrapt-2.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:59dc94afc4542c7d9b9447fb2ae1168b5a29064eca4061dbbf3b3c26df268334"}, + {file = "wrapt-2.0.0-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d7c532cc9f0a9e6017f8d3c37f478a3e3a5dffa955ebba556274e5e916c058f7"}, + {file = "wrapt-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9d72c725cefbcc8ebab85c8352e5062ae87b6e323858e934e16b54ced580435a"}, + {file = "wrapt-2.0.0-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:2ca35b83497276c2ca0b072d2c00da2edde4c2a6c8c650eafcd1a006c17ab231"}, + {file = "wrapt-2.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2fc55d0da29318a5da33c2827aef8946bba046ac609a4784a90faff73c511174"}, + {file = "wrapt-2.0.0-cp39-cp39-win32.whl", hash = "sha256:9c100b0598f3763274f2033bcc0454de7486409f85bc6da58b49e5971747eb36"}, + {file = "wrapt-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:1316972a72c67936a07dbb48e2464356d91dd9674335aaec087b60094d87750b"}, + {file = "wrapt-2.0.0-cp39-cp39-win_arm64.whl", hash = "sha256:5aad54ff45da9784573099696fd84841c7e559ce312f02afa6aa7e89b58e2c2f"}, + {file = "wrapt-2.0.0-py3-none-any.whl", hash = "sha256:02482fb0df89857e35427dfb844319417e14fae05878f295ee43fa3bf3b15502"}, + {file = "wrapt-2.0.0.tar.gz", hash = "sha256:35a542cc7a962331d0279735c30995b024e852cf40481e384fd63caaa391cbb9"}, +] + +[package.extras] +dev = ["pytest", "setuptools"] + [[package]] name = "zipp" version = "3.16.2" @@ -1476,8 +1787,9 @@ html = ["html5rdf"] lxml = ["lxml"] networkx = ["networkx"] orjson = ["orjson"] +rdf4j = ["httpx"] [metadata] lock-version = "2.0" -python-versions = ">=3.8.1" -content-hash = "0ec27e1dca1f3b60dce28a2109a95eaada0bce2cbfbaee4d209d434a0d6e4086" +python-versions = ">=3.9.2, <4.0" +content-hash = "254e5970e2a5c3b0ffbd4f8ee55316c6a39a76ac71ad39b68ea4269dd00e8805" diff --git a/pyproject.toml b/pyproject.toml index ce8042231..0c560d4e0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,7 +38,8 @@ rdfs2dot = 'rdflib.tools.rdfs2dot:main' rdfgraphisomorphism = 
'rdflib.tools.graphisomorphism:main' [tool.poetry.dependencies] -python = ">=3.8.1" +# TODO: temporarily add new python version constraints for testcontainers +python = ">=3.9.2, <4.0" isodate = {version=">=0.7.2,<1.0.0", python = "<3.11"} pyparsing = ">=2.1.0,<4" berkeleydb = {version = "^18.1.0", optional = true} @@ -46,6 +47,7 @@ networkx = {version = ">=2,<4", optional = true} html5rdf = {version = ">=1.2,<2", optional = true} lxml = {version = ">=4.3,<6.0", optional = true} orjson = {version = ">=3.9.14,<4", optional = true} +httpx = {version = "^0.28.1", optional = true} [tool.poetry.group.dev.dependencies] black = "24.4.2" @@ -60,6 +62,7 @@ coverage = {version = "^7.0.1", extras = ["toml"]} types-setuptools = ">=68.0.0.3,<72.0.0.0" setuptools = ">=68,<72" wheel = ">=0.42,<0.46" +testcontainers = "^4.13.2" [tool.poetry.group.docs.dependencies] sphinx = ">=7.1.2,<8" @@ -79,6 +82,7 @@ html = ["html5rdf"] # lxml support is optional, it is used only for parsing XML-formatted SPARQL results lxml = ["lxml"] orjson = ["orjson"] +rdf4j = ["httpx"] [build-system] requires = ["poetry-core>=1.4.0"] diff --git a/rdflib/rdf4j/__init__.py b/rdflib/rdf4j/__init__.py new file mode 100644 index 000000000..df2a3556c --- /dev/null +++ b/rdflib/rdf4j/__init__.py @@ -0,0 +1,3 @@ +from .client import RDF4JClient + +__all__ = ["RDF4JClient"] diff --git a/rdflib/rdf4j/client.py b/rdflib/rdf4j/client.py new file mode 100644 index 000000000..ca4103158 --- /dev/null +++ b/rdflib/rdf4j/client.py @@ -0,0 +1,225 @@ +from __future__ import annotations + +from dataclasses import dataclass +from typing import Any + +import httpx + + +class RepositoryError(Exception): + """Raised when interactions on a repository result in an error.""" + + +class RepositoryFormatError(RepositoryError): + """Raised when the repository format is invalid.""" + + +class RepositoryNotFoundError(RepositoryError): + """Raised when the repository is not found.""" + + +class RepositoryNotHealthyError(RepositoryError): 
+ """Raised when the repository is not healthy.""" + + +class RepositoryAlreadyExistsError(RepositoryError): + """Raised when the repository already exists.""" + + +@dataclass(frozen=True) +class RepositoryResult: + """RDF4J repository result object.""" + + identifier: str + uri: str + readable: bool + writable: bool + title: str | None = None + + +class Repository: + def __init__(self, identifier: str, http_client: httpx.Client): + """RDF4J repository client.""" + self._identifier = identifier + self._http_client = http_client + + @property + def identifier(self): + """Repository identifier.""" + return self._identifier + + def health(self) -> bool: + """Check if the repository is healthy. + + :returns: Returns True if the repository is healthy, otherwise an error is raised. + :raises httpx.RequestError: On network/connection issues. + :raises RepositoryNotFoundError: If the repository is not found. + :raises RepositoryNotHealthyError: If the repository is not healthy. + """ + headers = { + "Content-Type": "application/sparql-query", + "Accept": "application/sparql-results+json", + } + try: + response = self._http_client.post( + f"/repositories/{self._identifier}", headers=headers, content="ASK {}" + ) + response.raise_for_status() + return True + except httpx.HTTPStatusError as err: + if err.response.status_code == 404: + raise RepositoryNotFoundError( + f"Repository {self._identifier} not found." + ) + raise RepositoryNotHealthyError( + f"Repository {self._identifier} is not healthy. {err.response.status_code} - {err.response.text}" + ) + except httpx.RequestError: + raise + + +class RepositoryManager: + """Client to manage server-level repository operations. + + This includes listing, creating, and deleting of repositories. + """ + + def __init__(self, http_client: httpx.Client): + self._http_client = http_client + + def list(self) -> list[RepositoryResult]: + """List all available repositories. + + :returns: List of repository results. 
+ :raises httpx.RequestError: On network/connection issues. + :raises RepositoryFormatError: If the response format is unrecognized. + """ + headers = { + "Accept": "application/sparql-results+json", + } + try: + response = self._http_client.get("/repositories", headers=headers) + response.raise_for_status() + + try: + data = response.json() + results = data["results"]["bindings"] + return [ + RepositoryResult( + identifier=repo["id"]["value"], + uri=repo["uri"]["value"], + readable=repo["readable"]["value"], + writable=repo["writable"]["value"], + title=repo.get("title", {}).get("value"), + ) + for repo in results + ] + except (KeyError, ValueError) as err: + raise RepositoryFormatError(f"Unrecognised response format: {err}") + except httpx.RequestError: + raise + + def get(self, repository_id: str) -> Repository: + """Get a repository by ID. + + This performs a health check before returning the repository object. + + :param repository_id: The identifier of the repository. + :returns: The repository instance. + :raises httpx.RequestError: On network/connection issues. + :raises RepositoryNotFoundError: If the repository is not found. + :raises RepositoryNotHealthyError: If the repository is not healthy. + """ + repo = Repository(repository_id, self._http_client) + try: + repo.health() + return repo + except (httpx.RequestError, RepositoryNotFoundError, RepositoryNotHealthyError): + raise + + def create( + self, repository_id: str, data: str, format: str = "text/turtle" + ) -> Repository: + """Create a new repository. + + :param repository_id: The identifier of the repository. + :param data: The repository configuration in RDF. + :param format: The repository configuration format. + :raises httpx.RequestError: On network/connection issues. + :raises RepositoryAlreadyExistsError: If the repository already exists. + :raises RepositoryNotHealthyError: If the repository is not healthy. 
+ """ + try: + headers = {"Content-Type": format} + response = self._http_client.put( + f"/repositories/{repository_id}", headers=headers, content=data + ) + response.raise_for_status() + return self.get(repository_id) + except httpx.RequestError: + raise + except httpx.HTTPStatusError as err: + if err.response.status_code == 409: + raise RepositoryAlreadyExistsError( + f"Repository {repository_id} already exists." + ) + raise + + def delete(self, repository_id: str) -> None: + """Delete a repository. + + :param repository_id: The identifier of the repository. + :raises httpx.RequestError: On network/connection issues. + :raises RepositoryNotFoundError: If the repository is not found. + :raises RepositoryError: If the repository is not deleted successfully. + """ + try: + response = self._http_client.delete(f"/repositories/{repository_id}") + response.raise_for_status() + if response.status_code != 204: + raise RepositoryError( + f"Unexpected response status code when deleting repository {repository_id}: {response.status_code} - {response.text.strip()}" + ) + except httpx.RequestError: + raise + except httpx.HTTPStatusError as err: + if err.response.status_code == 404: + raise RepositoryNotFoundError(f"Repository {repository_id} not found.") + raise + + +class RDF4JClient: + def __init__( + self, + base_url: str, + auth: tuple[str, str] | None = None, + timeout: float = 30.0, + **kwargs: Any, + ): + """RDF4J client. + + :param base_url: The base URL of the RDF4J server. + :param auth: Authentication tuple (username, password). + :param timeout: Request timeout in seconds (default: 30.0). + :param kwargs: Additional keyword arguments to pass to the httpx.Client. 
+ """ + if not base_url.endswith("/"): + base_url += "/" + self._http_client = httpx.Client( + base_url=base_url, auth=auth, timeout=timeout, **kwargs + ) + self._repository_manager = RepositoryManager(self._http_client) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + @property + def repositories(self): + """Server-level repository management operations.""" + return self._repository_manager + + def close(self): + self._http_client.close() diff --git a/test/test_rdf4j/conftest.py b/test/test_rdf4j/conftest.py new file mode 100644 index 000000000..ab20d3794 --- /dev/null +++ b/test/test_rdf4j/conftest.py @@ -0,0 +1,40 @@ +import pathlib + +import pytest +from testcontainers.core.container import DockerContainer +from testcontainers.core.image import DockerImage +from testcontainers.core.waiting_utils import wait_for_logs + +from rdflib.rdf4j import RDF4JClient + +RDF4J_IMAGE = "eclipse/rdf4j-workbench:5.1.6-jetty" +RDF4J_PORT = 8080 +GRAPHDB_PORT = 7200 + + +@pytest.fixture(scope="function") +def rdf4j_container(): + container = DockerContainer(RDF4J_IMAGE) + container.with_exposed_ports(RDF4J_PORT) + container.start() + wait_for_logs(container, "oejs.Server:main: Started") + yield container + container.stop() + + +@pytest.fixture(scope="function") +def graphdb_container(): + with DockerImage(str(pathlib.Path(__file__).parent / "docker")) as image: + container = DockerContainer(str(image)) + container.with_exposed_ports(GRAPHDB_PORT) + container.start() + wait_for_logs(container, "Started GraphDB") + yield container + container.stop() + + +@pytest.fixture(scope="function") +def client(graphdb_container: DockerContainer): + port = graphdb_container.get_exposed_port(7200) + with RDF4JClient(f"http://localhost:{port}/", auth=("admin", "admin")) as client: + yield client diff --git a/test/test_rdf4j/docker/Dockerfile b/test/test_rdf4j/docker/Dockerfile new file mode 100644 index 000000000..0920351df --- 
/dev/null +++ b/test/test_rdf4j/docker/Dockerfile @@ -0,0 +1,4 @@ +FROM ontotext/graphdb:10.8.4 + +COPY settings.txt /opt/graphdb/home/data/settings.js +COPY users.txt /opt/graphdb/home/data/users.js diff --git a/test/test_rdf4j/docker/settings.txt b/test/test_rdf4j/docker/settings.txt new file mode 100644 index 000000000..d861a36c4 --- /dev/null +++ b/test/test_rdf4j/docker/settings.txt @@ -0,0 +1,6 @@ +{ + "properties" : { + "current.location" : "", + "security.enabled" : "true" + } +} diff --git a/test/test_rdf4j/docker/users.txt b/test/test_rdf4j/docker/users.txt new file mode 100644 index 000000000..5480bf133 --- /dev/null +++ b/test/test_rdf4j/docker/users.txt @@ -0,0 +1,42 @@ +{ + "users" : { + "admin" : { + "username" : "admin", + "password" : "{bcrypt}$2a$10$3EYdj3fBH0/.aA/fRodud.T2YPtSVlC7J/d.9Jk8v1pHd1ar8HEau", + "grantedAuthorities" : [ "ROLE_ADMIN" ], + "appSettings" : { + "DEFAULT_INFERENCE" : true, + "DEFAULT_VIS_GRAPH_SCHEMA" : true, + "DEFAULT_SAMEAS" : true, + "IGNORE_SHARED_QUERIES" : false, + "EXECUTE_COUNT" : true + }, + "dateCreated" : 1761543222200, + "gptThreads" : [ ] + } + }, + "user_queries" : { + "admin" : { + "SPARQL Select template" : { + "name" : "SPARQL Select template", + "body" : "SELECT ?s ?p ?o\nWHERE {\n\t?s ?p ?o .\n} LIMIT 100", + "shared" : false + }, + "Clear graph" : { + "name" : "Clear graph", + "body" : "CLEAR GRAPH ", + "shared" : false + }, + "Add statements" : { + "name" : "Add statements", + "body" : "PREFIX dc: \nINSERT DATA\n {\n GRAPH {\n dc:title \"A new book\" ;\n dc:creator \"A.N.Other\" .\n }\n }", + "shared" : false + }, + "Remove statements" : { + "name" : "Remove statements", + "body" : "PREFIX dc: \nDELETE DATA\n{\nGRAPH {\n dc:title \"A new book\" ;\n dc:creator \"A.N.Other\" .\n }\n}", + "shared" : false + } + } + } +} diff --git a/test/test_rdf4j/repo-configs/test-repo-config.ttl b/test/test_rdf4j/repo-configs/test-repo-config.ttl new file mode 100644 index 000000000..b73d67f2d --- /dev/null +++ 
b/test/test_rdf4j/repo-configs/test-repo-config.ttl @@ -0,0 +1,16 @@ +PREFIX config: + +[] a config:Repository ; + config:rep.id "test-repo" ; + config:rep.impl + [ + config:rep.type "openrdf:SailRepository" ; + config:sail.impl + [ + config:native.tripleIndexers "spoc,posc" ; + config:sail.defaultQueryEvaluationMode "STANDARD" ; + config:sail.iterationCacheSyncThreshold "10000" ; + config:sail.type "openrdf:NativeStore" ; + ] ; + ] ; +. diff --git a/test/test_rdf4j/test_repo_management.py b/test/test_rdf4j/test_repo_management.py new file mode 100644 index 000000000..7fbc8942e --- /dev/null +++ b/test/test_rdf4j/test_repo_management.py @@ -0,0 +1,45 @@ +import pathlib + +import pytest + +from rdflib.rdf4j import RDF4JClient +from rdflib.rdf4j.client import RepositoryAlreadyExistsError, RepositoryNotFoundError + +GRAPHDB_PORT = 7200 + + +def test_repos(client: RDF4JClient): + assert client.repositories.list() == [] + + +def test_list_repo_non_existent(client: RDF4JClient): + assert client.repositories.list() == [] + with pytest.raises(RepositoryNotFoundError): + assert client.repositories.get("non-existent") is None + + +def test_repo_manager_crud(client: RDF4JClient): + # Empty state + assert client.repositories.list() == [] + + config_path = pathlib.Path(__file__).parent / "repo-configs/test-repo-config.ttl" + with open(config_path) as file: + config = file.read() + + repo = client.repositories.create("test-repo", config) + assert repo.identifier == "test-repo" + assert repo.health() + # New repository created + assert len(client.repositories.list()) == 1 + + # Repo already exists error + with pytest.raises(RepositoryAlreadyExistsError): + client.repositories.create("test-repo", config) + + # Delete repository + client.repositories.delete("test-repo") + assert client.repositories.list() == [] + + # Deleting non-existent repo + with pytest.raises(RepositoryNotFoundError): + client.repositories.delete("test-repo") From 42708bde958765648046ccc5f4306ff20fe85941 Mon 
Sep 17 00:00:00 2001 From: Edmond Chuc Date: Thu, 30 Oct 2025 11:25:38 +1000 Subject: [PATCH 02/54] test: add test for repo listing format error and repo not healthy error --- test/test_rdf4j/test_repo_management.py | 43 ++++++++++++++++++++++++- 1 file changed, 42 insertions(+), 1 deletion(-) diff --git a/test/test_rdf4j/test_repo_management.py b/test/test_rdf4j/test_repo_management.py index 7fbc8942e..8eaf53d4c 100644 --- a/test/test_rdf4j/test_repo_management.py +++ b/test/test_rdf4j/test_repo_management.py @@ -1,9 +1,15 @@ import pathlib import pytest +import httpx from rdflib.rdf4j import RDF4JClient -from rdflib.rdf4j.client import RepositoryAlreadyExistsError, RepositoryNotFoundError +from rdflib.rdf4j.client import ( + RepositoryAlreadyExistsError, + RepositoryNotFoundError, + RepositoryFormatError, + RepositoryNotHealthyError, +) GRAPHDB_PORT = 7200 @@ -18,6 +24,19 @@ def test_list_repo_non_existent(client: RDF4JClient): assert client.repositories.get("non-existent") is None +def test_list_repo_format_error(client: RDF4JClient, monkeypatch): + class MockResponse: + def json(self): + return {} + + def raise_for_status(self): + pass + + monkeypatch.setattr(httpx.Client, "get", lambda *args, **kwargs: MockResponse()) + with pytest.raises(RepositoryFormatError): + client.repositories.list() + + def test_repo_manager_crud(client: RDF4JClient): # Empty state assert client.repositories.list() == [] @@ -29,6 +48,7 @@ def test_repo_manager_crud(client: RDF4JClient): repo = client.repositories.create("test-repo", config) assert repo.identifier == "test-repo" assert repo.health() + # New repository created assert len(client.repositories.list()) == 1 @@ -43,3 +63,24 @@ def test_repo_manager_crud(client: RDF4JClient): # Deleting non-existent repo with pytest.raises(RepositoryNotFoundError): client.repositories.delete("test-repo") + + +def test_repo_not_healthy(client: RDF4JClient, monkeypatch): + config_path = pathlib.Path(__file__).parent / 
"repo-configs/test-repo-config.ttl" + with open(config_path) as file: + config = file.read() + + repo = client.repositories.create("test-repo", config) + assert repo.identifier == "test-repo" + + class MockResponse: + def raise_for_status(self): + raise httpx.HTTPStatusError( + "", + request=httpx.Request("post", ""), + response=httpx.Response(status_code=500), + ) + + monkeypatch.setattr(httpx.Client, "post", lambda *args, **kwargs: MockResponse()) + with pytest.raises(RepositoryNotHealthyError): + repo.health() From 562bd1d698ad3e86433834891b8bfb66ca36f012 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Fri, 31 Oct 2025 11:48:00 +1000 Subject: [PATCH 03/54] chore: add testcontainers comment --- pyproject.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 0c560d4e0..ecbbd1595 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,6 +39,8 @@ rdfgraphisomorphism = 'rdflib.tools.graphisomorphism:main' [tool.poetry.dependencies] # TODO: temporarily add new python version constraints for testcontainers +# We can remove the upper bound once testcontainers releases a new version +# https://github.com/testcontainers/testcontainers-python/pull/909 python = ">=3.9.2, <4.0" isodate = {version=">=0.7.2,<1.0.0", python = "<3.11"} pyparsing = ">=2.1.0,<4" From 00164e107b6d089846038f8f4768b44dcfe29dbf Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Fri, 31 Oct 2025 12:56:51 +1000 Subject: [PATCH 04/54] docs: add google style docstring for mkdocs --- mkdocs.yml | 1 + rdflib/rdf4j/client.py | 115 +++++++++++++++++++++++++++-------------- 2 files changed, 76 insertions(+), 40 deletions(-) diff --git a/mkdocs.yml b/mkdocs.yml index 2aa212c2c..7524e9355 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -43,6 +43,7 @@ nav: - Container: apidocs/rdflib.container.md - Collection: apidocs/rdflib.collection.md - Paths: apidocs/rdflib.paths.md + - RDF4J: apidocs/rdflib.rdf4j.md - Util: apidocs/rdflib.util.md - Plugins: - Parsers: 
apidocs/rdflib.plugins.parsers.md diff --git a/rdflib/rdf4j/client.py b/rdflib/rdf4j/client.py index ca4103158..8f2892e32 100644 --- a/rdflib/rdf4j/client.py +++ b/rdflib/rdf4j/client.py @@ -1,3 +1,4 @@ +"""RDF4J client module.""" from __future__ import annotations from dataclasses import dataclass @@ -28,8 +29,15 @@ class RepositoryAlreadyExistsError(RepositoryError): @dataclass(frozen=True) class RepositoryResult: - """RDF4J repository result object.""" - + """RDF4J repository result object. + + Parameters: + identifier: Repository identifier. + uri: Repository URI. + readable: Whether the repository is readable by the client. + writable: Whether the repository is writable by the client. + title: Repository title. + """ identifier: str uri: str readable: bool @@ -38,8 +46,13 @@ class RepositoryResult: class Repository: + """RDF4J repository client. + + Parameters: + identifier: The identifier of the repository. + http_client: The httpx.Client instance. + """ def __init__(self, identifier: str, http_client: httpx.Client): - """RDF4J repository client.""" self._identifier = identifier self._http_client = http_client @@ -49,12 +62,16 @@ def identifier(self): return self._identifier def health(self) -> bool: - """Check if the repository is healthy. + """Repository health check. + + Returns: + bool: True if the repository is healthy, otherwise an error is raised. - :returns: Returns True if the repository is healthy, otherwise an error is raised. - :raises httpx.RequestError: On network/connection issues. - :raises RepositoryNotFoundError: If the repository is not found. - :raises RepositoryNotHealthyError: If the repository is not healthy. + Raises: + RepositoryNotFoundError: If the repository is not found. + RepositoryNotHealthyError: If the repository is not healthy. + httpx.RequestError: On network/connection issues. + httpx.HTTPStatusError: Unhandled status code error. 
""" headers = { "Content-Type": "application/sparql-query", @@ -79,10 +96,7 @@ def health(self) -> bool: class RepositoryManager: - """Client to manage server-level repository operations. - - This includes listing, creating, and deleting of repositories. - """ + """A client to manage server-level repository operations.""" def __init__(self, http_client: httpx.Client): self._http_client = http_client @@ -90,9 +104,13 @@ def __init__(self, http_client: httpx.Client): def list(self) -> list[RepositoryResult]: """List all available repositories. - :returns: List of repository results. - :raises httpx.RequestError: On network/connection issues. - :raises RepositoryFormatError: If the response format is unrecognized. + Returns: + list[RepositoryResult]: List of repository results. + + Raises: + RepositoryFormatError: If the response format is unrecognized. + httpx.RequestError: On network/connection issues. + httpx.HTTPStatusError: Unhandled status code error. """ headers = { "Accept": "application/sparql-results+json", @@ -116,25 +134,32 @@ def list(self) -> list[RepositoryResult]: ] except (KeyError, ValueError) as err: raise RepositoryFormatError(f"Unrecognised response format: {err}") - except httpx.RequestError: + except (httpx.RequestError, httpx.HTTPStatusError): raise def get(self, repository_id: str) -> Repository: """Get a repository by ID. - This performs a health check before returning the repository object. + !!! note + This performs a health check before returning the repository object. - :param repository_id: The identifier of the repository. - :returns: The repository instance. - :raises httpx.RequestError: On network/connection issues. - :raises RepositoryNotFoundError: If the repository is not found. - :raises RepositoryNotHealthyError: If the repository is not healthy. + Parameters: + repository_id: The identifier of the repository. + + Returns: + Repository: The repository instance. 
+ + Raises: + RepositoryNotFoundError: If the repository is not found. + RepositoryNotHealthyError: If the repository is not healthy. + httpx.RequestError: On network/connection issues. + httpx.HTTPStatusError: Unhandled status code error. """ repo = Repository(repository_id, self._http_client) try: repo.health() return repo - except (httpx.RequestError, RepositoryNotFoundError, RepositoryNotHealthyError): + except (RepositoryNotFoundError, RepositoryNotHealthyError, httpx.RequestError): raise def create( @@ -142,12 +167,16 @@ def create( ) -> Repository: """Create a new repository. - :param repository_id: The identifier of the repository. - :param data: The repository configuration in RDF. - :param format: The repository configuration format. - :raises httpx.RequestError: On network/connection issues. - :raises RepositoryAlreadyExistsError: If the repository already exists. - :raises RepositoryNotHealthyError: If the repository is not healthy. + Parameters: + repository_id: The identifier of the repository. + data: The repository configuration in RDF. + format: The repository configuration format. + + Raises: + RepositoryAlreadyExistsError: If the repository already exists. + RepositoryNotHealthyError: If the repository is not healthy. + httpx.RequestError: On network/connection issues. + httpx.HTTPStatusError: Unhandled status code error. """ try: headers = {"Content-Type": format} @@ -168,10 +197,14 @@ def create( def delete(self, repository_id: str) -> None: """Delete a repository. - :param repository_id: The identifier of the repository. - :raises httpx.RequestError: On network/connection issues. - :raises RepositoryNotFoundError: If the repository is not found. - :raises RepositoryError: If the repository is not deleted successfully. + Parameters: + repository_id: The identifier of the repository. + + Raises: + RepositoryNotFoundError: If the repository is not found. + RepositoryError: If the repository is not deleted successfully. 
+ httpx.RequestError: On network/connection issues. + httpx.HTTPStatusError: Unhandled status code error. """ try: response = self._http_client.delete(f"/repositories/{repository_id}") @@ -189,6 +222,14 @@ def delete(self, repository_id: str) -> None: class RDF4JClient: + """RDF4J client. + + Parameters: + base_url: The base URL of the RDF4J server. + auth: Authentication tuple (username, password). + timeout: Request timeout in seconds (default: 30.0). + kwargs: Additional keyword arguments to pass to the httpx.Client. + """ def __init__( self, base_url: str, @@ -196,13 +237,6 @@ def __init__( timeout: float = 30.0, **kwargs: Any, ): - """RDF4J client. - - :param base_url: The base URL of the RDF4J server. - :param auth: Authentication tuple (username, password). - :param timeout: Request timeout in seconds (default: 30.0). - :param kwargs: Additional keyword arguments to pass to the httpx.Client. - """ if not base_url.endswith("/"): base_url += "/" self._http_client = httpx.Client( @@ -222,4 +256,5 @@ def repositories(self): return self._repository_manager def close(self): + """Close the underlying httpx.Client.""" self._http_client.close() From 6c0520eb55f5e25a9f8a85df85a66c7297934079 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Fri, 31 Oct 2025 12:57:12 +1000 Subject: [PATCH 05/54] test: add rdf4j client test --- test/test_rdf4j/test_client.py | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 test/test_rdf4j/test_client.py diff --git a/test/test_rdf4j/test_client.py b/test/test_rdf4j/test_client.py new file mode 100644 index 000000000..d5d6240ea --- /dev/null +++ b/test/test_rdf4j/test_client.py @@ -0,0 +1,6 @@ +from rdflib.rdf4j import RDF4JClient + + +def test_client_close_method(client: RDF4JClient): + client.close() + assert client._http_client.is_closed From b3242f1baaa38180dd580dc850bef2d42069a711 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Fri, 31 Oct 2025 12:57:29 +1000 Subject: [PATCH 06/54] chore: add todo to only run rdf4j tests 
on python 3.9 or greater --- test/test_rdf4j/test_repo_management.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/test/test_rdf4j/test_repo_management.py b/test/test_rdf4j/test_repo_management.py index 8eaf53d4c..76d6b4ae5 100644 --- a/test/test_rdf4j/test_repo_management.py +++ b/test/test_rdf4j/test_repo_management.py @@ -11,8 +11,7 @@ RepositoryNotHealthyError, ) -GRAPHDB_PORT = 7200 - +# TODO: only run these tests on py39 or greater. Testcontainers not available on py38. def test_repos(client: RDF4JClient): assert client.repositories.list() == [] From bae5560b51fd74559264a8ac8f16526cd8b37792 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Fri, 31 Oct 2025 13:05:37 +1000 Subject: [PATCH 07/54] chore: add todo to only run rdf4j tests on python 3.9 or greater --- test/test_rdf4j/test_repo_management.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/test/test_rdf4j/test_repo_management.py b/test/test_rdf4j/test_repo_management.py index 76d6b4ae5..513368568 100644 --- a/test/test_rdf4j/test_repo_management.py +++ b/test/test_rdf4j/test_repo_management.py @@ -1,18 +1,19 @@ import pathlib -import pytest import httpx +import pytest from rdflib.rdf4j import RDF4JClient from rdflib.rdf4j.client import ( RepositoryAlreadyExistsError, - RepositoryNotFoundError, RepositoryFormatError, + RepositoryNotFoundError, RepositoryNotHealthyError, ) # TODO: only run these tests on py39 or greater. Testcontainers not available on py38. 
+ def test_repos(client: RDF4JClient): assert client.repositories.list() == [] From a5b71b21c854096d5ba47dc535e1db2803bdea4a Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Fri, 31 Oct 2025 13:34:16 +1000 Subject: [PATCH 08/54] refactor: organise rdf4j package into rdflib.contrib --- rdflib/contrib/__init__.py | 0 rdflib/{ => contrib}/rdf4j/__init__.py | 0 rdflib/{ => contrib}/rdf4j/client.py | 57 ++++++++++++++----------- rdflib/contrib/rdf4j/exceptions.py | 25 +++++++++++ test/test_rdf4j/conftest.py | 2 +- test/test_rdf4j/test_client.py | 15 ++++++- test/test_rdf4j/test_repo_management.py | 4 +- 7 files changed, 75 insertions(+), 28 deletions(-) create mode 100644 rdflib/contrib/__init__.py rename rdflib/{ => contrib}/rdf4j/__init__.py (100%) rename rdflib/{ => contrib}/rdf4j/client.py (88%) create mode 100644 rdflib/contrib/rdf4j/exceptions.py diff --git a/rdflib/contrib/__init__.py b/rdflib/contrib/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/rdflib/rdf4j/__init__.py b/rdflib/contrib/rdf4j/__init__.py similarity index 100% rename from rdflib/rdf4j/__init__.py rename to rdflib/contrib/rdf4j/__init__.py diff --git a/rdflib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py similarity index 88% rename from rdflib/rdf4j/client.py rename to rdflib/contrib/rdf4j/client.py index 8f2892e32..70ead51c8 100644 --- a/rdflib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -1,4 +1,5 @@ """RDF4J client module.""" + from __future__ import annotations from dataclasses import dataclass @@ -6,30 +7,19 @@ import httpx - -class RepositoryError(Exception): - """Raised when interactions on a repository result in an error.""" - - -class RepositoryFormatError(RepositoryError): - """Raised when the repository format is invalid.""" - - -class RepositoryNotFoundError(RepositoryError): - """Raised when the repository is not found.""" - - -class RepositoryNotHealthyError(RepositoryError): - """Raised when the repository is not healthy.""" - - -class 
RepositoryAlreadyExistsError(RepositoryError): - """Raised when the repository already exists.""" +from rdflib.contrib.rdf4j.exceptions import ( + RDF4JUnsupportedProtocolError, + RepositoryAlreadyExistsError, + RepositoryError, + RepositoryFormatError, + RepositoryNotFoundError, + RepositoryNotHealthyError, +) @dataclass(frozen=True) -class RepositoryResult: - """RDF4J repository result object. +class RepositoryListingResult: + """RDF4J repository listing result. Parameters: identifier: Repository identifier. @@ -38,6 +28,7 @@ class RepositoryResult: writable: Whether the repository is writable by the client. title: Repository title. """ + identifier: str uri: str readable: bool @@ -52,6 +43,7 @@ class Repository: identifier: The identifier of the repository. http_client: The httpx.Client instance. """ + def __init__(self, identifier: str, http_client: httpx.Client): self._identifier = identifier self._http_client = http_client @@ -101,11 +93,11 @@ class RepositoryManager: def __init__(self, http_client: httpx.Client): self._http_client = http_client - def list(self) -> list[RepositoryResult]: + def list(self) -> list[RepositoryListingResult]: """List all available repositories. Returns: - list[RepositoryResult]: List of repository results. + list[RepositoryListingResult]: List of repository results. Raises: RepositoryFormatError: If the response format is unrecognized. @@ -123,7 +115,7 @@ def list(self) -> list[RepositoryResult]: data = response.json() results = data["results"]["bindings"] return [ - RepositoryResult( + RepositoryListingResult( identifier=repo["id"]["value"], uri=repo["uri"]["value"], readable=repo["readable"]["value"], @@ -230,6 +222,7 @@ class RDF4JClient: timeout: Request timeout in seconds (default: 30.0). kwargs: Additional keyword arguments to pass to the httpx.Client. 
""" + def __init__( self, base_url: str, @@ -242,6 +235,11 @@ def __init__( self._http_client = httpx.Client( base_url=base_url, auth=auth, timeout=timeout, **kwargs ) + if self.protocol < 12: + self.close() + raise RDF4JUnsupportedProtocolError( + f"RDF4J server protocol version {self.protocol} is not supported. Minimum required version is 12." + ) self._repository_manager = RepositoryManager(self._http_client) def __enter__(self): @@ -255,6 +253,17 @@ def repositories(self): """Server-level repository management operations.""" return self._repository_manager + @property + def protocol(self) -> float: + try: + response = self._http_client.get( + "/protocol", headers={"Accept": "text/plain"} + ) + response.raise_for_status() + return float(response.text.strip()) + except (httpx.RequestError, httpx.HTTPStatusError): + raise + def close(self): """Close the underlying httpx.Client.""" self._http_client.close() diff --git a/rdflib/contrib/rdf4j/exceptions.py b/rdflib/contrib/rdf4j/exceptions.py new file mode 100644 index 000000000..30ab2b87f --- /dev/null +++ b/rdflib/contrib/rdf4j/exceptions.py @@ -0,0 +1,25 @@ +"""RDF4J exceptions.""" + + +class RepositoryError(Exception): + """Raised when interactions on a repository result in an error.""" + + +class RepositoryFormatError(RepositoryError): + """Raised when the repository format is invalid.""" + + +class RepositoryNotFoundError(RepositoryError): + """Raised when the repository is not found.""" + + +class RepositoryNotHealthyError(RepositoryError): + """Raised when the repository is not healthy.""" + + +class RepositoryAlreadyExistsError(RepositoryError): + """Raised when the repository already exists.""" + + +class RDF4JUnsupportedProtocolError(Exception): + """Raised when the server does not support the protocol version.""" diff --git a/test/test_rdf4j/conftest.py b/test/test_rdf4j/conftest.py index ab20d3794..ea3994a42 100644 --- a/test/test_rdf4j/conftest.py +++ b/test/test_rdf4j/conftest.py @@ -5,7 +5,7 @@ from 
testcontainers.core.image import DockerImage from testcontainers.core.waiting_utils import wait_for_logs -from rdflib.rdf4j import RDF4JClient +from rdflib.contrib.rdf4j import RDF4JClient RDF4J_IMAGE = "eclipse/rdf4j-workbench:5.1.6-jetty" RDF4J_PORT = 8080 diff --git a/test/test_rdf4j/test_client.py b/test/test_rdf4j/test_client.py index d5d6240ea..365d5d34b 100644 --- a/test/test_rdf4j/test_client.py +++ b/test/test_rdf4j/test_client.py @@ -1,6 +1,19 @@ -from rdflib.rdf4j import RDF4JClient +import pytest + +from rdflib.contrib.rdf4j import RDF4JClient +from rdflib.contrib.rdf4j.exceptions import RDF4JUnsupportedProtocolError def test_client_close_method(client: RDF4JClient): client.close() assert client._http_client.is_closed + + +def test_client_protocol(client: RDF4JClient): + assert client.protocol >= 12 + + +def test_client_protocol_error(monkeypatch): + monkeypatch.setattr(RDF4JClient, "protocol", 11) + with pytest.raises(RDF4JUnsupportedProtocolError): + RDF4JClient("http://example.com/") diff --git a/test/test_rdf4j/test_repo_management.py b/test/test_rdf4j/test_repo_management.py index 513368568..ed706a5c6 100644 --- a/test/test_rdf4j/test_repo_management.py +++ b/test/test_rdf4j/test_repo_management.py @@ -3,8 +3,8 @@ import httpx import pytest -from rdflib.rdf4j import RDF4JClient -from rdflib.rdf4j.client import ( +from rdflib.contrib.rdf4j import RDF4JClient +from rdflib.contrib.rdf4j.exceptions import ( RepositoryAlreadyExistsError, RepositoryFormatError, RepositoryNotFoundError, From 8cf0178f5d516cd8c3796e9299f3bfd3fbff738a Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Fri, 31 Oct 2025 14:00:36 +1000 Subject: [PATCH 09/54] feat: add http_client property --- rdflib/contrib/rdf4j/client.py | 28 ++++++++++++++++++++-------- 1 file changed, 20 insertions(+), 8 deletions(-) diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index 70ead51c8..96bb4ec06 100644 --- a/rdflib/contrib/rdf4j/client.py +++ 
b/rdflib/contrib/rdf4j/client.py @@ -48,6 +48,10 @@ def __init__(self, identifier: str, http_client: httpx.Client): self._identifier = identifier self._http_client = http_client + @property + def http_client(self): + return self._http_client + @property def identifier(self): """Repository identifier.""" @@ -70,7 +74,7 @@ def health(self) -> bool: "Accept": "application/sparql-results+json", } try: - response = self._http_client.post( + response = self.http_client.post( f"/repositories/{self._identifier}", headers=headers, content="ASK {}" ) response.raise_for_status() @@ -93,6 +97,10 @@ class RepositoryManager: def __init__(self, http_client: httpx.Client): self._http_client = http_client + @property + def http_client(self): + return self._http_client + def list(self) -> list[RepositoryListingResult]: """List all available repositories. @@ -108,7 +116,7 @@ def list(self) -> list[RepositoryListingResult]: "Accept": "application/sparql-results+json", } try: - response = self._http_client.get("/repositories", headers=headers) + response = self.http_client.get("/repositories", headers=headers) response.raise_for_status() try: @@ -147,7 +155,7 @@ def get(self, repository_id: str) -> Repository: httpx.RequestError: On network/connection issues. httpx.HTTPStatusError: Unhandled status code error. """ - repo = Repository(repository_id, self._http_client) + repo = Repository(repository_id, self.http_client) try: repo.health() return repo @@ -172,7 +180,7 @@ def create( """ try: headers = {"Content-Type": format} - response = self._http_client.put( + response = self.http_client.put( f"/repositories/{repository_id}", headers=headers, content=data ) response.raise_for_status() @@ -199,7 +207,7 @@ def delete(self, repository_id: str) -> None: httpx.HTTPStatusError: Unhandled status code error. 
""" try: - response = self._http_client.delete(f"/repositories/{repository_id}") + response = self.http_client.delete(f"/repositories/{repository_id}") response.raise_for_status() if response.status_code != 204: raise RepositoryError( @@ -240,7 +248,7 @@ def __init__( raise RDF4JUnsupportedProtocolError( f"RDF4J server protocol version {self.protocol} is not supported. Minimum required version is 12." ) - self._repository_manager = RepositoryManager(self._http_client) + self._repository_manager = RepositoryManager(self.http_client) def __enter__(self): return self @@ -248,6 +256,10 @@ def __enter__(self): def __exit__(self, exc_type, exc_val, exc_tb): self.close() + @property + def http_client(self): + return self._http_client + @property def repositories(self): """Server-level repository management operations.""" @@ -256,7 +268,7 @@ def repositories(self): @property def protocol(self) -> float: try: - response = self._http_client.get( + response = self.http_client.get( "/protocol", headers={"Accept": "text/plain"} ) response.raise_for_status() @@ -266,4 +278,4 @@ def protocol(self) -> float: def close(self): """Close the underlying httpx.Client.""" - self._http_client.close() + self.http_client.close() From e81322a8ec338aecf9d2b0ca063006b91c95cd47 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Fri, 31 Oct 2025 14:01:18 +1000 Subject: [PATCH 10/54] feat: bootstrap a minimal graphdb client --- rdflib/contrib/graphdb/__init__.py | 3 + rdflib/contrib/graphdb/client.py | 61 +++++++++++++++++ test/test_rdf4j/test_graphdb/conftest.py | 11 ++++ .../test_graphdb/test_repo_management.py | 66 +++++++++++++++++++ 4 files changed, 141 insertions(+) create mode 100644 rdflib/contrib/graphdb/__init__.py create mode 100644 rdflib/contrib/graphdb/client.py create mode 100644 test/test_rdf4j/test_graphdb/conftest.py create mode 100644 test/test_rdf4j/test_graphdb/test_repo_management.py diff --git a/rdflib/contrib/graphdb/__init__.py b/rdflib/contrib/graphdb/__init__.py new file 
mode 100644 index 000000000..ca6a8ef4d --- /dev/null +++ b/rdflib/contrib/graphdb/__init__.py @@ -0,0 +1,3 @@ +from .client import GraphDBClient + +__all__ = ["GraphDBClient"] diff --git a/rdflib/contrib/graphdb/client.py b/rdflib/contrib/graphdb/client.py new file mode 100644 index 000000000..10c396020 --- /dev/null +++ b/rdflib/contrib/graphdb/client.py @@ -0,0 +1,61 @@ +import httpx + +import rdflib.contrib.rdf4j +from rdflib.contrib.rdf4j import RDF4JClient +from rdflib.contrib.rdf4j.exceptions import ( + RepositoryNotFoundError, + RepositoryNotHealthyError, +) + + +class Repository(rdflib.contrib.rdf4j.client.Repository): + """GraphDB Repository""" + + def health(self, timeout: int = 5) -> bool: + """Repository health check. + + Parameters: + timeout: A timeout parameter in seconds. If provided, the endpoint attempts + to retrieve the repository within this timeout. If not, the passive + check is performed. + + Returns: + bool: True if the repository is healthy, otherwise an error is raised. + + Raises: + RepositoryNotFoundError: If the repository is not found. + RepositoryNotHealthyError: If the repository is not healthy. + httpx.RequestError: On network/connection issues. + httpx.HTTPStatusError: Unhandled status code error. + """ + try: + params = {"passive": str(timeout)} + response = self.http_client.get( + f"/repositories/{self.identifier}/health", params=params + ) + response.raise_for_status() + return True + except httpx.HTTPStatusError as err: + if err.response.status_code == 404: + raise RepositoryNotFoundError( + f"Repository {self._identifier} not found." + ) + raise RepositoryNotHealthyError( + f"Repository {self._identifier} is not healthy. 
{err.response.status_code} - {err.response.text}" + ) + except httpx.RequestError: + raise + + +class RepositoryManager(rdflib.contrib.rdf4j.client.RepositoryManager): + """GraphDB Repository Manager""" + + def get(self, repository_id: str) -> Repository: + _repo = super().get(repository_id) + return Repository(_repo.identifier, _repo.http_client) + + +class GraphDBClient(RDF4JClient): + """GraphDB Client""" + + # TODO: GraphDB specific API methods. diff --git a/test/test_rdf4j/test_graphdb/conftest.py b/test/test_rdf4j/test_graphdb/conftest.py new file mode 100644 index 000000000..725e16994 --- /dev/null +++ b/test/test_rdf4j/test_graphdb/conftest.py @@ -0,0 +1,11 @@ +import pytest +from testcontainers.core.container import DockerContainer + +from rdflib.contrib.graphdb import GraphDBClient + + +@pytest.fixture(scope="function") +def client(graphdb_container: DockerContainer): + port = graphdb_container.get_exposed_port(7200) + with GraphDBClient(f"http://localhost:{port}/", auth=("admin", "admin")) as client: + yield client diff --git a/test/test_rdf4j/test_graphdb/test_repo_management.py b/test/test_rdf4j/test_graphdb/test_repo_management.py new file mode 100644 index 000000000..7916d0a5e --- /dev/null +++ b/test/test_rdf4j/test_graphdb/test_repo_management.py @@ -0,0 +1,66 @@ +import pathlib + +import httpx +import pytest + +from rdflib.contrib.graphdb import GraphDBClient +from rdflib.contrib.rdf4j.exceptions import ( + RepositoryAlreadyExistsError, + RepositoryNotFoundError, + RepositoryNotHealthyError, +) + +# TODO: consider parameterizing the client (RDF4JClient, GraphDBClient) + + +def test_repo_manager_crud(client: GraphDBClient): + # Empty state + assert client.repositories.list() == [] + + config_path = ( + pathlib.Path(__file__).parent.parent / "repo-configs/test-repo-config.ttl" + ) + with open(config_path) as file: + config = file.read() + + repo = client.repositories.create("test-repo", config) + assert repo.identifier == "test-repo" + assert 
repo.health() + + # New repository created + assert len(client.repositories.list()) == 1 + + # Repo already exists error + with pytest.raises(RepositoryAlreadyExistsError): + client.repositories.create("test-repo", config) + + # Delete repository + client.repositories.delete("test-repo") + assert client.repositories.list() == [] + + # Deleting non-existent repo + with pytest.raises(RepositoryNotFoundError): + client.repositories.delete("test-repo") + + +def test_repo_not_healthy(client: GraphDBClient, monkeypatch): + config_path = ( + pathlib.Path(__file__).parent.parent / "repo-configs/test-repo-config.ttl" + ) + with open(config_path) as file: + config = file.read() + + repo = client.repositories.create("test-repo", config) + assert repo.identifier == "test-repo" + + class MockResponse: + def raise_for_status(self): + raise httpx.HTTPStatusError( + "", + request=httpx.Request("post", ""), + response=httpx.Response(status_code=500), + ) + + monkeypatch.setattr(httpx.Client, "post", lambda *args, **kwargs: MockResponse()) + with pytest.raises(RepositoryNotHealthyError): + repo.health() From 6d744ceafa13e243773d9bac2e60ce455b81e37f Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Fri, 31 Oct 2025 14:02:47 +1000 Subject: [PATCH 11/54] chore: remove unused rdf4j testcontainer --- test/test_rdf4j/conftest.py | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/test/test_rdf4j/conftest.py b/test/test_rdf4j/conftest.py index ea3994a42..9592afeac 100644 --- a/test/test_rdf4j/conftest.py +++ b/test/test_rdf4j/conftest.py @@ -7,21 +7,9 @@ from rdflib.contrib.rdf4j import RDF4JClient -RDF4J_IMAGE = "eclipse/rdf4j-workbench:5.1.6-jetty" -RDF4J_PORT = 8080 GRAPHDB_PORT = 7200 -@pytest.fixture(scope="function") -def rdf4j_container(): - container = DockerContainer(RDF4J_IMAGE) - container.with_exposed_ports(RDF4J_PORT) - container.start() - wait_for_logs(container, "oejs.Server:main: Started") - yield container - container.stop() - - @pytest.fixture(scope="function") 
def graphdb_container(): with DockerImage(str(pathlib.Path(__file__).parent / "docker")) as image: From f52473e6e14bc1a2aaf93aa7590a1bd3aa75199a Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Sun, 2 Nov 2025 15:46:56 +1000 Subject: [PATCH 12/54] chore: add testcontainer pytest marker, refactor rdf4j test structure, add overwrite and get methods on Repository class and add tests for them --- Taskfile.yml | 8 + pyproject.toml | 1 + rdflib/contrib/rdf4j/client.py | 92 +++++- test/test_rdf4j/data/quads-1.nq | 2 + test/test_rdf4j/{ => test_e2e}/conftest.py | 0 .../{ => test_e2e}/docker/Dockerfile | 0 .../{ => test_e2e}/docker/settings.txt | 0 .../{ => test_e2e}/docker/users.txt | 0 .../repo-configs/test-repo-config.ttl | 0 test/test_rdf4j/{ => test_e2e}/test_client.py | 3 + .../{ => test_e2e}/test_graphdb/conftest.py | 0 .../test_graphdb_repo_management.py} | 2 + .../{ => test_e2e}/test_repo_management.py | 5 + test/test_rdf4j/test_repo_overwrite_method.py | 279 ++++++++++++++++++ 14 files changed, 388 insertions(+), 4 deletions(-) create mode 100644 test/test_rdf4j/data/quads-1.nq rename test/test_rdf4j/{ => test_e2e}/conftest.py (100%) rename test/test_rdf4j/{ => test_e2e}/docker/Dockerfile (100%) rename test/test_rdf4j/{ => test_e2e}/docker/settings.txt (100%) rename test/test_rdf4j/{ => test_e2e}/docker/users.txt (100%) rename test/test_rdf4j/{ => test_e2e}/repo-configs/test-repo-config.ttl (100%) rename test/test_rdf4j/{ => test_e2e}/test_client.py (86%) rename test/test_rdf4j/{ => test_e2e}/test_graphdb/conftest.py (100%) rename test/test_rdf4j/{test_graphdb/test_repo_management.py => test_e2e/test_graphdb/test_graphdb_repo_management.py} (97%) rename test/test_rdf4j/{ => test_e2e}/test_repo_management.py (95%) create mode 100644 test/test_rdf4j/test_repo_overwrite_method.py diff --git a/Taskfile.yml b/Taskfile.yml index 735b634f7..0c913c83c 100644 --- a/Taskfile.yml +++ b/Taskfile.yml @@ -378,3 +378,11 @@ tasks: sys.stderr.write(f"removing {path}\n") 
shutil.rmtree(path, ignore_errors=True) ' {{.RIMRAF_TARGET}} + + test:rdf4j: + desc: Run fast tests against rdflib.contrib.rdf4j package + cmd: '{{.TEST_HARNESS}}{{.RUN_PREFIX}} pytest -m "not (testcontainer or webtest)" test/test_rdf4j' + + test:rdf4j:all: + desc: Run all tests against rdflib.contrib.rdf4j package + cmd: '{{.TEST_HARNESS}}{{.RUN_PREFIX}} pytest test/test_rdf4j' diff --git a/pyproject.toml b/pyproject.toml index 6d30cb266..4101742e0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -224,6 +224,7 @@ filterwarnings = [ "ignore:Code. _pytestfixturefunction is not defined in namespace .*:UserWarning", ] markers = [ + "testcontainer: mark a test that uses testcontainer", "webtest: mark a test as using the internet", ] # log_cli = true diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index 96bb4ec06..132b66652 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -2,8 +2,9 @@ from __future__ import annotations +import io from dataclasses import dataclass -from typing import Any +from typing import Any, BinaryIO, Iterable import httpx @@ -15,6 +16,8 @@ RepositoryNotFoundError, RepositoryNotHealthyError, ) +from rdflib.graph import DATASET_DEFAULT_GRAPH_ID, Dataset, Graph +from rdflib.term import IdentifiedNode, URIRef @dataclass(frozen=True) @@ -90,6 +93,87 @@ def health(self) -> bool: except httpx.RequestError: raise + def get(self, content_type: str = "application/n-quads") -> str | Graph | Dataset: + # TODO: add parameters + try: + headers = {"Accept": content_type} + response = self.http_client.get( + f"/repositories/{self.identifier}/statements", headers=headers + ) + response.raise_for_status() + triple_formats = [ + "application/n-triples", + "text/turtle", + "application/rdf+xml", + ] + if content_type in triple_formats: + return Graph().parse(data=response.text, format=content_type) + return Dataset().parse(data=response.text, format=content_type) + except (httpx.RequestError, 
httpx.HTTPStatusError): + raise + + def upload(self, data, content_type: str, base_uri: str): + raise NotImplementedError + + def overwrite( + self, + data: str | bytes | BinaryIO, + content_type: str, + graph_name: IdentifiedNode | Iterable[IdentifiedNode] | str | None = None, + base_uri: str | None = None, + ): + if isinstance(data, str): + # Check if it looks like a file path. Assumes file path length is less than 260. + if "\n" not in data and len(data) < 260: + try: + stream = open(data, "rb") + should_close = True + except (FileNotFoundError, OSError): + # Treat as raw string content + stream = io.BytesIO(data.encode("utf-8")) + should_close = False + else: + # Treat as raw string content + stream = io.BytesIO(data.encode("utf-8")) + should_close = False + elif isinstance(data, bytes): + stream = io.BytesIO(data) + should_close = False + else: + # Assume it's already a file-like object + stream = data + should_close = False + + try: + headers = {"Content-Type": content_type} + params = {} + if graph_name is not None and isinstance(graph_name, IdentifiedNode): + if graph_name == DATASET_DEFAULT_GRAPH_ID: + # Special RDF4J null value for context-less statements. 
+ params["context"] = "null" + else: + params["context"] = graph_name.n3() + elif graph_name is not None and isinstance(graph_name, str): + params["context"] = URIRef(graph_name).n3() + elif graph_name is not None and isinstance(graph_name, Iterable): + graph_names = ",".join([x.n3() for x in graph_name]) + params["context"] = graph_names + + if base_uri is not None: + params["baseURI"] = base_uri + + response = self.http_client.put( + f"/repositories/{self.identifier}/statements", + headers=headers, + params=params, + content=stream, + ) + response.raise_for_status() + + finally: + if should_close: + stream.close() + class RepositoryManager: """A client to manage server-level repository operations.""" @@ -163,14 +247,14 @@ def get(self, repository_id: str) -> Repository: raise def create( - self, repository_id: str, data: str, format: str = "text/turtle" + self, repository_id: str, data: str, content_type: str = "text/turtle" ) -> Repository: """Create a new repository. Parameters: repository_id: The identifier of the repository. data: The repository configuration in RDF. - format: The repository configuration format. + content_type: The repository configuration content type. Raises: RepositoryAlreadyExistsError: If the repository already exists. @@ -179,7 +263,7 @@ def create( httpx.HTTPStatusError: Unhandled status code error. """ try: - headers = {"Content-Type": format} + headers = {"Content-Type": content_type} response = self.http_client.put( f"/repositories/{repository_id}", headers=headers, content=data ) diff --git a/test/test_rdf4j/data/quads-1.nq b/test/test_rdf4j/data/quads-1.nq new file mode 100644 index 000000000..332b48ded --- /dev/null +++ b/test/test_rdf4j/data/quads-1.nq @@ -0,0 +1,2 @@ + . + . 
diff --git a/test/test_rdf4j/conftest.py b/test/test_rdf4j/test_e2e/conftest.py similarity index 100% rename from test/test_rdf4j/conftest.py rename to test/test_rdf4j/test_e2e/conftest.py diff --git a/test/test_rdf4j/docker/Dockerfile b/test/test_rdf4j/test_e2e/docker/Dockerfile similarity index 100% rename from test/test_rdf4j/docker/Dockerfile rename to test/test_rdf4j/test_e2e/docker/Dockerfile diff --git a/test/test_rdf4j/docker/settings.txt b/test/test_rdf4j/test_e2e/docker/settings.txt similarity index 100% rename from test/test_rdf4j/docker/settings.txt rename to test/test_rdf4j/test_e2e/docker/settings.txt diff --git a/test/test_rdf4j/docker/users.txt b/test/test_rdf4j/test_e2e/docker/users.txt similarity index 100% rename from test/test_rdf4j/docker/users.txt rename to test/test_rdf4j/test_e2e/docker/users.txt diff --git a/test/test_rdf4j/repo-configs/test-repo-config.ttl b/test/test_rdf4j/test_e2e/repo-configs/test-repo-config.ttl similarity index 100% rename from test/test_rdf4j/repo-configs/test-repo-config.ttl rename to test/test_rdf4j/test_e2e/repo-configs/test-repo-config.ttl diff --git a/test/test_rdf4j/test_client.py b/test/test_rdf4j/test_e2e/test_client.py similarity index 86% rename from test/test_rdf4j/test_client.py rename to test/test_rdf4j/test_e2e/test_client.py index 365d5d34b..45281ee23 100644 --- a/test/test_rdf4j/test_client.py +++ b/test/test_rdf4j/test_e2e/test_client.py @@ -4,15 +4,18 @@ from rdflib.contrib.rdf4j.exceptions import RDF4JUnsupportedProtocolError +@pytest.mark.testcontainer def test_client_close_method(client: RDF4JClient): client.close() assert client._http_client.is_closed +@pytest.mark.testcontainer def test_client_protocol(client: RDF4JClient): assert client.protocol >= 12 +@pytest.mark.testcontainer def test_client_protocol_error(monkeypatch): monkeypatch.setattr(RDF4JClient, "protocol", 11) with pytest.raises(RDF4JUnsupportedProtocolError): diff --git a/test/test_rdf4j/test_graphdb/conftest.py 
b/test/test_rdf4j/test_e2e/test_graphdb/conftest.py similarity index 100% rename from test/test_rdf4j/test_graphdb/conftest.py rename to test/test_rdf4j/test_e2e/test_graphdb/conftest.py diff --git a/test/test_rdf4j/test_graphdb/test_repo_management.py b/test/test_rdf4j/test_e2e/test_graphdb/test_graphdb_repo_management.py similarity index 97% rename from test/test_rdf4j/test_graphdb/test_repo_management.py rename to test/test_rdf4j/test_e2e/test_graphdb/test_graphdb_repo_management.py index 7916d0a5e..6102fa424 100644 --- a/test/test_rdf4j/test_graphdb/test_repo_management.py +++ b/test/test_rdf4j/test_e2e/test_graphdb/test_graphdb_repo_management.py @@ -13,6 +13,7 @@ # TODO: consider parameterizing the client (RDF4JClient, GraphDBClient) +@pytest.mark.testcontainer def test_repo_manager_crud(client: GraphDBClient): # Empty state assert client.repositories.list() == [] @@ -43,6 +44,7 @@ def test_repo_manager_crud(client: GraphDBClient): client.repositories.delete("test-repo") +@pytest.mark.testcontainer def test_repo_not_healthy(client: GraphDBClient, monkeypatch): config_path = ( pathlib.Path(__file__).parent.parent / "repo-configs/test-repo-config.ttl" diff --git a/test/test_rdf4j/test_repo_management.py b/test/test_rdf4j/test_e2e/test_repo_management.py similarity index 95% rename from test/test_rdf4j/test_repo_management.py rename to test/test_rdf4j/test_e2e/test_repo_management.py index ed706a5c6..28cd77e50 100644 --- a/test/test_rdf4j/test_repo_management.py +++ b/test/test_rdf4j/test_e2e/test_repo_management.py @@ -14,16 +14,19 @@ # TODO: only run these tests on py39 or greater. Testcontainers not available on py38. 
+@pytest.mark.testcontainer def test_repos(client: RDF4JClient): assert client.repositories.list() == [] +@pytest.mark.testcontainer def test_list_repo_non_existent(client: RDF4JClient): assert client.repositories.list() == [] with pytest.raises(RepositoryNotFoundError): assert client.repositories.get("non-existent") is None +@pytest.mark.testcontainer def test_list_repo_format_error(client: RDF4JClient, monkeypatch): class MockResponse: def json(self): @@ -37,6 +40,7 @@ def raise_for_status(self): client.repositories.list() +@pytest.mark.testcontainer def test_repo_manager_crud(client: RDF4JClient): # Empty state assert client.repositories.list() == [] @@ -65,6 +69,7 @@ def test_repo_manager_crud(client: RDF4JClient): client.repositories.delete("test-repo") +@pytest.mark.testcontainer def test_repo_not_healthy(client: RDF4JClient, monkeypatch): config_path = pathlib.Path(__file__).parent / "repo-configs/test-repo-config.ttl" with open(config_path) as file: diff --git a/test/test_rdf4j/test_repo_overwrite_method.py b/test/test_rdf4j/test_repo_overwrite_method.py new file mode 100644 index 000000000..72d739ef5 --- /dev/null +++ b/test/test_rdf4j/test_repo_overwrite_method.py @@ -0,0 +1,279 @@ +from __future__ import annotations + +import io +import pathlib +from typing import Iterable +from unittest.mock import ANY, Mock + +import httpx +import pytest + +from rdflib import BNode, IdentifiedNode, URIRef +from rdflib.contrib.rdf4j import RDF4JClient +from rdflib.contrib.rdf4j.client import Repository, RepositoryManager +from rdflib.graph import DATASET_DEFAULT_GRAPH_ID + + +@pytest.fixture(scope="function") +def client(monkeypatch: pytest.MonkeyPatch): + monkeypatch.setattr(RDF4JClient, "protocol", 12) + with RDF4JClient("http://localhost/", auth=("admin", "admin")) as client: + yield client + + +@pytest.fixture(scope="function") +def repo(client: RDF4JClient, monkeypatch: pytest.MonkeyPatch): + with httpx.Client() as http_client: + monkeypatch.setattr( + 
RepositoryManager, + "create", + lambda *args, **kwargs: Repository("test-repo", http_client), + ) + + repo = client.repositories.create("test-repo", "") + assert repo.identifier == "test-repo" + yield repo + + +def test_repo_overwrite_file_path(repo: Repository, monkeypatch: pytest.MonkeyPatch): + """Test that a file path is treated as a file to be read and closed when done.""" + file_path = pathlib.Path(__file__).parent / "data/quads-1.nq" + mock = Mock() + monkeypatch.setattr(httpx.Client, "put", mock) + headers = { + "Content-Type": "application/n-quads", + } + params = {} + repo.overwrite(str(file_path), "application/n-quads") + mock.assert_called_once_with( + "/repositories/test-repo/statements", + headers=headers, + params=params, + content=ANY, + ) + call_args = mock.call_args + content = call_args.kwargs["content"] + assert hasattr(content, "read") + assert hasattr(content, "name") + assert content.name == str(file_path) + assert content.closed + + +def test_repo_overwrite_buffered_reader( + repo: Repository, monkeypatch: pytest.MonkeyPatch +): + """Test that a file-like object is read and not closed when done.""" + file_path = pathlib.Path(__file__).parent / "data/quads-1.nq" + mock = Mock() + monkeypatch.setattr(httpx.Client, "put", mock) + with open(file_path, "rb") as file: + headers = { + "Content-Type": "application/n-quads", + } + params = {} + repo.overwrite(file, "application/n-quads") + mock.assert_called_once_with( + "/repositories/test-repo/statements", + headers=headers, + params=params, + content=file, + ) + call_args = mock.call_args + content = call_args.kwargs["content"] + assert not content.closed + + +@pytest.mark.parametrize( + "data", + [ + " .", + b" .", + ], +) +def test_repo_overwrite_data( + repo: Repository, data: str | bytes, monkeypatch: pytest.MonkeyPatch +): + """Test that str and bytes data is treated as content.""" + mock = Mock() + monkeypatch.setattr(httpx.Client, "put", mock) + headers = { + "Content-Type": 
"application/n-quads", + } + params = {} + repo.overwrite(data, "application/n-quads") + mock.assert_called_once_with( + "/repositories/test-repo/statements", + headers=headers, + params=params, + content=ANY, + ) + call_args = mock.call_args + content = call_args.kwargs["content"] + assert isinstance(content, io.BytesIO) + assert not content.closed + + +@pytest.mark.parametrize( + "graph_name, expected_graph_name_param", + [ + [DATASET_DEFAULT_GRAPH_ID, "null"], + ["http://example.com/graph", ""], + [URIRef("http://example.com/graph"), ""], + [BNode("some-bnode"), "_:some-bnode"], + [ + [URIRef("http://example.com/graph"), BNode("some-bnode")], + ",_:some-bnode", + ], + [None, None], + ], +) +def test_repo_overwrite_graph_name( + repo: Repository, + graph_name: IdentifiedNode | Iterable[IdentifiedNode] | str | None, + expected_graph_name_param: str, + monkeypatch: pytest.MonkeyPatch, +): + """Test that graph_name is passed as a query parameter and correctly handles the different type variations.""" + mock = Mock() + monkeypatch.setattr(httpx.Client, "put", mock) + headers = { + "Content-Type": "application/n-quads", + } + if graph_name is None: + params = {} + else: + params = {"context": expected_graph_name_param} + repo.overwrite("", "application/n-quads", graph_name) + mock.assert_called_once_with( + "/repositories/test-repo/statements", + headers=headers, + params=params, + content=ANY, + ) + + +@pytest.mark.parametrize( + "base_uri, expected_params", + [ + ["", {"baseURI": ""}], + ["http://example.com", {"baseURI": "http://example.com"}], + [None, {}], + ], +) +def test_repo_overwrite_base_uri( + repo: Repository, + base_uri: str | None, + expected_params: dict[str, str], + monkeypatch: pytest.MonkeyPatch, +): + """Test that base_uri is passed as a query parameter.""" + mock = Mock() + monkeypatch.setattr(httpx.Client, "put", mock) + headers = { + "Content-Type": "application/n-quads", + } + repo.overwrite("", "application/n-quads", base_uri=base_uri) + 
mock.assert_called_once_with( + "/repositories/test-repo/statements", + headers=headers, + params=expected_params, + content=ANY, + ) + + +def test_repo_overwrite_nonexistent_file_path( + repo: Repository, monkeypatch: pytest.MonkeyPatch +): + """Test that a string that looks like a file path but doesn't exist is treated as content.""" + mock = Mock() + monkeypatch.setattr(httpx.Client, "put", mock) + headers = { + "Content-Type": "application/n-quads", + } + params = {} + nonexistent_path = "/nonexistent/path/file.nq" + repo.overwrite(nonexistent_path, "application/n-quads") + mock.assert_called_once_with( + "/repositories/test-repo/statements", + headers=headers, + params=params, + content=ANY, + ) + call_args = mock.call_args + content = call_args.kwargs["content"] + assert isinstance(content, io.BytesIO) + assert not content.closed + + +def test_repo_overwrite_string_with_newline( + repo: Repository, monkeypatch: pytest.MonkeyPatch +): + """Test that a string with newlines is treated as content, not a file path.""" + mock = Mock() + monkeypatch.setattr(httpx.Client, "put", mock) + headers = { + "Content-Type": "application/n-quads", + } + params = {} + data_with_newline = " .\n ." 
+ repo.overwrite(data_with_newline, "application/n-quads") + mock.assert_called_once_with( + "/repositories/test-repo/statements", + headers=headers, + params=params, + content=ANY, + ) + call_args = mock.call_args + content = call_args.kwargs["content"] + assert isinstance(content, io.BytesIO) + assert not content.closed + + +def test_repo_overwrite_long_string(repo: Repository, monkeypatch: pytest.MonkeyPatch): + """Test that a string longer than 260 characters is treated as content, not a file path.""" + mock = Mock() + monkeypatch.setattr(httpx.Client, "put", mock) + headers = { + "Content-Type": "application/n-quads", + } + params = {} + # Create a string longer than 260 characters + long_string = "a" * 261 + repo.overwrite(long_string, "application/n-quads") + mock.assert_called_once_with( + "/repositories/test-repo/statements", + headers=headers, + params=params, + content=ANY, + ) + call_args = mock.call_args + content = call_args.kwargs["content"] + assert isinstance(content, io.BytesIO) + assert not content.closed + + +def test_repo_overwrite_graph_name_and_base_uri( + repo: Repository, monkeypatch: pytest.MonkeyPatch +): + """Test that both graph_name and base_uri can be provided together.""" + mock = Mock() + monkeypatch.setattr(httpx.Client, "put", mock) + headers = { + "Content-Type": "application/n-quads", + } + params = { + "context": "", + "baseURI": "http://example.com/base", + } + repo.overwrite( + "", + "application/n-quads", + graph_name="http://example.com/graph", + base_uri="http://example.com/base", + ) + mock.assert_called_once_with( + "/repositories/test-repo/statements", + headers=headers, + params=params, + content=ANY, + ) From b550323d8f049a6a117188e67463fe9924ba02cc Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Mon, 3 Nov 2025 12:23:50 +1000 Subject: [PATCH 13/54] test: rearrange unit tests --- test/test_rdf4j/{test_e2e => e2e}/conftest.py | 0 .../{test_e2e => e2e}/docker/Dockerfile | 0 .../{test_e2e => e2e}/docker/settings.txt | 0 
.../{test_e2e => e2e}/docker/users.txt | 0 .../repo-configs/test-repo-config.ttl | 0 .../{test_e2e => e2e}/test_client.py | 0 .../test_graphdb/conftest.py | 0 .../test_graphdb_repo_management.py | 0 .../{test_e2e => e2e}/test_repo_management.py | 0 test/test_rdf4j/unit/repository/conftest.py | 28 +++++++++++++++++++ .../repository}/test_repo_overwrite_method.py | 28 ++----------------- 11 files changed, 31 insertions(+), 25 deletions(-) rename test/test_rdf4j/{test_e2e => e2e}/conftest.py (100%) rename test/test_rdf4j/{test_e2e => e2e}/docker/Dockerfile (100%) rename test/test_rdf4j/{test_e2e => e2e}/docker/settings.txt (100%) rename test/test_rdf4j/{test_e2e => e2e}/docker/users.txt (100%) rename test/test_rdf4j/{test_e2e => e2e}/repo-configs/test-repo-config.ttl (100%) rename test/test_rdf4j/{test_e2e => e2e}/test_client.py (100%) rename test/test_rdf4j/{test_e2e => e2e}/test_graphdb/conftest.py (100%) rename test/test_rdf4j/{test_e2e => e2e}/test_graphdb/test_graphdb_repo_management.py (100%) rename test/test_rdf4j/{test_e2e => e2e}/test_repo_management.py (100%) create mode 100644 test/test_rdf4j/unit/repository/conftest.py rename test/test_rdf4j/{ => unit/repository}/test_repo_overwrite_method.py (89%) diff --git a/test/test_rdf4j/test_e2e/conftest.py b/test/test_rdf4j/e2e/conftest.py similarity index 100% rename from test/test_rdf4j/test_e2e/conftest.py rename to test/test_rdf4j/e2e/conftest.py diff --git a/test/test_rdf4j/test_e2e/docker/Dockerfile b/test/test_rdf4j/e2e/docker/Dockerfile similarity index 100% rename from test/test_rdf4j/test_e2e/docker/Dockerfile rename to test/test_rdf4j/e2e/docker/Dockerfile diff --git a/test/test_rdf4j/test_e2e/docker/settings.txt b/test/test_rdf4j/e2e/docker/settings.txt similarity index 100% rename from test/test_rdf4j/test_e2e/docker/settings.txt rename to test/test_rdf4j/e2e/docker/settings.txt diff --git a/test/test_rdf4j/test_e2e/docker/users.txt b/test/test_rdf4j/e2e/docker/users.txt similarity index 100% rename 
from test/test_rdf4j/test_e2e/docker/users.txt rename to test/test_rdf4j/e2e/docker/users.txt diff --git a/test/test_rdf4j/test_e2e/repo-configs/test-repo-config.ttl b/test/test_rdf4j/e2e/repo-configs/test-repo-config.ttl similarity index 100% rename from test/test_rdf4j/test_e2e/repo-configs/test-repo-config.ttl rename to test/test_rdf4j/e2e/repo-configs/test-repo-config.ttl diff --git a/test/test_rdf4j/test_e2e/test_client.py b/test/test_rdf4j/e2e/test_client.py similarity index 100% rename from test/test_rdf4j/test_e2e/test_client.py rename to test/test_rdf4j/e2e/test_client.py diff --git a/test/test_rdf4j/test_e2e/test_graphdb/conftest.py b/test/test_rdf4j/e2e/test_graphdb/conftest.py similarity index 100% rename from test/test_rdf4j/test_e2e/test_graphdb/conftest.py rename to test/test_rdf4j/e2e/test_graphdb/conftest.py diff --git a/test/test_rdf4j/test_e2e/test_graphdb/test_graphdb_repo_management.py b/test/test_rdf4j/e2e/test_graphdb/test_graphdb_repo_management.py similarity index 100% rename from test/test_rdf4j/test_e2e/test_graphdb/test_graphdb_repo_management.py rename to test/test_rdf4j/e2e/test_graphdb/test_graphdb_repo_management.py diff --git a/test/test_rdf4j/test_e2e/test_repo_management.py b/test/test_rdf4j/e2e/test_repo_management.py similarity index 100% rename from test/test_rdf4j/test_e2e/test_repo_management.py rename to test/test_rdf4j/e2e/test_repo_management.py diff --git a/test/test_rdf4j/unit/repository/conftest.py b/test/test_rdf4j/unit/repository/conftest.py new file mode 100644 index 000000000..12951e377 --- /dev/null +++ b/test/test_rdf4j/unit/repository/conftest.py @@ -0,0 +1,28 @@ +from __future__ import annotations + +import httpx +import pytest + +from rdflib.contrib.rdf4j import RDF4JClient +from rdflib.contrib.rdf4j.client import Repository, RepositoryManager + + +@pytest.fixture(scope="function") +def client(monkeypatch: pytest.MonkeyPatch): + monkeypatch.setattr(RDF4JClient, "protocol", 12) + with 
RDF4JClient("http://localhost/", auth=("admin", "admin")) as client: + yield client + + +@pytest.fixture(scope="function") +def repo(client: RDF4JClient, monkeypatch: pytest.MonkeyPatch): + with httpx.Client() as http_client: + monkeypatch.setattr( + RepositoryManager, + "create", + lambda *args, **kwargs: Repository("test-repo", http_client), + ) + + repo = client.repositories.create("test-repo", "") + assert repo.identifier == "test-repo" + yield repo diff --git a/test/test_rdf4j/test_repo_overwrite_method.py b/test/test_rdf4j/unit/repository/test_repo_overwrite_method.py similarity index 89% rename from test/test_rdf4j/test_repo_overwrite_method.py rename to test/test_rdf4j/unit/repository/test_repo_overwrite_method.py index 72d739ef5..373735f2f 100644 --- a/test/test_rdf4j/test_repo_overwrite_method.py +++ b/test/test_rdf4j/unit/repository/test_repo_overwrite_method.py @@ -9,35 +9,13 @@ import pytest from rdflib import BNode, IdentifiedNode, URIRef -from rdflib.contrib.rdf4j import RDF4JClient -from rdflib.contrib.rdf4j.client import Repository, RepositoryManager +from rdflib.contrib.rdf4j.client import Repository from rdflib.graph import DATASET_DEFAULT_GRAPH_ID -@pytest.fixture(scope="function") -def client(monkeypatch: pytest.MonkeyPatch): - monkeypatch.setattr(RDF4JClient, "protocol", 12) - with RDF4JClient("http://localhost/", auth=("admin", "admin")) as client: - yield client - - -@pytest.fixture(scope="function") -def repo(client: RDF4JClient, monkeypatch: pytest.MonkeyPatch): - with httpx.Client() as http_client: - monkeypatch.setattr( - RepositoryManager, - "create", - lambda *args, **kwargs: Repository("test-repo", http_client), - ) - - repo = client.repositories.create("test-repo", "") - assert repo.identifier == "test-repo" - yield repo - - def test_repo_overwrite_file_path(repo: Repository, monkeypatch: pytest.MonkeyPatch): """Test that a file path is treated as a file to be read and closed when done.""" - file_path = pathlib.Path(__file__).parent 
/ "data/quads-1.nq" + file_path = pathlib.Path(__file__).parent.parent.parent / "data/quads-1.nq" mock = Mock() monkeypatch.setattr(httpx.Client, "put", mock) headers = { @@ -63,7 +41,7 @@ def test_repo_overwrite_buffered_reader( repo: Repository, monkeypatch: pytest.MonkeyPatch ): """Test that a file-like object is read and not closed when done.""" - file_path = pathlib.Path(__file__).parent / "data/quads-1.nq" + file_path = pathlib.Path(__file__).parent.parent.parent / "data/quads-1.nq" mock = Mock() monkeypatch.setattr(httpx.Client, "put", mock) with open(file_path, "rb") as file: From cc1840c84827d1b8ad4a39900cfb1ee51c59fdd6 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Mon, 3 Nov 2025 14:39:04 +1000 Subject: [PATCH 14/54] feat: Repository get method --- mkdocs.yml | 2 +- rdflib/contrib/rdf4j/client.py | 90 ++++++-- rdflib/contrib/rdf4j/exceptions.py | 4 + rdflib/contrib/rdf4j/util.py | 32 +++ .../unit/repository/test_repo_get_method.py | 201 ++++++++++++++++++ test/test_rdf4j/unit/util/test_rdf4j_util.py | 28 +++ 6 files changed, 334 insertions(+), 23 deletions(-) create mode 100644 rdflib/contrib/rdf4j/util.py create mode 100644 test/test_rdf4j/unit/repository/test_repo_get_method.py create mode 100644 test/test_rdf4j/unit/util/test_rdf4j_util.py diff --git a/mkdocs.yml b/mkdocs.yml index 7524e9355..da4f8e63f 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -43,7 +43,7 @@ nav: - Container: apidocs/rdflib.container.md - Collection: apidocs/rdflib.collection.md - Paths: apidocs/rdflib.paths.md - - RDF4J: apidocs/rdflib.rdf4j.md + - RDF4J: apidocs/rdflib.contrib.rdf4j.md - Util: apidocs/rdflib.util.md - Plugins: - Parsers: apidocs/rdflib.plugins.parsers.md diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index 132b66652..c00f77047 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -3,6 +3,7 @@ from __future__ import annotations import io +import typing as t from dataclasses import dataclass from typing 
import Any, BinaryIO, Iterable @@ -14,10 +15,15 @@ RepositoryError, RepositoryFormatError, RepositoryNotFoundError, - RepositoryNotHealthyError, + RepositoryNotHealthyError, RDFLibParserError, ) -from rdflib.graph import DATASET_DEFAULT_GRAPH_ID, Dataset, Graph -from rdflib.term import IdentifiedNode, URIRef +from rdflib.contrib.rdf4j.util import build_context_param +from rdflib.graph import Dataset, Graph +from rdflib.term import IdentifiedNode, Literal, URIRef + +SubjectType = t.Union[IdentifiedNode, None] +PredicateType = t.Union[URIRef, None] +ObjectType = t.Union[IdentifiedNode, Literal, None] @dataclass(frozen=True) @@ -93,12 +99,59 @@ def health(self) -> bool: except httpx.RequestError: raise - def get(self, content_type: str = "application/n-quads") -> str | Graph | Dataset: - # TODO: add parameters + def get( + self, + subj: SubjectType = None, + pred: PredicateType = None, + obj: ObjectType = None, + graph_name: IdentifiedNode | Iterable[IdentifiedNode] | str | None = None, + infer: bool = True, + content_type: str | None = None, + ) -> Graph | Dataset: + """Get RDF statements from the repository matching the filtering parameters. + + Args: + subj: Subject of the statement. + pred: Predicate of the statement. + obj: Object of the statement. + graph_name: Graph name(s) to restrict to. + + Default value `None` queries all graphs. + + To query just the default graph, use + [`DATASET_DEFAULT_GRAPH_ID`][rdflib.graph.DATASET_DEFAULT_GRAPH_ID]. + + infer: Specifies whether inferred statements should be included in the result. + content_type: The content type of the response. + A triple-based format returns a [Graph][rdflib.graph.Graph], while a + quad-based format returns a [`Dataset`][rdflib.graph.Dataset]. + + Returns: + A [`Graph`][rdflib.graph.Graph] or [`Dataset`][rdflib.graph.Dataset] object. + + Raises: + httpx.RequestError: On network/connection issues. + httpx.HTTPStatusError: Unhandled status code error. 
+ """ + if content_type is None: + content_type = "application/n-quads" + headers = {"Accept": content_type} + params = {} + build_context_param(params, graph_name) + if subj is not None: + params["subj"] = subj.n3() + if pred is not None: + params["pred"] = pred.n3() + if obj is not None: + params["obj"] = obj.n3() + if not infer: + params["infer"] = "false" + try: - headers = {"Accept": content_type} response = self.http_client.get( - f"/repositories/{self.identifier}/statements", headers=headers + f"/repositories/{self.identifier}/statements", + headers=headers, + params=params, ) response.raise_for_status() triple_formats = [ @@ -106,9 +159,12 @@ def get(self, content_type: str = "application/n-quads") -> str | Graph | Datase "text/turtle", "application/rdf+xml", ] - if content_type in triple_formats: - return Graph().parse(data=response.text, format=content_type) - return Dataset().parse(data=response.text, format=content_type) + try: + if content_type in triple_formats: + return Graph().parse(data=response.text, format=content_type) + return Dataset().parse(data=response.text, format=content_type) + except Exception as err: + raise RDFLibParserError(f"Error parsing RDF: {err}") from err except (httpx.RequestError, httpx.HTTPStatusError): raise @@ -147,17 +203,7 @@ def overwrite( try: headers = {"Content-Type": content_type} params = {} - if graph_name is not None and isinstance(graph_name, IdentifiedNode): - if graph_name == DATASET_DEFAULT_GRAPH_ID: - # Special RDF4J null value for context-less statements. 
- params["context"] = "null" - else: - params["context"] = graph_name.n3() - elif graph_name is not None and isinstance(graph_name, str): - params["context"] = URIRef(graph_name).n3() - elif graph_name is not None and isinstance(graph_name, Iterable): - graph_names = ",".join([x.n3() for x in graph_name]) - params["context"] = graph_names + build_context_param(params, graph_name) if base_uri is not None: params["baseURI"] = base_uri @@ -224,7 +270,7 @@ def list(self) -> list[RepositoryListingResult]: def get(self, repository_id: str) -> Repository: """Get a repository by ID. - !!! note + !!! Note This performs a health check before returning the repository object. Parameters: diff --git a/rdflib/contrib/rdf4j/exceptions.py b/rdflib/contrib/rdf4j/exceptions.py index 30ab2b87f..a59b6d221 100644 --- a/rdflib/contrib/rdf4j/exceptions.py +++ b/rdflib/contrib/rdf4j/exceptions.py @@ -23,3 +23,7 @@ class RepositoryAlreadyExistsError(RepositoryError): class RDF4JUnsupportedProtocolError(Exception): """Raised when the server does not support the protocol version.""" + + +class RDFLibParserError(Exception): + """Raised when there is an error parsing the RDF document.""" diff --git a/rdflib/contrib/rdf4j/util.py b/rdflib/contrib/rdf4j/util.py new file mode 100644 index 000000000..ff2f621a1 --- /dev/null +++ b/rdflib/contrib/rdf4j/util.py @@ -0,0 +1,32 @@ +from __future__ import annotations + +import typing as t + +from rdflib.graph import DATASET_DEFAULT_GRAPH_ID +from rdflib.term import IdentifiedNode, URIRef + + +def build_context_param( + params: dict[str, str], + graph_name: IdentifiedNode | t.Iterable[IdentifiedNode] | str | None = None, +) -> None: + """Build the RDF4J http context param. + + !!! Note + This mutates the params dictionary key `context`. + + Args: + params: The `httpx.Request` parameter dictionary. + graph_name: The graph name or iterable of graph names. 
+ """ + if graph_name is not None and isinstance(graph_name, IdentifiedNode): + if graph_name == DATASET_DEFAULT_GRAPH_ID: + # Special RDF4J null value for context-less statements. + params["context"] = "null" + else: + params["context"] = graph_name.n3() + elif graph_name is not None and isinstance(graph_name, str): + params["context"] = URIRef(graph_name).n3() + elif graph_name is not None and isinstance(graph_name, t.Iterable): + graph_names = ",".join([x.n3() for x in graph_name]) + params["context"] = graph_names diff --git a/test/test_rdf4j/unit/repository/test_repo_get_method.py b/test/test_rdf4j/unit/repository/test_repo_get_method.py new file mode 100644 index 000000000..a131e490b --- /dev/null +++ b/test/test_rdf4j/unit/repository/test_repo_get_method.py @@ -0,0 +1,201 @@ +from __future__ import annotations + +import typing as t +from unittest.mock import Mock + +import httpx +import pytest + +from rdflib import Dataset, Graph +from rdflib.contrib.rdf4j.client import ( + ObjectType, + PredicateType, + Repository, + SubjectType, +) +from rdflib.graph import DATASET_DEFAULT_GRAPH_ID +from rdflib.term import BNode, IdentifiedNode, URIRef + + +@pytest.mark.parametrize( + "content_type, data, expected_class_type", + [ + [ + None, + " .", + Dataset, + ], + [ + "application/trig", + " { . }", + Dataset, + ], + [ + "application/n-triples", + " .", + Graph, + ], + [ + "text/turtle", + " .", + Graph, + ], + [ + "application/rdf+xml", + """ + + + + + +""", + Graph, + ], + ], +) +def test_repo_content_type( + repo: Repository, + monkeypatch: pytest.MonkeyPatch, + content_type: str | None, + data: str, + expected_class_type: type, +): + """ + Test that the content type is set correctly on the request and that the response is + parsed correctly. 
+ """ + mock_response = Mock(spec=httpx.Response, text=data) + mock_httpx_get = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) + + result = repo.get(content_type=content_type) + headers = {"Accept": content_type or "application/n-quads"} + params = {} + mock_httpx_get.assert_called_once_with( + "/repositories/test-repo/statements", + headers=headers, + params=params, + ) + assert isinstance(result, expected_class_type) + + +@pytest.mark.parametrize( + "graph_name, expected_graph_name_param", + [ + [DATASET_DEFAULT_GRAPH_ID, "null"], + ["http://example.com/graph", ""], + [URIRef("http://example.com/graph"), ""], + [BNode("some-bnode"), "_:some-bnode"], + [ + [URIRef("http://example.com/graph"), BNode("some-bnode")], + ",_:some-bnode", + ], + [None, None], + ], +) +def test_repo_get_graph_name( + repo: Repository, + graph_name: IdentifiedNode | t.Iterable[IdentifiedNode] | str | None, + expected_graph_name_param: str, + monkeypatch: pytest.MonkeyPatch, +): + """ + Test that graph_name is passed as a query parameter and correctly handles the + different type variations. 
+ """ + mock_response = Mock(spec=httpx.Response, text="") + mock_httpx_get = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) + headers = { + "Accept": "application/n-quads", + } + if graph_name is None: + params = {} + else: + params = {"context": expected_graph_name_param} + repo.get(graph_name=graph_name) + mock_httpx_get.assert_called_once_with( + "/repositories/test-repo/statements", + headers=headers, + params=params, + ) + + +@pytest.mark.parametrize("infer, expected_value", [[True, KeyError], [False, "false"]]) +def test_repo_get_infer( + repo: Repository, + monkeypatch: pytest.MonkeyPatch, + infer: bool, + expected_value: Exception | str, +): + """Test that the "infer" query parameter is set correctly.""" + mock_response = Mock(spec=httpx.Response, text="") + mock_httpx_get = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) + headers = { + "Accept": "application/n-quads", + } + + params = {} + if isinstance(expected_value, str): + params["infer"] = expected_value + + repo.get(infer=infer) + mock_httpx_get.assert_called_once_with( + "/repositories/test-repo/statements", + headers=headers, + params=params, + ) + + +@pytest.mark.parametrize( + "subj, pred, obj, expected_params", + [ + [ + URIRef("http://example.com/s"), + URIRef("http://example.com/p"), + URIRef("http://example.com/o"), + { + "subj": "", + "pred": "", + "obj": "", + }, + ], + [None, None, None, {}], + [ + BNode("some-bnode"), + URIRef("http://example.com/p"), + BNode("some-bnode-2"), + { + "subj": "_:some-bnode", + "pred": "", + "obj": "_:some-bnode-2", + }, + ], + ], +) +def test_repo_get_spo( + repo: Repository, + monkeypatch: pytest.MonkeyPatch, + subj: SubjectType, + pred: PredicateType, + obj: ObjectType, + expected_params: dict[str, str], +): + """Test that the subj, pred, and obj query parameters are set correctly.""" + mock_response = Mock(spec=httpx.Response, text="") + mock_httpx_get = 
Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) + headers = { + "Accept": "application/n-quads", + } + + repo.get(subj=subj, pred=pred, obj=obj) + mock_httpx_get.assert_called_once_with( + "/repositories/test-repo/statements", + headers=headers, + params=expected_params, + ) diff --git a/test/test_rdf4j/unit/util/test_rdf4j_util.py b/test/test_rdf4j/unit/util/test_rdf4j_util.py new file mode 100644 index 000000000..63ee02085 --- /dev/null +++ b/test/test_rdf4j/unit/util/test_rdf4j_util.py @@ -0,0 +1,28 @@ +import pytest + +from rdflib.contrib.rdf4j.util import build_context_param +from rdflib.graph import DATASET_DEFAULT_GRAPH_ID +from rdflib.term import BNode, URIRef + + +@pytest.mark.parametrize( + "graph_name, expected_graph_name_param", + [ + [DATASET_DEFAULT_GRAPH_ID, "null"], + ["http://example.com/graph", ""], + [URIRef("http://example.com/graph"), ""], + [BNode("some-bnode"), "_:some-bnode"], + [ + [URIRef("http://example.com/graph"), BNode("some-bnode")], + ",_:some-bnode", + ], + [None, None], + ], +) +def test_build_context_param(graph_name, expected_graph_name_param): + params = {} + build_context_param(params, graph_name) + if graph_name is None: + assert "context" not in params + else: + assert params["context"] == expected_graph_name_param From ced7451a78c5e9487923c66c72a52f60b3aec412 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Mon, 3 Nov 2025 14:59:31 +1000 Subject: [PATCH 15/54] feat: Repository delete method --- rdflib/contrib/rdf4j/client.py | 62 ++++++++--- rdflib/contrib/rdf4j/util.py | 49 ++++++++- test/test_rdf4j/{e2e => test_e2e}/conftest.py | 0 .../{e2e => test_e2e}/docker/Dockerfile | 0 .../{e2e => test_e2e}/docker/settings.txt | 0 .../{e2e => test_e2e}/docker/users.txt | 0 .../repo-configs/test-repo-config.ttl | 0 .../{e2e => test_e2e}/test_client.py | 0 .../test_graphdb/conftest.py | 0 .../test_graphdb_repo_management.py | 0 .../{e2e => test_e2e}/test_repo_management.py | 0 
.../repository/conftest.py | 0 .../repository/test_repo_delete_method.py | 100 ++++++++++++++++++ .../repository/test_repo_get_method.py | 0 .../repository/test_repo_overwrite_method.py | 0 .../util/test_rdf4j_util.py | 0 16 files changed, 197 insertions(+), 14 deletions(-) rename test/test_rdf4j/{e2e => test_e2e}/conftest.py (100%) rename test/test_rdf4j/{e2e => test_e2e}/docker/Dockerfile (100%) rename test/test_rdf4j/{e2e => test_e2e}/docker/settings.txt (100%) rename test/test_rdf4j/{e2e => test_e2e}/docker/users.txt (100%) rename test/test_rdf4j/{e2e => test_e2e}/repo-configs/test-repo-config.ttl (100%) rename test/test_rdf4j/{e2e => test_e2e}/test_client.py (100%) rename test/test_rdf4j/{e2e => test_e2e}/test_graphdb/conftest.py (100%) rename test/test_rdf4j/{e2e => test_e2e}/test_graphdb/test_graphdb_repo_management.py (100%) rename test/test_rdf4j/{e2e => test_e2e}/test_repo_management.py (100%) rename test/test_rdf4j/{unit => test_unit}/repository/conftest.py (100%) create mode 100644 test/test_rdf4j/test_unit/repository/test_repo_delete_method.py rename test/test_rdf4j/{unit => test_unit}/repository/test_repo_get_method.py (100%) rename test/test_rdf4j/{unit => test_unit}/repository/test_repo_overwrite_method.py (100%) rename test/test_rdf4j/{unit => test_unit}/util/test_rdf4j_util.py (100%) diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index c00f77047..ee755659f 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -11,13 +11,18 @@ from rdflib.contrib.rdf4j.exceptions import ( RDF4JUnsupportedProtocolError, + RDFLibParserError, RepositoryAlreadyExistsError, RepositoryError, RepositoryFormatError, RepositoryNotFoundError, - RepositoryNotHealthyError, RDFLibParserError, + RepositoryNotHealthyError, +) +from rdflib.contrib.rdf4j.util import ( + build_context_param, + build_infer_param, + build_spo_param, ) -from rdflib.contrib.rdf4j.util import build_context_param from rdflib.graph import Dataset, 
Graph from rdflib.term import IdentifiedNode, Literal, URIRef @@ -111,9 +116,9 @@ def get( """Get RDF statements from the repository matching the filtering parameters. Args: - subj: Subject of the statement. - pred: Predicate of the statement. - obj: Object of the statement. + subj: Subject of the statement to filter by, or `None` to match all. + pred: Predicate of the statement to filter by, or `None` to match all. + obj: Object of the statement to filter by, or `None` to match all. graph_name: Graph name(s) to restrict to. Default value `None` queries all graphs. @@ -138,14 +143,8 @@ def get( headers = {"Accept": content_type} params = {} build_context_param(params, graph_name) - if subj is not None: - params["subj"] = subj.n3() - if pred is not None: - params["pred"] = pred.n3() - if obj is not None: - params["obj"] = obj.n3() - if not infer: - params["infer"] = "false" + build_spo_param(params, subj, pred, obj) + build_infer_param(params, infer=infer) try: response = self.http_client.get( @@ -220,6 +219,43 @@ def overwrite( if should_close: stream.close() + def delete( + self, + subj: SubjectType = None, + pred: PredicateType = None, + obj: ObjectType = None, + graph_name: IdentifiedNode | Iterable[IdentifiedNode] | str | None = None, + ) -> None: + """Deletes statements from the repository matching the filtering parameters. + + Args: + subj: Subject of the statement to filter by, or `None` to match all. + pred: Predicate of the statement to filter by, or `None` to match all. + obj: Object of the statement to filter by, or `None` to match all. + graph_name: Graph name(s) to restrict to. + + Default value `None` queries all graphs. + + To query just the default graph, use + [`DATASET_DEFAULT_GRAPH_ID`][rdflib.graph.DATASET_DEFAULT_GRAPH_ID]. + + Raises: + httpx.RequestError: On network/connection issues. + httpx.HTTPStatusError: Unhandled status code error. 
+ """ + params = {} + build_context_param(params, graph_name) + build_spo_param(params, subj, pred, obj) + + try: + response = self.http_client.delete( + f"/repositories/{self.identifier}/statements", + params=params, + ) + response.raise_for_status() + except (httpx.RequestError, httpx.HTTPStatusError): + raise + class RepositoryManager: """A client to manage server-level repository operations.""" diff --git a/rdflib/contrib/rdf4j/util.py b/rdflib/contrib/rdf4j/util.py index ff2f621a1..294c11f2b 100644 --- a/rdflib/contrib/rdf4j/util.py +++ b/rdflib/contrib/rdf4j/util.py @@ -5,12 +5,15 @@ from rdflib.graph import DATASET_DEFAULT_GRAPH_ID from rdflib.term import IdentifiedNode, URIRef +if t.TYPE_CHECKING: + from rdflib.contrib.rdf4j.client import ObjectType, PredicateType, SubjectType + def build_context_param( params: dict[str, str], graph_name: IdentifiedNode | t.Iterable[IdentifiedNode] | str | None = None, ) -> None: - """Build the RDF4J http context param. + """Build the RDF4J http context query parameters dictionary. !!! Note This mutates the params dictionary key `context`. @@ -18,6 +21,8 @@ def build_context_param( Args: params: The `httpx.Request` parameter dictionary. graph_name: The graph name or iterable of graph names. + + This is the `context` query parameter value. """ if graph_name is not None and isinstance(graph_name, IdentifiedNode): if graph_name == DATASET_DEFAULT_GRAPH_ID: @@ -30,3 +35,45 @@ def build_context_param( elif graph_name is not None and isinstance(graph_name, t.Iterable): graph_names = ",".join([x.n3() for x in graph_name]) params["context"] = graph_names + + +def build_spo_param( + params: dict[str, str], + subj: SubjectType = None, + pred: PredicateType = None, + obj: ObjectType = None, +) -> None: + """Build the RDF4J http subj, predicate, and object query parameters dictionary. + + !!! Note + This mutates the params dictionary key `subj`, `pred`, and `obj`. + + Args: + params: The `httpx.Request` parameter dictionary. 
+        subj: The `subj` query parameter value.
+        pred: The `pred` query parameter value.
+        obj: The `obj` query parameter value.
+    """
+    if subj is not None:
+        params["subj"] = subj.n3()
+    if pred is not None:
+        params["pred"] = pred.n3()
+    if obj is not None:
+        params["obj"] = obj.n3()
+
+
+def build_infer_param(
+    params: dict[str, str],
+    infer: bool = True,
+) -> None:
+    """Build the RDF4J http infer query parameters dictionary.
+
+    !!! Note
+        This mutates the params dictionary key `infer`.
+
+    Args:
+        params: The `httpx.Request` parameter dictionary.
+        infer: The `infer` query parameter value.
+    """
+    if not infer:
+        params["infer"] = "false"
diff --git a/test/test_rdf4j/e2e/conftest.py b/test/test_rdf4j/test_e2e/conftest.py
similarity index 100%
rename from test/test_rdf4j/e2e/conftest.py
rename to test/test_rdf4j/test_e2e/conftest.py
diff --git a/test/test_rdf4j/e2e/docker/Dockerfile b/test/test_rdf4j/test_e2e/docker/Dockerfile
similarity index 100%
rename from test/test_rdf4j/e2e/docker/Dockerfile
rename to test/test_rdf4j/test_e2e/docker/Dockerfile
diff --git a/test/test_rdf4j/e2e/docker/settings.txt b/test/test_rdf4j/test_e2e/docker/settings.txt
similarity index 100%
rename from test/test_rdf4j/e2e/docker/settings.txt
rename to test/test_rdf4j/test_e2e/docker/settings.txt
diff --git a/test/test_rdf4j/e2e/docker/users.txt b/test/test_rdf4j/test_e2e/docker/users.txt
similarity index 100%
rename from test/test_rdf4j/e2e/docker/users.txt
rename to test/test_rdf4j/test_e2e/docker/users.txt
diff --git a/test/test_rdf4j/e2e/repo-configs/test-repo-config.ttl b/test/test_rdf4j/test_e2e/repo-configs/test-repo-config.ttl
similarity index 100%
rename from test/test_rdf4j/e2e/repo-configs/test-repo-config.ttl
rename to test/test_rdf4j/test_e2e/repo-configs/test-repo-config.ttl
diff --git a/test/test_rdf4j/e2e/test_client.py b/test/test_rdf4j/test_e2e/test_client.py
similarity index 100%
rename from test/test_rdf4j/e2e/test_client.py
rename to 
test/test_rdf4j/test_e2e/test_client.py diff --git a/test/test_rdf4j/e2e/test_graphdb/conftest.py b/test/test_rdf4j/test_e2e/test_graphdb/conftest.py similarity index 100% rename from test/test_rdf4j/e2e/test_graphdb/conftest.py rename to test/test_rdf4j/test_e2e/test_graphdb/conftest.py diff --git a/test/test_rdf4j/e2e/test_graphdb/test_graphdb_repo_management.py b/test/test_rdf4j/test_e2e/test_graphdb/test_graphdb_repo_management.py similarity index 100% rename from test/test_rdf4j/e2e/test_graphdb/test_graphdb_repo_management.py rename to test/test_rdf4j/test_e2e/test_graphdb/test_graphdb_repo_management.py diff --git a/test/test_rdf4j/e2e/test_repo_management.py b/test/test_rdf4j/test_e2e/test_repo_management.py similarity index 100% rename from test/test_rdf4j/e2e/test_repo_management.py rename to test/test_rdf4j/test_e2e/test_repo_management.py diff --git a/test/test_rdf4j/unit/repository/conftest.py b/test/test_rdf4j/test_unit/repository/conftest.py similarity index 100% rename from test/test_rdf4j/unit/repository/conftest.py rename to test/test_rdf4j/test_unit/repository/conftest.py diff --git a/test/test_rdf4j/test_unit/repository/test_repo_delete_method.py b/test/test_rdf4j/test_unit/repository/test_repo_delete_method.py new file mode 100644 index 000000000..06e996098 --- /dev/null +++ b/test/test_rdf4j/test_unit/repository/test_repo_delete_method.py @@ -0,0 +1,100 @@ +from __future__ import annotations + +import typing as t +from unittest.mock import Mock + +import httpx +import pytest + +from rdflib.contrib.rdf4j.client import ( + ObjectType, + PredicateType, + Repository, + SubjectType, +) +from rdflib.graph import DATASET_DEFAULT_GRAPH_ID +from rdflib.term import BNode, IdentifiedNode, URIRef + + +@pytest.mark.parametrize( + "subj, pred, obj, expected_params", + [ + [ + URIRef("http://example.com/s"), + URIRef("http://example.com/p"), + URIRef("http://example.com/o"), + { + "subj": "", + "pred": "", + "obj": "", + }, + ], + [None, None, None, {}], + [ 
+ BNode("some-bnode"), + URIRef("http://example.com/p"), + BNode("some-bnode-2"), + { + "subj": "_:some-bnode", + "pred": "", + "obj": "_:some-bnode-2", + }, + ], + ], +) +def test_repo_delete_spo( + repo: Repository, + monkeypatch: pytest.MonkeyPatch, + subj: SubjectType, + pred: PredicateType, + obj: ObjectType, + expected_params: dict[str, str], +): + """Test that the subj, pred, and obj query parameters are set correctly.""" + mock_response = Mock(spec=httpx.Response) + mock_httpx_get = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "delete", mock_httpx_get) + + repo.delete(subj=subj, pred=pred, obj=obj) + mock_httpx_get.assert_called_once_with( + "/repositories/test-repo/statements", + params=expected_params, + ) + + +@pytest.mark.parametrize( + "graph_name, expected_graph_name_param", + [ + [DATASET_DEFAULT_GRAPH_ID, "null"], + ["http://example.com/graph", ""], + [URIRef("http://example.com/graph"), ""], + [BNode("some-bnode"), "_:some-bnode"], + [ + [URIRef("http://example.com/graph"), BNode("some-bnode")], + ",_:some-bnode", + ], + [None, None], + ], +) +def test_repo_delete_graph_name( + repo: Repository, + graph_name: IdentifiedNode | t.Iterable[IdentifiedNode] | str | None, + expected_graph_name_param: str, + monkeypatch: pytest.MonkeyPatch, +): + """ + Test that graph_name is passed as a query parameter and correctly handles the + different type variations. 
+ """ + mock_response = Mock(spec=httpx.Response, text="") + mock_httpx_get = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "delete", mock_httpx_get) + if graph_name is None: + params = {} + else: + params = {"context": expected_graph_name_param} + repo.delete(graph_name=graph_name) + mock_httpx_get.assert_called_once_with( + "/repositories/test-repo/statements", + params=params, + ) diff --git a/test/test_rdf4j/unit/repository/test_repo_get_method.py b/test/test_rdf4j/test_unit/repository/test_repo_get_method.py similarity index 100% rename from test/test_rdf4j/unit/repository/test_repo_get_method.py rename to test/test_rdf4j/test_unit/repository/test_repo_get_method.py diff --git a/test/test_rdf4j/unit/repository/test_repo_overwrite_method.py b/test/test_rdf4j/test_unit/repository/test_repo_overwrite_method.py similarity index 100% rename from test/test_rdf4j/unit/repository/test_repo_overwrite_method.py rename to test/test_rdf4j/test_unit/repository/test_repo_overwrite_method.py diff --git a/test/test_rdf4j/unit/util/test_rdf4j_util.py b/test/test_rdf4j/test_unit/util/test_rdf4j_util.py similarity index 100% rename from test/test_rdf4j/unit/util/test_rdf4j_util.py rename to test/test_rdf4j/test_unit/util/test_rdf4j_util.py From ce4af9941d3f9b78d034b064bc09fc9d3d5b2195 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Mon, 3 Nov 2025 16:08:15 +1000 Subject: [PATCH 16/54] feat: Repository size method --- rdflib/contrib/rdf4j/client.py | 40 +++++++++ ...o_delete_method.py => test_repo_delete.py} | 0 ...st_repo_get_method.py => test_repo_get.py} | 2 +- ...write_method.py => test_repo_overwrite.py} | 0 .../test_unit/repository/test_repo_size.py | 81 +++++++++++++++++++ 5 files changed, 122 insertions(+), 1 deletion(-) rename test/test_rdf4j/test_unit/repository/{test_repo_delete_method.py => test_repo_delete.py} (100%) rename test/test_rdf4j/test_unit/repository/{test_repo_get_method.py => test_repo_get.py} (100%) rename 
test/test_rdf4j/test_unit/repository/{test_repo_overwrite_method.py => test_repo_overwrite.py} (100%) create mode 100644 test/test_rdf4j/test_unit/repository/test_repo_size.py diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index ee755659f..399ced363 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -104,6 +104,46 @@ def health(self) -> bool: except httpx.RequestError: raise + def size( + self, graph_name: IdentifiedNode | Iterable[IdentifiedNode] | str | None = None + ) -> int: + """The number of statements in the repository or in the specified graph name. + + Args: + graph_name: Graph name(s) to restrict to. + + Default value `None` queries all graphs. + + To query just the default graph, use + [`DATASET_DEFAULT_GRAPH_ID`][rdflib.graph.DATASET_DEFAULT_GRAPH_ID]. + + Returns: + The number of statements. + + Raises: + RepositoryFormatError: Fails to parse the repository size. + httpx.RequestError: On network/connection issues. + httpx.HTTPStatusError: Unhandled status code error. 
+ """ + params = {} + build_context_param(params, graph_name) + try: + response = self.http_client.get( + f"/repositories/{self.identifier}/size", params=params + ) + response.raise_for_status() + try: + value = int(response.text) + if value >= 0: + return value + raise ValueError(f"Invalid repository size: {value}") + except ValueError as err: + raise RepositoryFormatError( + f"Failed to parse repository size: {err}" + ) from err + except (httpx.RequestError, httpx.HTTPStatusError): + raise + def get( self, subj: SubjectType = None, diff --git a/test/test_rdf4j/test_unit/repository/test_repo_delete_method.py b/test/test_rdf4j/test_unit/repository/test_repo_delete.py similarity index 100% rename from test/test_rdf4j/test_unit/repository/test_repo_delete_method.py rename to test/test_rdf4j/test_unit/repository/test_repo_delete.py diff --git a/test/test_rdf4j/test_unit/repository/test_repo_get_method.py b/test/test_rdf4j/test_unit/repository/test_repo_get.py similarity index 100% rename from test/test_rdf4j/test_unit/repository/test_repo_get_method.py rename to test/test_rdf4j/test_unit/repository/test_repo_get.py index a131e490b..701215fac 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_get_method.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_get.py @@ -98,9 +98,9 @@ def test_repo_content_type( ) def test_repo_get_graph_name( repo: Repository, + monkeypatch: pytest.MonkeyPatch, graph_name: IdentifiedNode | t.Iterable[IdentifiedNode] | str | None, expected_graph_name_param: str, - monkeypatch: pytest.MonkeyPatch, ): """ Test that graph_name is passed as a query parameter and correctly handles the diff --git a/test/test_rdf4j/test_unit/repository/test_repo_overwrite_method.py b/test/test_rdf4j/test_unit/repository/test_repo_overwrite.py similarity index 100% rename from test/test_rdf4j/test_unit/repository/test_repo_overwrite_method.py rename to test/test_rdf4j/test_unit/repository/test_repo_overwrite.py diff --git 
a/test/test_rdf4j/test_unit/repository/test_repo_size.py b/test/test_rdf4j/test_unit/repository/test_repo_size.py new file mode 100644 index 000000000..ac9f182a8 --- /dev/null +++ b/test/test_rdf4j/test_unit/repository/test_repo_size.py @@ -0,0 +1,81 @@ +from __future__ import annotations + +import typing as t +from unittest.mock import Mock + +import httpx +import pytest + +from rdflib.contrib.rdf4j.client import ( + Repository, +) +from rdflib.contrib.rdf4j.exceptions import RepositoryFormatError +from rdflib.graph import DATASET_DEFAULT_GRAPH_ID +from rdflib.term import BNode, IdentifiedNode, URIRef + + +@pytest.mark.parametrize( + "graph_name, expected_graph_name_param", + [ + [DATASET_DEFAULT_GRAPH_ID, "null"], + ["http://example.com/graph", ""], + [URIRef("http://example.com/graph"), ""], + [BNode("some-bnode"), "_:some-bnode"], + [ + [URIRef("http://example.com/graph"), BNode("some-bnode")], + ",_:some-bnode", + ], + [None, None], + ], +) +def test_repo_size_graph_name( + repo: Repository, + monkeypatch: pytest.MonkeyPatch, + graph_name: IdentifiedNode | t.Iterable[IdentifiedNode] | str | None, + expected_graph_name_param: str, +): + """ + Test that graph_name is passed as a query parameter and correctly handles the + different type variations. 
+ """ + mock_response = Mock(spec=httpx.Response, text="0") + mock_httpx_get = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) + if graph_name is None: + params = {} + else: + params = {"context": expected_graph_name_param} + size = repo.size(graph_name=graph_name) + mock_httpx_get.assert_called_once_with( + "/repositories/test-repo/size", + params=params, + ) + assert size == 0 + + +@pytest.mark.parametrize( + "response_value, expected_parsed_value", + [ + ["0", 0], + ["123", 123], + ["-100", RepositoryFormatError], + ["foo", RepositoryFormatError], + ], +) +def test_repo_size_values( + repo: Repository, + monkeypatch: pytest.MonkeyPatch, + response_value: str, + expected_parsed_value: int | type[RepositoryFormatError], +): + """Test that the return value of the response is correctly parsed.""" + mock_response = Mock(spec=httpx.Response, text=response_value) + mock_httpx_get = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) + + if isinstance(expected_parsed_value, int): + size = repo.size() + assert size == expected_parsed_value + else: + with pytest.raises(expected_parsed_value): + repo.size() From d9258150dea5c6f214e62003e789cd1ec59a178b Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Mon, 3 Nov 2025 16:26:58 +1000 Subject: [PATCH 17/54] test: add e2e tests for the new Repository methods --- test/test_rdf4j/data/quads-2.nq | 1 + .../test_e2e/test_repo_management.py | 40 +++++++++++++++++++ 2 files changed, 41 insertions(+) create mode 100644 test/test_rdf4j/data/quads-2.nq diff --git a/test/test_rdf4j/data/quads-2.nq b/test/test_rdf4j/data/quads-2.nq new file mode 100644 index 000000000..cbd5b8631 --- /dev/null +++ b/test/test_rdf4j/data/quads-2.nq @@ -0,0 +1 @@ + . 
diff --git a/test/test_rdf4j/test_e2e/test_repo_management.py b/test/test_rdf4j/test_e2e/test_repo_management.py index 28cd77e50..b25b27cfb 100644 --- a/test/test_rdf4j/test_e2e/test_repo_management.py +++ b/test/test_rdf4j/test_e2e/test_repo_management.py @@ -10,6 +10,7 @@ RepositoryNotFoundError, RepositoryNotHealthyError, ) +from rdflib import Dataset, URIRef # TODO: only run these tests on py39 or greater. Testcontainers not available on py38. @@ -60,6 +61,45 @@ def test_repo_manager_crud(client: RDF4JClient): with pytest.raises(RepositoryAlreadyExistsError): client.repositories.create("test-repo", config) + # Confirm repo is empty + assert repo.size() == 0 + ds = repo.get() + assert isinstance(ds, Dataset) + assert len(ds) == 0 + + # Use the overwrite method to add statements to the repo + with open(pathlib.Path(__file__).parent.parent / "data/quads-2.nq", "rb") as file: + repo.overwrite(file, "application/n-quads") + assert repo.size() == 1 + ds = repo.get() + assert len(ds) == 1 + str_result = ds.serialize(format="nquads") + assert " ." in str_result + + # Overwrite with a different file. + with open(pathlib.Path(__file__).parent.parent / "data/quads-1.nq", "rb") as file: + repo.overwrite(file, "application/n-quads") + assert repo.size() == 2 + ds = repo.get() + assert len(ds) == 2 + str_result = ds.serialize(format="nquads") + assert " ." in str_result + assert " ." in str_result + + # Get statements using a filter pattern + ds = repo.get(subj=URIRef("http://example.org/s2")) + assert len(ds) == 1 + str_result = ds.serialize(format="nquads") + assert " ." in str_result + + # Use the delete method to delete a statement using a filter pattern + repo.delete(subj=URIRef("http://example.org/s")) + assert repo.size() == 1 + ds = repo.get() + assert len(ds) == 1 + str_result = ds.serialize(format="nquads") + assert " ." 
in str_result + # Delete repository client.repositories.delete("test-repo") assert client.repositories.list() == [] From a7ec08fe5d4401ca464b8409bc82e55a6bb3ad48 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Tue, 4 Nov 2025 11:38:54 +1000 Subject: [PATCH 18/54] feat: add Repository upload and graphs method --- rdflib/contrib/rdf4j/client.py | 105 ++++++--- rdflib/contrib/rdf4j/util.py | 55 ++++- test/test_rdf4j/data/quads-3.nq | 1 + .../test_e2e/test_repo_management.py | 38 ++- .../repository/test_repo_overwrite.py | 46 +++- .../test_unit/repository/test_repo_upload.py | 216 ++++++++++++++++++ .../test_unit/util/test_rdf4j_util.py | 34 ++- 7 files changed, 445 insertions(+), 50 deletions(-) create mode 100644 test/test_rdf4j/data/quads-3.nq create mode 100644 test/test_rdf4j/test_unit/repository/test_repo_upload.py diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index 399ced363..43ce6402f 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -9,6 +9,7 @@ import httpx +from rdflib import BNode from rdflib.contrib.rdf4j.exceptions import ( RDF4JUnsupportedProtocolError, RDFLibParserError, @@ -21,7 +22,7 @@ from rdflib.contrib.rdf4j.util import ( build_context_param, build_infer_param, - build_spo_param, + build_spo_param, rdf_payload_to_stream, ) from rdflib.graph import Dataset, Graph from rdflib.term import IdentifiedNode, Literal, URIRef @@ -144,6 +145,44 @@ def size( except (httpx.RequestError, httpx.HTTPStatusError): raise + def graphs(self) -> list[IdentifiedNode]: + """Get a list of all graph names in the repository. + + Returns: + A list of graph names. + + Raises: + RepositoryFormatError: Fails to parse the repository graph names. + httpx.RequestError: On network/connection issues. + httpx.HTTPStatusError: Unhandled status code error. 
+ """ + try: + headers = { + "Accept": "application/sparql-results+json", + } + response = self.http_client.get( + f"/repositories/{self.identifier}/contexts", headers=headers + ) + response.raise_for_status() + try: + values = [] + for row in response.json()["results"]["bindings"]: + value = row["contextID"]["value"] + value_type = row["contextID"]["type"] + if value_type == "uri": + values.append(URIRef(value)) + elif value_type == "bnode": + values.append(BNode(value)) + else: + raise ValueError(f"Invalid graph name type: {value_type}") + return values + except Exception as err: + raise RepositoryFormatError( + f"Failed to parse repository graph names: {err}" + ) from err + except (httpx.RequestError, httpx.HTTPStatusError): + raise + def get( self, subj: SubjectType = None, @@ -207,46 +246,51 @@ def get( except (httpx.RequestError, httpx.HTTPStatusError): raise - def upload(self, data, content_type: str, base_uri: str): - raise NotImplementedError + # TODO: This only covers appending statements to a repository. + # We still need to implement sparql update and transaction document. 
+ def upload( + self, + data: str | bytes | BinaryIO | Graph | Dataset, + base_uri: str | None = None, + content_type: str | None = None, + ): + """Upload and append statements to the repository.""" + # TODO: docstring + stream, should_close = rdf_payload_to_stream(data) + try: + headers = {"Content-Type": content_type or "application/n-quads"} + params = {} + if base_uri is not None: + params["baseURI"] = base_uri + response = self.http_client.post( + f"/repositories/{self.identifier}/statements", + headers=headers, + params=params, + content=stream, + ) + response.raise_for_status() + except (httpx.RequestError, httpx.HTTPStatusError): + raise + finally: + if should_close: + stream.close() def overwrite( self, - data: str | bytes | BinaryIO, - content_type: str, + data: str | bytes | BinaryIO | Graph | Dataset, graph_name: IdentifiedNode | Iterable[IdentifiedNode] | str | None = None, base_uri: str | None = None, + content_type: str | None = None, ): - if isinstance(data, str): - # Check if it looks like a file path. Assumes file path length is less than 260. - if "\n" not in data and len(data) < 260: - try: - stream = open(data, "rb") - should_close = True - except (FileNotFoundError, OSError): - # Treat as raw string content - stream = io.BytesIO(data.encode("utf-8")) - should_close = False - else: - # Treat as raw string content - stream = io.BytesIO(data.encode("utf-8")) - should_close = False - elif isinstance(data, bytes): - stream = io.BytesIO(data) - should_close = False - else: - # Assume it's already a file-like object - stream = data - should_close = False + # TODO: Add docstring. 
+ stream, should_close = rdf_payload_to_stream(data) try: - headers = {"Content-Type": content_type} + headers = {"Content-Type": content_type or "application/n-quads"} params = {} build_context_param(params, graph_name) - if base_uri is not None: params["baseURI"] = base_uri - response = self.http_client.put( f"/repositories/{self.identifier}/statements", headers=headers, @@ -254,7 +298,8 @@ def overwrite( content=stream, ) response.raise_for_status() - + except (httpx.RequestError, httpx.HTTPStatusError): + raise finally: if should_close: stream.close() diff --git a/rdflib/contrib/rdf4j/util.py b/rdflib/contrib/rdf4j/util.py index 294c11f2b..5b78910f2 100644 --- a/rdflib/contrib/rdf4j/util.py +++ b/rdflib/contrib/rdf4j/util.py @@ -1,8 +1,9 @@ from __future__ import annotations +import io import typing as t -from rdflib.graph import DATASET_DEFAULT_GRAPH_ID +from rdflib.graph import DATASET_DEFAULT_GRAPH_ID, Dataset, Graph from rdflib.term import IdentifiedNode, URIRef if t.TYPE_CHECKING: @@ -51,7 +52,7 @@ def build_spo_param( Args: params: The `httpx.Request` parameter dictionary. subj: The `subj` query parameter value. - pred: The `pred` query parameter value.. + pred: The `pred` query parameter value. obj: The `obj` query parameter value. """ if subj is not None: @@ -77,3 +78,53 @@ def build_infer_param( """ if not infer: params["infer"] = "false" + + +def rdf_payload_to_stream( + data: str | bytes | t.BinaryIO | Graph | Dataset, +) -> tuple[t.BinaryIO, bool]: + """Convert an RDF payload into a file-like object. + + Args: + data: The RDF payload. + + This can be a python `str`, `bytes`, `BinaryIO`, or a + [`Graph`][rdflib.graph.Graph] or [`Dataset`][rdflib.graph.Dataset]. + + Returns: + A tuple containing the file-like object and a boolean indicating whether the + immediate caller should close the stream. + """ + if isinstance(data, str): + # Check if it looks like a file path. Assumes file path length is less than 260. 
+ if "\n" not in data and len(data) < 260: + try: + stream = open(data, "rb") + should_close = True + except (FileNotFoundError, OSError): + # Treat as raw string content + stream = io.BytesIO(data.encode("utf-8")) + should_close = False + else: + # Treat as raw string content + stream = io.BytesIO(data.encode("utf-8")) + should_close = False + elif isinstance(data, bytes): + stream = io.BytesIO(data) + should_close = False + elif isinstance(data, (Graph, Dataset)): + if data.context_aware: + stream = io.BytesIO( + data.serialize(format="application/n-quads", encoding="utf-8") + ) + else: + stream = io.BytesIO( + data.serialize(format="application/n-triples", encoding="utf-8") + ) + should_close = True + else: + # Assume it's already a file-like object + stream = data + should_close = False + + return stream, should_close diff --git a/test/test_rdf4j/data/quads-3.nq b/test/test_rdf4j/data/quads-3.nq new file mode 100644 index 000000000..39ca50092 --- /dev/null +++ b/test/test_rdf4j/data/quads-3.nq @@ -0,0 +1 @@ +_:b-test _:c . diff --git a/test/test_rdf4j/test_e2e/test_repo_management.py b/test/test_rdf4j/test_e2e/test_repo_management.py index b25b27cfb..0b96fa4a7 100644 --- a/test/test_rdf4j/test_e2e/test_repo_management.py +++ b/test/test_rdf4j/test_e2e/test_repo_management.py @@ -3,6 +3,7 @@ import httpx import pytest +from rdflib import Dataset, URIRef from rdflib.contrib.rdf4j import RDF4JClient from rdflib.contrib.rdf4j.exceptions import ( RepositoryAlreadyExistsError, @@ -10,7 +11,6 @@ RepositoryNotFoundError, RepositoryNotHealthyError, ) -from rdflib import Dataset, URIRef # TODO: only run these tests on py39 or greater. Testcontainers not available on py38. 
@@ -71,10 +71,16 @@ def test_repo_manager_crud(client: RDF4JClient): with open(pathlib.Path(__file__).parent.parent / "data/quads-2.nq", "rb") as file: repo.overwrite(file, "application/n-quads") assert repo.size() == 1 + graphs = repo.graphs() + assert len(graphs) == 1 + assert any(value in graphs for value in [URIRef("urn:graph:a3")]) ds = repo.get() assert len(ds) == 1 str_result = ds.serialize(format="nquads") - assert " ." in str_result + assert ( + " ." + in str_result + ) # Overwrite with a different file. with open(pathlib.Path(__file__).parent.parent / "data/quads-1.nq", "rb") as file: @@ -82,15 +88,29 @@ def test_repo_manager_crud(client: RDF4JClient): assert repo.size() == 2 ds = repo.get() assert len(ds) == 2 + graphs = repo.graphs() + assert len(graphs) == 2 + assert any( + value in graphs for value in [URIRef("urn:graph:a"), URIRef("urn:graph:b")] + ) str_result = ds.serialize(format="nquads") - assert " ." in str_result - assert " ." in str_result + assert ( + " ." + in str_result + ) + assert ( + " ." + in str_result + ) # Get statements using a filter pattern ds = repo.get(subj=URIRef("http://example.org/s2")) assert len(ds) == 1 str_result = ds.serialize(format="nquads") - assert " ." in str_result + assert ( + " ." + in str_result + ) # Use the delete method to delete a statement using a filter pattern repo.delete(subj=URIRef("http://example.org/s")) @@ -98,7 +118,13 @@ def test_repo_manager_crud(client: RDF4JClient): ds = repo.get() assert len(ds) == 1 str_result = ds.serialize(format="nquads") - assert " ." in str_result + assert ( + " ." 
+ in str_result + ) + + # Append to the repository a new RDF payload with blank node graph names + # TODO: # Delete repository client.repositories.delete("test-repo") diff --git a/test/test_rdf4j/test_unit/repository/test_repo_overwrite.py b/test/test_rdf4j/test_unit/repository/test_repo_overwrite.py index 373735f2f..872d690fb 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_overwrite.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_overwrite.py @@ -8,11 +8,37 @@ import httpx import pytest -from rdflib import BNode, IdentifiedNode, URIRef +from rdflib import BNode, Dataset, Graph, IdentifiedNode, URIRef from rdflib.contrib.rdf4j.client import Repository from rdflib.graph import DATASET_DEFAULT_GRAPH_ID +@pytest.mark.parametrize("class_type", [Graph, Dataset]) +def test_repo_overwrite_graph( + repo: Repository, monkeypatch: pytest.MonkeyPatch, class_type: type[Graph | Dataset] +): + """Test that the overwrite method handles Graphs and Datasets as data input.""" + file_path = pathlib.Path(__file__).parent.parent.parent / "data/quads-1.nq" + mock = Mock() + monkeypatch.setattr(httpx.Client, "put", mock) + headers = { + "Content-Type": "application/n-quads", + } + params = {} + graph = class_type().parse(file_path) + repo.overwrite(graph) + mock.assert_called_once_with( + "/repositories/test-repo/statements", + headers=headers, + params=params, + content=ANY, + ) + call_args = mock.call_args + content = call_args.kwargs["content"] + assert isinstance(content, io.BytesIO) + assert content.closed + + def test_repo_overwrite_file_path(repo: Repository, monkeypatch: pytest.MonkeyPatch): """Test that a file path is treated as a file to be read and closed when done.""" file_path = pathlib.Path(__file__).parent.parent.parent / "data/quads-1.nq" @@ -22,7 +48,7 @@ def test_repo_overwrite_file_path(repo: Repository, monkeypatch: pytest.MonkeyPa "Content-Type": "application/n-quads", } params = {} - repo.overwrite(str(file_path), "application/n-quads") + 
repo.overwrite(str(file_path), content_type="application/n-quads") mock.assert_called_once_with( "/repositories/test-repo/statements", headers=headers, @@ -49,7 +75,7 @@ def test_repo_overwrite_buffered_reader( "Content-Type": "application/n-quads", } params = {} - repo.overwrite(file, "application/n-quads") + repo.overwrite(file, content_type="application/n-quads") mock.assert_called_once_with( "/repositories/test-repo/statements", headers=headers, @@ -78,7 +104,7 @@ def test_repo_overwrite_data( "Content-Type": "application/n-quads", } params = {} - repo.overwrite(data, "application/n-quads") + repo.overwrite(data, content_type="application/n-quads") mock.assert_called_once_with( "/repositories/test-repo/statements", headers=headers, @@ -121,7 +147,7 @@ def test_repo_overwrite_graph_name( params = {} else: params = {"context": expected_graph_name_param} - repo.overwrite("", "application/n-quads", graph_name) + repo.overwrite("", graph_name=graph_name, content_type="application/n-quads") mock.assert_called_once_with( "/repositories/test-repo/statements", headers=headers, @@ -150,7 +176,7 @@ def test_repo_overwrite_base_uri( headers = { "Content-Type": "application/n-quads", } - repo.overwrite("", "application/n-quads", base_uri=base_uri) + repo.overwrite("", base_uri=base_uri, content_type="application/n-quads") mock.assert_called_once_with( "/repositories/test-repo/statements", headers=headers, @@ -170,7 +196,7 @@ def test_repo_overwrite_nonexistent_file_path( } params = {} nonexistent_path = "/nonexistent/path/file.nq" - repo.overwrite(nonexistent_path, "application/n-quads") + repo.overwrite(nonexistent_path, content_type="application/n-quads") mock.assert_called_once_with( "/repositories/test-repo/statements", headers=headers, @@ -194,7 +220,7 @@ def test_repo_overwrite_string_with_newline( } params = {} data_with_newline = " .\n ." 
- repo.overwrite(data_with_newline, "application/n-quads") + repo.overwrite(data_with_newline, content_type="application/n-quads") mock.assert_called_once_with( "/repositories/test-repo/statements", headers=headers, @@ -217,7 +243,7 @@ def test_repo_overwrite_long_string(repo: Repository, monkeypatch: pytest.Monkey params = {} # Create a string longer than 260 characters long_string = "a" * 261 - repo.overwrite(long_string, "application/n-quads") + repo.overwrite(long_string, content_type="application/n-quads") mock.assert_called_once_with( "/repositories/test-repo/statements", headers=headers, @@ -245,9 +271,9 @@ def test_repo_overwrite_graph_name_and_base_uri( } repo.overwrite( "", - "application/n-quads", graph_name="http://example.com/graph", base_uri="http://example.com/base", + content_type="application/n-quads", ) mock.assert_called_once_with( "/repositories/test-repo/statements", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_upload.py b/test/test_rdf4j/test_unit/repository/test_repo_upload.py new file mode 100644 index 000000000..8d2ee4e56 --- /dev/null +++ b/test/test_rdf4j/test_unit/repository/test_repo_upload.py @@ -0,0 +1,216 @@ +from __future__ import annotations + +import io +import pathlib +from unittest.mock import ANY, Mock + +import httpx +import pytest + +from rdflib import Dataset, Graph +from rdflib.contrib.rdf4j.client import Repository + + +@pytest.mark.parametrize("class_type", [Graph, Dataset]) +def test_repo_upload_graph( + repo: Repository, monkeypatch: pytest.MonkeyPatch, class_type: type[Graph | Dataset] +): + """Test that the upload method handles Graphs and Datasets as data input.""" + file_path = pathlib.Path(__file__).parent.parent.parent / "data/quads-1.nq" + mock = Mock() + monkeypatch.setattr(httpx.Client, "post", mock) + headers = { + "Content-Type": "application/n-quads", + } + params = {} + graph = class_type().parse(file_path) + repo.upload(graph) + mock.assert_called_once_with( + 
"/repositories/test-repo/statements", + headers=headers, + params=params, + content=ANY, + ) + call_args = mock.call_args + content = call_args.kwargs["content"] + assert isinstance(content, io.BytesIO) + assert content.closed + + +def test_repo_upload_file_path(repo: Repository, monkeypatch: pytest.MonkeyPatch): + """Test that a file path is treated as a file to be read and closed when done.""" + file_path = pathlib.Path(__file__).parent.parent.parent / "data/quads-1.nq" + mock = Mock() + monkeypatch.setattr(httpx.Client, "post", mock) + headers = { + "Content-Type": "application/n-quads", + } + params = {} + repo.upload(str(file_path), content_type="application/n-quads") + mock.assert_called_once_with( + "/repositories/test-repo/statements", + headers=headers, + params=params, + content=ANY, + ) + call_args = mock.call_args + content = call_args.kwargs["content"] + assert hasattr(content, "read") + assert hasattr(content, "name") + assert content.name == str(file_path) + assert content.closed + + +def test_repo_upload_buffered_reader( + repo: Repository, monkeypatch: pytest.MonkeyPatch +): + """Test that a file-like object is read and not closed when done.""" + file_path = pathlib.Path(__file__).parent.parent.parent / "data/quads-1.nq" + mock = Mock() + monkeypatch.setattr(httpx.Client, "post", mock) + with open(file_path, "rb") as file: + headers = { + "Content-Type": "application/n-quads", + } + params = {} + repo.upload(file, content_type="application/n-quads") + mock.assert_called_once_with( + "/repositories/test-repo/statements", + headers=headers, + params=params, + content=file, + ) + call_args = mock.call_args + content = call_args.kwargs["content"] + assert not content.closed + + +@pytest.mark.parametrize( + "data", + [ + " .", + b" .", + ], +) +def test_repo_upload_data( + repo: Repository, data: str | bytes, monkeypatch: pytest.MonkeyPatch +): + """Test that str and bytes data is treated as content.""" + mock = Mock() + 
monkeypatch.setattr(httpx.Client, "post", mock) + headers = { + "Content-Type": "application/n-quads", + } + params = {} + repo.upload(data, content_type="application/n-quads") + mock.assert_called_once_with( + "/repositories/test-repo/statements", + headers=headers, + params=params, + content=ANY, + ) + call_args = mock.call_args + content = call_args.kwargs["content"] + assert isinstance(content, io.BytesIO) + assert not content.closed + + + +@pytest.mark.parametrize( + "base_uri, expected_params", + [ + ["", {"baseURI": ""}], + ["http://example.com", {"baseURI": "http://example.com"}], + [None, {}], + ], +) +def test_repo_upload_base_uri( + repo: Repository, + base_uri: str | None, + expected_params: dict[str, str], + monkeypatch: pytest.MonkeyPatch, +): + """Test that base_uri is passed as a query parameter.""" + mock = Mock() + monkeypatch.setattr(httpx.Client, "post", mock) + headers = { + "Content-Type": "application/n-quads", + } + repo.upload("", base_uri=base_uri, content_type="application/n-quads") + mock.assert_called_once_with( + "/repositories/test-repo/statements", + headers=headers, + params=expected_params, + content=ANY, + ) + + +def test_repo_upload_nonexistent_file_path( + repo: Repository, monkeypatch: pytest.MonkeyPatch +): + """Test that a string that looks like a file path but doesn't exist is treated as content.""" + mock = Mock() + monkeypatch.setattr(httpx.Client, "post", mock) + headers = { + "Content-Type": "application/n-quads", + } + params = {} + nonexistent_path = "/nonexistent/path/file.nq" + repo.upload(nonexistent_path, content_type="application/n-quads") + mock.assert_called_once_with( + "/repositories/test-repo/statements", + headers=headers, + params=params, + content=ANY, + ) + call_args = mock.call_args + content = call_args.kwargs["content"] + assert isinstance(content, io.BytesIO) + assert not content.closed + + +def test_repo_upload_string_with_newline( + repo: Repository, monkeypatch: pytest.MonkeyPatch +): + """Test 
that a string with newlines is treated as content, not a file path.""" + mock = Mock() + monkeypatch.setattr(httpx.Client, "post", mock) + headers = { + "Content-Type": "application/n-quads", + } + params = {} + data_with_newline = " .\n ." + repo.upload(data_with_newline, content_type="application/n-quads") + mock.assert_called_once_with( + "/repositories/test-repo/statements", + headers=headers, + params=params, + content=ANY, + ) + call_args = mock.call_args + content = call_args.kwargs["content"] + assert isinstance(content, io.BytesIO) + assert not content.closed + + +def test_repo_upload_long_string(repo: Repository, monkeypatch: pytest.MonkeyPatch): + """Test that a string longer than 260 characters is treated as content, not a file path.""" + mock = Mock() + monkeypatch.setattr(httpx.Client, "post", mock) + headers = { + "Content-Type": "application/n-quads", + } + params = {} + # Create a string longer than 260 characters + long_string = "a" * 261 + repo.upload(long_string, content_type="application/n-quads") + mock.assert_called_once_with( + "/repositories/test-repo/statements", + headers=headers, + params=params, + content=ANY, + ) + call_args = mock.call_args + content = call_args.kwargs["content"] + assert isinstance(content, io.BytesIO) + assert not content.closed diff --git a/test/test_rdf4j/test_unit/util/test_rdf4j_util.py b/test/test_rdf4j/test_unit/util/test_rdf4j_util.py index 63ee02085..ea4d4e5b6 100644 --- a/test/test_rdf4j/test_unit/util/test_rdf4j_util.py +++ b/test/test_rdf4j/test_unit/util/test_rdf4j_util.py @@ -1,7 +1,12 @@ +from __future__ import annotations + +import io +import typing as t + import pytest -from rdflib.contrib.rdf4j.util import build_context_param -from rdflib.graph import DATASET_DEFAULT_GRAPH_ID +from rdflib.contrib.rdf4j.util import build_context_param, rdf_payload_to_stream +from rdflib.graph import DATASET_DEFAULT_GRAPH_ID, Dataset, Graph from rdflib.term import BNode, URIRef @@ -26,3 +31,28 @@ def 
test_build_context_param(graph_name, expected_graph_name_param): assert "context" not in params else: assert params["context"] == expected_graph_name_param + + +@pytest.mark.parametrize( + "data, expected_value_type, expected_should_close", + [ + [ + open("test/test_rdf4j/test_unit/repository/test_repo_delete.py", "rb"), + io.BufferedReader, + False, + ], + ["", io.BytesIO, False], + [b"", io.BytesIO, False], + [io.BytesIO(b""), io.BytesIO, False], + [Graph(), io.BytesIO, True], + [Dataset(), io.BytesIO, True], + ], +) +def test_rdf_payload_to_stream( + data: str | bytes | t.BinaryIO | Graph | Dataset, + expected_value_type: type[io.BufferedIOBase | io.RawIOBase], + expected_should_close: bool, +): + value, should_close = rdf_payload_to_stream(data) + assert isinstance(value, expected_value_type) + assert should_close == expected_should_close From 39f1adcba0793af2c99447efc4a1b56a5beef54f Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Tue, 4 Nov 2025 12:17:16 +1000 Subject: [PATCH 19/54] test: fix up and improve existing tests --- rdflib/contrib/rdf4j/client.py | 4 +-- test/test_rdf4j/data/quads-3.nq | 2 +- .../test_e2e/test_repo_management.py | 26 ++++++++++++++++--- .../test_unit/repository/test_repo_size.py | 5 ++++ .../test_unit/repository/test_repo_upload.py | 5 +--- 5 files changed, 31 insertions(+), 11 deletions(-) diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index 43ce6402f..cb81a0e69 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -2,7 +2,6 @@ from __future__ import annotations -import io import typing as t from dataclasses import dataclass from typing import Any, BinaryIO, Iterable @@ -22,7 +21,8 @@ from rdflib.contrib.rdf4j.util import ( build_context_param, build_infer_param, - build_spo_param, rdf_payload_to_stream, + build_spo_param, + rdf_payload_to_stream, ) from rdflib.graph import Dataset, Graph from rdflib.term import IdentifiedNode, Literal, URIRef diff --git 
a/test/test_rdf4j/data/quads-3.nq b/test/test_rdf4j/data/quads-3.nq index 39ca50092..924f94d9e 100644 --- a/test/test_rdf4j/data/quads-3.nq +++ b/test/test_rdf4j/data/quads-3.nq @@ -1 +1 @@ -_:b-test _:c . +_:b-test _:c _:graph . diff --git a/test/test_rdf4j/test_e2e/test_repo_management.py b/test/test_rdf4j/test_e2e/test_repo_management.py index 0b96fa4a7..f91683f27 100644 --- a/test/test_rdf4j/test_e2e/test_repo_management.py +++ b/test/test_rdf4j/test_e2e/test_repo_management.py @@ -3,7 +3,8 @@ import httpx import pytest -from rdflib import Dataset, URIRef +from rdflib import BNode, Dataset, URIRef +from rdflib.compare import isomorphic from rdflib.contrib.rdf4j import RDF4JClient from rdflib.contrib.rdf4j.exceptions import ( RepositoryAlreadyExistsError, @@ -69,7 +70,7 @@ def test_repo_manager_crud(client: RDF4JClient): # Use the overwrite method to add statements to the repo with open(pathlib.Path(__file__).parent.parent / "data/quads-2.nq", "rb") as file: - repo.overwrite(file, "application/n-quads") + repo.overwrite(file) assert repo.size() == 1 graphs = repo.graphs() assert len(graphs) == 1 @@ -84,7 +85,7 @@ def test_repo_manager_crud(client: RDF4JClient): # Overwrite with a different file. with open(pathlib.Path(__file__).parent.parent / "data/quads-1.nq", "rb") as file: - repo.overwrite(file, "application/n-quads") + repo.overwrite(file) assert repo.size() == 2 ds = repo.get() assert len(ds) == 2 @@ -124,7 +125,24 @@ def test_repo_manager_crud(client: RDF4JClient): ) # Append to the repository a new RDF payload with blank node graph names - # TODO: + with open(pathlib.Path(__file__).parent.parent / "data/quads-3.nq", "rb") as file: + repo.upload(file) + assert repo.size() == 2 + ds = repo.get() + assert len(ds) == 2 + graphs = repo.graphs() + assert len(graphs) == 2 + assert any( + value in graphs + for value in [URIRef("urn:graph:a"), URIRef("urn:graph:b"), BNode("c")] + ) + data = """ + . + _:b-test _:c _:graph . 
+ """ + ds2 = Dataset().parse(data=data, format="nquads") + for graph in ds.graphs(): + assert any(isomorphic(graph, graph2) for graph2 in ds2.graphs()) # Delete repository client.repositories.delete("test-repo") diff --git a/test/test_rdf4j/test_unit/repository/test_repo_size.py b/test/test_rdf4j/test_unit/repository/test_repo_size.py index ac9f182a8..45d890614 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_size.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_size.py @@ -79,3 +79,8 @@ def test_repo_size_values( else: with pytest.raises(expected_parsed_value): repo.size() + + mock_httpx_get.assert_called_once_with( + "/repositories/test-repo/size", + params={}, + ) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_upload.py b/test/test_rdf4j/test_unit/repository/test_repo_upload.py index 8d2ee4e56..2f0522e85 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_upload.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_upload.py @@ -61,9 +61,7 @@ def test_repo_upload_file_path(repo: Repository, monkeypatch: pytest.MonkeyPatch assert content.closed -def test_repo_upload_buffered_reader( - repo: Repository, monkeypatch: pytest.MonkeyPatch -): +def test_repo_upload_buffered_reader(repo: Repository, monkeypatch: pytest.MonkeyPatch): """Test that a file-like object is read and not closed when done.""" file_path = pathlib.Path(__file__).parent.parent.parent / "data/quads-1.nq" mock = Mock() @@ -115,7 +113,6 @@ def test_repo_upload_data( assert not content.closed - @pytest.mark.parametrize( "base_uri, expected_params", [ From d5182acefa903812e3648597258c733a586e8366 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Tue, 4 Nov 2025 12:17:28 +1000 Subject: [PATCH 20/54] test: add tests for Repository.graphs --- .../test_unit/repository/test_repo_graphs.py | 102 ++++++++++++++++++ 1 file changed, 102 insertions(+) create mode 100644 test/test_rdf4j/test_unit/repository/test_repo_graphs.py diff --git 
a/test/test_rdf4j/test_unit/repository/test_repo_graphs.py b/test/test_rdf4j/test_unit/repository/test_repo_graphs.py new file mode 100644 index 000000000..b5cc6ef13 --- /dev/null +++ b/test/test_rdf4j/test_unit/repository/test_repo_graphs.py @@ -0,0 +1,102 @@ +from __future__ import annotations + +from unittest.mock import Mock + +import httpx +import pytest + +from rdflib.contrib.rdf4j.client import Repository +from rdflib.contrib.rdf4j.exceptions import RepositoryFormatError +from rdflib.term import BNode, IdentifiedNode, URIRef + + +@pytest.mark.parametrize( + "response_dict, expected_result", + [ + [{"results": {"bindings": []}}, set()], + [ + { + "results": { + "bindings": [ + { + "contextID": { + "value": "http://example.com/graph", + "type": "uri", + } + } + ] + } + }, + {URIRef("http://example.com/graph")}, + ], + [ + { + "results": { + "bindings": [{"contextID": {"value": "bnode1", "type": "bnode"}}] + } + }, + {BNode("bnode1")}, + ], + [ + { + "results": { + "bindings": [ + {"contextID": {"value": "bnode1", "type": "bnode"}}, + {"contextID": {"value": "urn:blah", "type": "uri"}}, + ] + } + }, + {BNode("bnode1"), URIRef("urn:blah")}, + ], + ], +) +def test_repo_graphs( + repo: Repository, + monkeypatch: pytest.MonkeyPatch, + response_dict: dict, + expected_result: set[IdentifiedNode], +): + """Test that the graphs are returned correctly.""" + mock_response = Mock(spec=httpx.Response, json=lambda: response_dict) + mock_httpx_get = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) + result = repo.graphs() + mock_httpx_get.assert_called_once_with( + "/repositories/test-repo/contexts", + headers={"Accept": "application/sparql-results+json"}, + ) + assert set(result) == expected_result + + +@pytest.mark.parametrize( + "response_dict, expected_error", + [ + [{}, RepositoryFormatError], + [ + { + "results": { + "bindings": [ + {"contextID": {"type": "invalid", "value": "urn:example"}} + ] + } + }, + 
RepositoryFormatError, + ], + ], +) +def test_repo_graphs_invalid_response( + repo: Repository, + monkeypatch: pytest.MonkeyPatch, + response_dict: dict, + expected_error: type[Exception], +): + """Test that an error is raised when the response is invalid.""" + mock_response = Mock(spec=httpx.Response, json=lambda: response_dict) + mock_httpx_get = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) + with pytest.raises(expected_error): + repo.graphs() + mock_httpx_get.assert_called_once_with( + "/repositories/test-repo/contexts", + headers={"Accept": "application/sparql-results+json"}, + ) From 3282124ab78bb09fa4928bcf625386b86ef04d11 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Tue, 4 Nov 2025 12:27:06 +1000 Subject: [PATCH 21/54] chore: add docstring to overwrite and upload methods --- rdflib/contrib/rdf4j/client.py | 44 +++++++++++++++++++++++++++------- 1 file changed, 35 insertions(+), 9 deletions(-) diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index cb81a0e69..06764bd35 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -110,10 +110,10 @@ def size( ) -> int: """The number of statements in the repository or in the specified graph name. - Args: + Parameters: graph_name: Graph name(s) to restrict to. - Default value `None` queries all graphs. + The default value `None` queries all graphs. To query just the default graph, use [`DATASET_DEFAULT_GRAPH_ID`][rdflib.graph.DATASET_DEFAULT_GRAPH_ID]. @@ -194,13 +194,13 @@ def get( ) -> Graph | Dataset: """Get RDF statements from the repository matching the filtering parameters. - Args: + Parameters: subj: Subject of the statement to filter by, or `None` to match all. pred: Predicate of the statement to filter by, or `None` to match all. obj: Object of the statement to filter by, or `None` to match all. graph_name: Graph name(s) to restrict to. - Default value `None` queries all graphs. 
+ The default value `None` queries all graphs. To query just the default graph, use [`DATASET_DEFAULT_GRAPH_ID`][rdflib.graph.DATASET_DEFAULT_GRAPH_ID]. @@ -254,8 +254,18 @@ def upload( base_uri: str | None = None, content_type: str | None = None, ): - """Upload and append statements to the repository.""" - # TODO: docstring + """Upload and append statements to the repository. + + Parameters: + data: The RDF data to upload. + base_uri: The base URI to resolve against for any relative URIs in the data. + content_type: The content type of the data. + Defaults to `application/n-quads`. + + Raises: + httpx.RequestError: On network/connection issues. + httpx.HTTPStatusError: Unhandled status code error. + """ stream, should_close = rdf_payload_to_stream(data) try: headers = {"Content-Type": content_type or "application/n-quads"} @@ -282,7 +292,23 @@ def overwrite( base_uri: str | None = None, content_type: str | None = None, ): - # TODO: Add docstring. + """Upload and overwrite statements in the repository. + + Parameters: + data: The RDF data to upload. + graph_name: Graph name(s) to restrict to. + + The default value `None` applies to all graphs. + + To apply to just the default graph, use + [`DATASET_DEFAULT_GRAPH_ID`][rdflib.graph.DATASET_DEFAULT_GRAPH_ID]. + + base_uri: The base URI to resolve against for any relative URIs in the data. + + Raises: + httpx.RequestError: On network/connection issues. + httpx.HTTPStatusError: Unhandled status code error. + """ stream, should_close = rdf_payload_to_stream(data) try: @@ -313,13 +339,13 @@ def delete( ) -> None: """Deletes statements from the repository matching the filtering parameters. - Args: + Parameters: subj: Subject of the statement to filter by, or `None` to match all. pred: Predicate of the statement to filter by, or `None` to match all. obj: Object of the statement to filter by, or `None` to match all. graph_name: Graph name(s) to restrict to. - Default value `None` queries all graphs. 
+ The default value `None` queries all graphs. To query just the default graph, use [`DATASET_DEFAULT_GRAPH_ID`][rdflib.graph.DATASET_DEFAULT_GRAPH_ID]. From c44058e87b24343c8cb1c858bf61fdf487691539 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Tue, 4 Nov 2025 13:58:32 +1000 Subject: [PATCH 22/54] feat: defer repository manager creation until accessed, and also amend some docstring content --- rdflib/contrib/rdf4j/client.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index 06764bd35..ffd3f41c8 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -259,8 +259,8 @@ def upload( Parameters: data: The RDF data to upload. base_uri: The base URI to resolve against for any relative URIs in the data. - content_type: The content type of the data. - Defaults to `application/n-quads`. + content_type: The content type of the data. Defaults to + `application/n-quads` when the value is `None`. Raises: httpx.RequestError: On network/connection issues. @@ -304,6 +304,8 @@ def overwrite( [`DATASET_DEFAULT_GRAPH_ID`][rdflib.graph.DATASET_DEFAULT_GRAPH_ID]. base_uri: The base URI to resolve against for any relative URIs in the data. + content_type: The content type of the data. Defaults to + `application/n-quads` when the value is `None`. Raises: httpx.RequestError: On network/connection issues. @@ -525,7 +527,7 @@ def __init__( raise RDF4JUnsupportedProtocolError( f"RDF4J server protocol version {self.protocol} is not supported. Minimum required version is 12." 
) - self._repository_manager = RepositoryManager(self.http_client) + self._repository_manager = None def __enter__(self): return self @@ -540,6 +542,8 @@ def http_client(self): @property def repositories(self): """Server-level repository management operations.""" + if self._repository_manager is None: + self._repository_manager = RepositoryManager(self.http_client) return self._repository_manager @property From 73ed890d250eae206b98cee22bbbc474a254f7b5 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Tue, 4 Nov 2025 15:16:07 +1000 Subject: [PATCH 23/54] feat: add RDF4J NamespaceManager --- rdflib/contrib/rdf4j/client.py | 173 +++++++++++++++++- .../repository/test_repo_namespace_clear.py | 19 ++ .../repository/test_repo_namespace_get.py | 49 +++++ .../repository/test_repo_namespace_list.py | 71 +++++++ .../repository/test_repo_namespace_remove.py | 40 ++++ .../repository/test_repo_namespace_set.py | 47 +++++ 6 files changed, 398 insertions(+), 1 deletion(-) create mode 100644 test/test_rdf4j/test_unit/repository/test_repo_namespace_clear.py create mode 100644 test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py create mode 100644 test/test_rdf4j/test_unit/repository/test_repo_namespace_list.py create mode 100644 test/test_rdf4j/test_unit/repository/test_repo_namespace_remove.py create mode 100644 test/test_rdf4j/test_unit/repository/test_repo_namespace_set.py diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index ffd3f41c8..d6cad95af 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -32,6 +32,167 @@ ObjectType = t.Union[IdentifiedNode, Literal, None] +@dataclass(frozen=True) +class NamespaceListingResult: + """RDF4J namespace and prefix name result.""" + + prefix: str + namespace: str + + +class NamespaceManager: + """A namespace manager for RDF4J repositories. + + Parameters: + identifier: The identifier of the repository. + http_client: The httpx.Client instance. 
+ """ + + def __init__(self, identifier: str, http_client: httpx.Client): + self._identifier = identifier + self._http_client = http_client + + @property + def http_client(self): + return self._http_client + + @property + def identifier(self): + """Repository identifier.""" + return self._identifier + + def list(self) -> list[NamespaceListingResult]: + """List all namespace declarations in the repository. + + Returns: + list[NamespaceListingResult]: List of namespace and prefix name results. + + Raises: + RepositoryFormatError: If the response format is unrecognized. + httpx.RequestError: On network/connection issues. + httpx.HTTPStatusError: Unhandled status code error. + """ + headers = { + "Accept": "application/sparql-results+json", + } + try: + response = self.http_client.get( + f"/repositories/{self.identifier}/namespaces", headers=headers + ) + response.raise_for_status() + + try: + data = response.json() + results = data["results"]["bindings"] + return [ + NamespaceListingResult( + prefix=row["prefix"]["value"], + namespace=row["namespace"]["value"], + ) + for row in results + ] + except (KeyError, ValueError) as err: + raise RepositoryFormatError(f"Unrecognised response format: {err}") + except (httpx.RequestError, httpx.HTTPStatusError): + raise + + def clear(self): + """Clear all namespace declarations in the repository. + + Raises: + httpx.RequestError: On network/connection issues. + httpx.HTTPStatusError: Unhandled status code error. + """ + headers = { + "Accept": "application/sparql-results+json", + } + try: + response = self.http_client.delete( + f"/repositories/{self.identifier}/namespaces", headers=headers + ) + response.raise_for_status() + except (httpx.RequestError, httpx.HTTPStatusError): + raise + + def get(self, prefix: str) -> str | None: + """Get the namespace URI for a given prefix. + + Parameters: + prefix: The prefix to lookup. + + Returns: + The namespace URI or `None` if not found. 
+ + Raises: + httpx.RequestError: On network/connection issues. + httpx.HTTPStatusError: Unhandled status code error. + """ + if not prefix: + raise ValueError("Prefix cannot be empty.") + headers = { + "Accept": "text/plain", + } + try: + response = self.http_client.get( + f"/repositories/{self.identifier}/namespaces/{prefix}", headers=headers + ) + response.raise_for_status() + return response.text + except httpx.HTTPStatusError as err: + if err.response.status_code == 404: + return None + raise + except httpx.RequestError: + raise + + def set(self, prefix: str, namespace: str): + """Set the namespace URI for a given prefix. + + !!! note + If the prefix was previously mapped to a different namespace, this will be overwritten. + + Parameters: + prefix: The prefix to set. + namespace: The namespace URI to set. + + Raises: + httpx.RequestError: On network/connection issues. + httpx.HTTPStatusError: Unhandled status code error. + """ + if not prefix: + raise ValueError("Prefix cannot be empty.") + if not namespace: + raise ValueError("Namespace cannot be empty.") + headers = { + "Content-Type": "text/plain", + } + try: + response = self.http_client.put( + f"/repositories/{self.identifier}/namespaces/{prefix}", + headers=headers, + content=namespace, + ) + response.raise_for_status() + except (httpx.RequestError, httpx.HTTPStatusError): + raise + + def remove(self, prefix: str): + """Remove the namespace declaration for a given prefix. + + Parameters: + prefix: The prefix to remove. + """ + if not prefix: + raise ValueError("Prefix cannot be empty.") + try: + response = self.http_client.delete( + f"/repositories/{self.identifier}/namespaces/{prefix}" + ) + response.raise_for_status() + except (httpx.RequestError, httpx.HTTPStatusError): + raise + + @dataclass(frozen=True) class RepositoryListingResult: """RDF4J repository listing result. 
@@ -62,6 +223,7 @@ class Repository: def __init__(self, identifier: str, http_client: httpx.Client): self._identifier = identifier self._http_client = http_client + self._namespace_manager: NamespaceManager | None = None @property def http_client(self): @@ -72,6 +234,15 @@ def identifier(self): """Repository identifier.""" return self._identifier + @property + def namespaces(self) -> NamespaceManager: + """Namespace manager for the repository.""" + if self._namespace_manager is None: + self._namespace_manager = NamespaceManager( + self.identifier, self.http_client + ) + return self._namespace_manager + def health(self) -> bool: """Repository health check. @@ -540,7 +711,7 @@ def http_client(self): return self._http_client @property - def repositories(self): + def repositories(self) -> RepositoryManager: """Server-level repository management operations.""" if self._repository_manager is None: self._repository_manager = RepositoryManager(self.http_client) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_clear.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_clear.py new file mode 100644 index 000000000..73bf61ae7 --- /dev/null +++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_clear.py @@ -0,0 +1,19 @@ +from __future__ import annotations + +from unittest.mock import Mock + +import httpx +import pytest + +from rdflib.contrib.rdf4j.client import Repository + + +def test_repo_namespace_clear(repo: Repository, monkeypatch: pytest.MonkeyPatch): + mock_response = Mock(spec=httpx.Response) + mock_httpx_delete = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "delete", mock_httpx_delete) + repo.namespaces.clear() + mock_httpx_delete.assert_called_once_with( + "/repositories/test-repo/namespaces", + headers={"Accept": "application/sparql-results+json"}, + ) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py new file 
mode 100644 index 000000000..0ab10583e --- /dev/null +++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py @@ -0,0 +1,49 @@ +from __future__ import annotations + +from unittest.mock import Mock + +import httpx +import pytest + +from rdflib.contrib.rdf4j.client import Repository + + +@pytest.mark.parametrize( + "prefix, response_text, response_status_code, expected_value", + [ + [ + "skos", + "http://www.w3.org/2004/02/skos/core#", + 200, + "http://www.w3.org/2004/02/skos/core#", + ], + ["non-existent", "Undefined prefix: non-existent", 404, None], + ], +) +def test_repo_namespace_get( + repo: Repository, + monkeypatch: pytest.MonkeyPatch, + prefix: str, + response_text: str, + response_status_code: int, + expected_value: str | None, +): + mock_response = Mock(spec=httpx.Response) + mock_httpx_get = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) + repo.namespaces.get(prefix) + mock_httpx_get.assert_called_once_with( + f"/repositories/test-repo/namespaces/{prefix}", + headers={"Accept": "text/plain"}, + ) + + +@pytest.mark.parametrize("prefix", [None, ""]) +def test_repo_namespace_get_error( + repo: Repository, monkeypatch: pytest.MonkeyPatch, prefix: str | None +): + mock_response = Mock(spec=httpx.Response) + mock_httpx_get = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) + with pytest.raises(ValueError): + repo.namespaces.get(prefix) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_list.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_list.py new file mode 100644 index 000000000..e77227d80 --- /dev/null +++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_list.py @@ -0,0 +1,71 @@ +from __future__ import annotations + +from unittest.mock import Mock + +import httpx +import pytest + +from rdflib.contrib.rdf4j.client import NamespaceListingResult, Repository +from rdflib.contrib.rdf4j.exceptions import 
RepositoryFormatError +from rdflib.term import IdentifiedNode + + +@pytest.mark.parametrize( + "response_dict, expected_result", + [ + [{"results": {"bindings": []}}, set()], + [ + { + "results": { + "bindings": [ + { + "prefix": {"value": "test"}, + "namespace": {"value": "http://example.com/test/"}, + }, + { + "prefix": {"value": "test2"}, + "namespace": {"value": "http://example.com/test2/"}, + }, + ] + } + }, + { + NamespaceListingResult( + prefix="test", namespace="http://example.com/test/" + ), + NamespaceListingResult( + prefix="test2", namespace="http://example.com/test2/" + ), + }, + ], + ], +) +def test_repo_namespace_list( + repo: Repository, + monkeypatch: pytest.MonkeyPatch, + response_dict: dict, + expected_result: set[IdentifiedNode], +): + mock_response = Mock(spec=httpx.Response, json=lambda: response_dict) + mock_httpx_get = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) + result = repo.namespaces.list() + assert set(result) == expected_result + mock_httpx_get.assert_called_once_with( + "/repositories/test-repo/namespaces", + headers={"Accept": "application/sparql-results+json"}, + ) + + +def test_repo_namespace_list_error(repo: Repository, monkeypatch: pytest.MonkeyPatch): + response_dict = {} + + mock_response = Mock(spec=httpx.Response, json=lambda: response_dict) + mock_httpx_get = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) + with pytest.raises(RepositoryFormatError): + repo.namespaces.list() + mock_httpx_get.assert_called_once_with( + "/repositories/test-repo/namespaces", + headers={"Accept": "application/sparql-results+json"}, + ) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_remove.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_remove.py new file mode 100644 index 000000000..7f98e0e5a --- /dev/null +++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_remove.py @@ -0,0 +1,40 @@ +from __future__ import 
annotations + +from unittest.mock import Mock + +import httpx +import pytest + +from rdflib.contrib.rdf4j.client import Repository + + +@pytest.mark.parametrize( + "prefix", + [ + ["skos"], + ["schema"], + ], +) +def test_repo_namespace_remove( + repo: Repository, + monkeypatch: pytest.MonkeyPatch, + prefix: str, +): + mock_response = Mock(spec=httpx.Response) + mock_httpx_remove = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "delete", mock_httpx_remove) + repo.namespaces.remove(prefix) + mock_httpx_remove.assert_called_once_with( + f"/repositories/test-repo/namespaces/{prefix}", + ) + + +@pytest.mark.parametrize("prefix", [None, ""]) +def test_repo_namespace_remove_error( + repo: Repository, monkeypatch: pytest.MonkeyPatch, prefix: str | None +): + mock_response = Mock(spec=httpx.Response) + mock_httpx_remove = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "delete", mock_httpx_remove) + with pytest.raises(ValueError): + repo.namespaces.remove(prefix) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_set.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_set.py new file mode 100644 index 000000000..541d2ed5a --- /dev/null +++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_set.py @@ -0,0 +1,47 @@ +from __future__ import annotations + +from unittest.mock import Mock + +import httpx +import pytest + +from rdflib.contrib.rdf4j.client import Repository + + +@pytest.mark.parametrize( + "prefix, namespace", + [["test", "http://example.com/test"], ["test2", "http://example.com/test2"]], +) +def test_repo_namespace_set( + repo: Repository, + monkeypatch: pytest.MonkeyPatch, + prefix: str, + namespace: str, +): + mock_response = Mock(spec=httpx.Response) + mock_httpx_put = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) + repo.namespaces.set(prefix, namespace) + mock_httpx_put.assert_called_once_with( + 
f"/repositories/test-repo/namespaces/{prefix}", + headers={"Content-Type": "text/plain"}, + content=namespace, + ) + + +@pytest.mark.parametrize( + "prefix, namespace", + [ + [None, "http://example.com/test"], + ["test", None], + ["", "http://example.com/test"], + ["test", ""], + [None, None], + ["", ""], + ], +) +def test_repo_namespace_set_error( + repo: Repository, monkeypatch: pytest.MonkeyPatch, prefix: str, namespace: str +): + with pytest.raises(ValueError): + repo.namespaces.set(prefix, namespace) From fca2b9971bac65281c1504153ce017c45680b2a6 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Tue, 4 Nov 2025 15:35:18 +1000 Subject: [PATCH 24/54] chore: fix mypy issues --- rdflib/contrib/rdf4j/client.py | 12 ++++++------ rdflib/contrib/rdf4j/util.py | 4 +++- test/test_rdf4j/test_e2e/test_repo_management.py | 1 + .../test_unit/repository/test_repo_get.py | 2 +- .../repository/test_repo_namespace_get.py | 2 +- .../repository/test_repo_namespace_list.py | 2 +- .../repository/test_repo_namespace_remove.py | 2 +- .../test_unit/repository/test_repo_overwrite.py | 16 ++++++++-------- .../test_unit/repository/test_repo_upload.py | 14 +++++++------- 9 files changed, 29 insertions(+), 26 deletions(-) diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index d6cad95af..ed604e580 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -297,7 +297,7 @@ def size( httpx.RequestError: On network/connection issues. httpx.HTTPStatusError: Unhandled status code error. 
""" - params = {} + params: dict[str, str] = {} build_context_param(params, graph_name) try: response = self.http_client.get( @@ -336,7 +336,7 @@ def graphs(self) -> list[IdentifiedNode]: ) response.raise_for_status() try: - values = [] + values: list[IdentifiedNode] = [] for row in response.json()["results"]["bindings"]: value = row["contextID"]["value"] value_type = row["contextID"]["type"] @@ -391,7 +391,7 @@ def get( if content_type is None: content_type = "application/n-quads" headers = {"Accept": content_type} - params = {} + params: dict[str, str] = {} build_context_param(params, graph_name) build_spo_param(params, subj, pred, obj) build_infer_param(params, infer=infer) @@ -486,7 +486,7 @@ def overwrite( try: headers = {"Content-Type": content_type or "application/n-quads"} - params = {} + params: dict[str, str] = {} build_context_param(params, graph_name) if base_uri is not None: params["baseURI"] = base_uri @@ -527,7 +527,7 @@ def delete( httpx.RequestError: On network/connection issues. httpx.HTTPStatusError: Unhandled status code error. """ - params = {} + params: dict[str, str] = {} build_context_param(params, graph_name) build_spo_param(params, subj, pred, obj) @@ -698,7 +698,7 @@ def __init__( raise RDF4JUnsupportedProtocolError( f"RDF4J server protocol version {self.protocol} is not supported. Minimum required version is 12." 
) - self._repository_manager = None + self._repository_manager: RepositoryManager | None = None def __enter__(self): return self diff --git a/rdflib/contrib/rdf4j/util.py b/rdflib/contrib/rdf4j/util.py index 5b78910f2..3da15f165 100644 --- a/rdflib/contrib/rdf4j/util.py +++ b/rdflib/contrib/rdf4j/util.py @@ -34,7 +34,8 @@ def build_context_param( elif graph_name is not None and isinstance(graph_name, str): params["context"] = URIRef(graph_name).n3() elif graph_name is not None and isinstance(graph_name, t.Iterable): - graph_names = ",".join([x.n3() for x in graph_name]) + # type error: "str" has no attribute "n3" + graph_names = ",".join([x.n3() for x in graph_name]) # type: ignore[attr-defined] params["context"] = graph_names @@ -95,6 +96,7 @@ def rdf_payload_to_stream( A tuple containing the file-like object and a boolean indicating whether the immediate caller should close the stream. """ + stream: t.BinaryIO if isinstance(data, str): # Check if it looks like a file path. Assumes file path length is less than 260. if "\n" not in data and len(data) < 260: diff --git a/test/test_rdf4j/test_e2e/test_repo_management.py b/test/test_rdf4j/test_e2e/test_repo_management.py index f91683f27..162ea8130 100644 --- a/test/test_rdf4j/test_e2e/test_repo_management.py +++ b/test/test_rdf4j/test_e2e/test_repo_management.py @@ -141,6 +141,7 @@ def test_repo_manager_crud(client: RDF4JClient): _:b-test _:c _:graph . 
""" ds2 = Dataset().parse(data=data, format="nquads") + assert isinstance(ds, Dataset) for graph in ds.graphs(): assert any(isomorphic(graph, graph2) for graph2 in ds2.graphs()) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_get.py b/test/test_rdf4j/test_unit/repository/test_repo_get.py index 701215fac..913c44de8 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_get.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_get.py @@ -73,7 +73,7 @@ def test_repo_content_type( result = repo.get(content_type=content_type) headers = {"Accept": content_type or "application/n-quads"} - params = {} + params: dict[str, str] = {} mock_httpx_get.assert_called_once_with( "/repositories/test-repo/statements", headers=headers, diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py index 0ab10583e..ee19d5bf9 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py @@ -46,4 +46,4 @@ def test_repo_namespace_get_error( mock_httpx_get = Mock(return_value=mock_response) monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) with pytest.raises(ValueError): - repo.namespaces.get(prefix) + repo.namespaces.get(prefix) # type: ignore diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_list.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_list.py index e77227d80..b7f52c663 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_namespace_list.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_list.py @@ -58,7 +58,7 @@ def test_repo_namespace_list( def test_repo_namespace_list_error(repo: Repository, monkeypatch: pytest.MonkeyPatch): - response_dict = {} + response_dict: dict[str, str] = {} mock_response = Mock(spec=httpx.Response, json=lambda: response_dict) mock_httpx_get = Mock(return_value=mock_response) diff --git 
a/test/test_rdf4j/test_unit/repository/test_repo_namespace_remove.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_remove.py index 7f98e0e5a..5f88f73b2 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_namespace_remove.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_remove.py @@ -37,4 +37,4 @@ def test_repo_namespace_remove_error( mock_httpx_remove = Mock(return_value=mock_response) monkeypatch.setattr(httpx.Client, "delete", mock_httpx_remove) with pytest.raises(ValueError): - repo.namespaces.remove(prefix) + repo.namespaces.remove(prefix) # type: ignore diff --git a/test/test_rdf4j/test_unit/repository/test_repo_overwrite.py b/test/test_rdf4j/test_unit/repository/test_repo_overwrite.py index 872d690fb..755c7db52 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_overwrite.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_overwrite.py @@ -24,7 +24,7 @@ def test_repo_overwrite_graph( headers = { "Content-Type": "application/n-quads", } - params = {} + params: dict[str, str] = {} graph = class_type().parse(file_path) repo.overwrite(graph) mock.assert_called_once_with( @@ -47,7 +47,7 @@ def test_repo_overwrite_file_path(repo: Repository, monkeypatch: pytest.MonkeyPa headers = { "Content-Type": "application/n-quads", } - params = {} + params: dict[str, str] = {} repo.overwrite(str(file_path), content_type="application/n-quads") mock.assert_called_once_with( "/repositories/test-repo/statements", @@ -74,7 +74,7 @@ def test_repo_overwrite_buffered_reader( headers = { "Content-Type": "application/n-quads", } - params = {} + params: dict[str, str] = {} repo.overwrite(file, content_type="application/n-quads") mock.assert_called_once_with( "/repositories/test-repo/statements", @@ -103,7 +103,7 @@ def test_repo_overwrite_data( headers = { "Content-Type": "application/n-quads", } - params = {} + params: dict[str, str] = {} repo.overwrite(data, content_type="application/n-quads") mock.assert_called_once_with( 
"/repositories/test-repo/statements", @@ -144,7 +144,7 @@ def test_repo_overwrite_graph_name( "Content-Type": "application/n-quads", } if graph_name is None: - params = {} + params: dict[str, str] = {} else: params = {"context": expected_graph_name_param} repo.overwrite("", graph_name=graph_name, content_type="application/n-quads") @@ -194,7 +194,7 @@ def test_repo_overwrite_nonexistent_file_path( headers = { "Content-Type": "application/n-quads", } - params = {} + params: dict[str, str] = {} nonexistent_path = "/nonexistent/path/file.nq" repo.overwrite(nonexistent_path, content_type="application/n-quads") mock.assert_called_once_with( @@ -218,7 +218,7 @@ def test_repo_overwrite_string_with_newline( headers = { "Content-Type": "application/n-quads", } - params = {} + params: dict[str, str] = {} data_with_newline = " .\n ." repo.overwrite(data_with_newline, content_type="application/n-quads") mock.assert_called_once_with( @@ -240,7 +240,7 @@ def test_repo_overwrite_long_string(repo: Repository, monkeypatch: pytest.Monkey headers = { "Content-Type": "application/n-quads", } - params = {} + params: dict[str, str] = {} # Create a string longer than 260 characters long_string = "a" * 261 repo.overwrite(long_string, content_type="application/n-quads") diff --git a/test/test_rdf4j/test_unit/repository/test_repo_upload.py b/test/test_rdf4j/test_unit/repository/test_repo_upload.py index 2f0522e85..5486dc946 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_upload.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_upload.py @@ -22,7 +22,7 @@ def test_repo_upload_graph( headers = { "Content-Type": "application/n-quads", } - params = {} + params: dict[str, str] = {} graph = class_type().parse(file_path) repo.upload(graph) mock.assert_called_once_with( @@ -45,7 +45,7 @@ def test_repo_upload_file_path(repo: Repository, monkeypatch: pytest.MonkeyPatch headers = { "Content-Type": "application/n-quads", } - params = {} + params: dict[str, str] = {} 
repo.upload(str(file_path), content_type="application/n-quads") mock.assert_called_once_with( "/repositories/test-repo/statements", @@ -70,7 +70,7 @@ def test_repo_upload_buffered_reader(repo: Repository, monkeypatch: pytest.Monke headers = { "Content-Type": "application/n-quads", } - params = {} + params: dict[str, str] = {} repo.upload(file, content_type="application/n-quads") mock.assert_called_once_with( "/repositories/test-repo/statements", @@ -99,7 +99,7 @@ def test_repo_upload_data( headers = { "Content-Type": "application/n-quads", } - params = {} + params: dict[str, str] = {} repo.upload(data, content_type="application/n-quads") mock.assert_called_once_with( "/repositories/test-repo/statements", @@ -151,7 +151,7 @@ def test_repo_upload_nonexistent_file_path( headers = { "Content-Type": "application/n-quads", } - params = {} + params: dict[str, str] = {} nonexistent_path = "/nonexistent/path/file.nq" repo.upload(nonexistent_path, content_type="application/n-quads") mock.assert_called_once_with( @@ -175,7 +175,7 @@ def test_repo_upload_string_with_newline( headers = { "Content-Type": "application/n-quads", } - params = {} + params: dict[str, str] = {} data_with_newline = " .\n ." 
repo.upload(data_with_newline, content_type="application/n-quads") mock.assert_called_once_with( @@ -197,7 +197,7 @@ def test_repo_upload_long_string(repo: Repository, monkeypatch: pytest.MonkeyPat headers = { "Content-Type": "application/n-quads", } - params = {} + params: dict[str, str] = {} # Create a string longer than 260 characters long_string = "a" * 261 repo.upload(long_string, content_type="application/n-quads") From 0c89e592c1fcde1a409cfab802648e493b99dee0 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Tue, 4 Nov 2025 15:47:12 +1000 Subject: [PATCH 25/54] test: add e2e tests for RDF4J Repository NamespaceManager --- rdflib/contrib/rdf4j/util.py | 2 +- test/test_rdf4j/test_e2e/conftest.py | 11 ++++ .../test_e2e/test_repo_management.py | 10 +--- .../test_e2e/test_repo_namespace.py | 51 +++++++++++++++++++ .../repository/test_repo_namespace_get.py | 2 +- .../repository/test_repo_namespace_remove.py | 2 +- 6 files changed, 67 insertions(+), 11 deletions(-) create mode 100644 test/test_rdf4j/test_e2e/test_repo_namespace.py diff --git a/rdflib/contrib/rdf4j/util.py b/rdflib/contrib/rdf4j/util.py index 3da15f165..1dff0e56f 100644 --- a/rdflib/contrib/rdf4j/util.py +++ b/rdflib/contrib/rdf4j/util.py @@ -35,7 +35,7 @@ def build_context_param( params["context"] = URIRef(graph_name).n3() elif graph_name is not None and isinstance(graph_name, t.Iterable): # type error: "str" has no attribute "n3" - graph_names = ",".join([x.n3() for x in graph_name]) # type: ignore[attr-defined] + graph_names = ",".join([x.n3() for x in graph_name]) # type: ignore[attr-defined] params["context"] = graph_names diff --git a/test/test_rdf4j/test_e2e/conftest.py b/test/test_rdf4j/test_e2e/conftest.py index 9592afeac..a960785d4 100644 --- a/test/test_rdf4j/test_e2e/conftest.py +++ b/test/test_rdf4j/test_e2e/conftest.py @@ -26,3 +26,14 @@ def client(graphdb_container: DockerContainer): port = graphdb_container.get_exposed_port(7200) with RDF4JClient(f"http://localhost:{port}/", 
auth=("admin", "admin")) as client: yield client + + +@pytest.fixture(scope="function") +def repo(client: RDF4JClient): + config_path = pathlib.Path(__file__).parent / "repo-configs/test-repo-config.ttl" + with open(config_path) as file: + config = file.read() + + repo = client.repositories.create("test-repo", config) + assert repo.identifier == "test-repo" + yield repo diff --git a/test/test_rdf4j/test_e2e/test_repo_management.py b/test/test_rdf4j/test_e2e/test_repo_management.py index 162ea8130..a9601315f 100644 --- a/test/test_rdf4j/test_e2e/test_repo_management.py +++ b/test/test_rdf4j/test_e2e/test_repo_management.py @@ -6,6 +6,7 @@ from rdflib import BNode, Dataset, URIRef from rdflib.compare import isomorphic from rdflib.contrib.rdf4j import RDF4JClient +from rdflib.contrib.rdf4j.client import Repository from rdflib.contrib.rdf4j.exceptions import ( RepositoryAlreadyExistsError, RepositoryFormatError, @@ -155,14 +156,7 @@ def test_repo_manager_crud(client: RDF4JClient): @pytest.mark.testcontainer -def test_repo_not_healthy(client: RDF4JClient, monkeypatch): - config_path = pathlib.Path(__file__).parent / "repo-configs/test-repo-config.ttl" - with open(config_path) as file: - config = file.read() - - repo = client.repositories.create("test-repo", config) - assert repo.identifier == "test-repo" - +def test_repo_not_healthy(repo: Repository, monkeypatch: pytest.MonkeyPatch): class MockResponse: def raise_for_status(self): raise httpx.HTTPStatusError( diff --git a/test/test_rdf4j/test_e2e/test_repo_namespace.py b/test/test_rdf4j/test_e2e/test_repo_namespace.py new file mode 100644 index 000000000..4f7f774d2 --- /dev/null +++ b/test/test_rdf4j/test_e2e/test_repo_namespace.py @@ -0,0 +1,51 @@ +import pytest + +from rdflib.contrib.rdf4j.client import NamespaceListingResult, Repository + + +@pytest.mark.testcontainer +def test_e2e_repo_namespace_crud(repo: Repository): + assert repo.namespaces.list() == [] + + # Delete a non-existent prefix + 
repo.namespaces.remove("non-existent") + + # Retrieve a non-existent prefix + assert repo.namespaces.get("non-existent") is None + + # Set a new prefix + repo.namespaces.set("test", "http://example.org/test/") + assert set(repo.namespaces.list()) == { + NamespaceListingResult(prefix="test", namespace="http://example.org/test/") + } + assert repo.namespaces.get("test") == "http://example.org/test/" + + # Set another + repo.namespaces.set("test2", "http://example.org/test2/") + assert set(repo.namespaces.list()) == { + NamespaceListingResult(prefix="test", namespace="http://example.org/test/"), + NamespaceListingResult(prefix="test2", namespace="http://example.org/test2/"), + } + assert repo.namespaces.get("test2") == "http://example.org/test2/" + + # Update an existing prefix (overwrite) + repo.namespaces.set("test", "http://example.org/test-updated/") + assert set(repo.namespaces.list()) == { + NamespaceListingResult(prefix="test", namespace="http://example.org/test-updated/"), + NamespaceListingResult(prefix="test2", namespace="http://example.org/test2/"), + } + assert repo.namespaces.get("test") == "http://example.org/test-updated/" + + # Delete test prefix + repo.namespaces.remove("test") + assert set(repo.namespaces.list()) == { + NamespaceListingResult(prefix="test2", namespace="http://example.org/test2/") + } + assert repo.namespaces.get("test") is None + assert repo.namespaces.get("test2") == "http://example.org/test2/" + + # Clear + repo.namespaces.clear() + assert repo.namespaces.list() == [] + assert repo.namespaces.get("test") is None + assert repo.namespaces.get("test2") is None diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py index ee19d5bf9..03c670922 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py @@ -46,4 +46,4 @@ def test_repo_namespace_get_error( 
mock_httpx_get = Mock(return_value=mock_response) monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) with pytest.raises(ValueError): - repo.namespaces.get(prefix) # type: ignore + repo.namespaces.get(prefix) # type: ignore diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_remove.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_remove.py index 5f88f73b2..18a69c96f 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_namespace_remove.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_remove.py @@ -37,4 +37,4 @@ def test_repo_namespace_remove_error( mock_httpx_remove = Mock(return_value=mock_response) monkeypatch.setattr(httpx.Client, "delete", mock_httpx_remove) with pytest.raises(ValueError): - repo.namespaces.remove(prefix) # type: ignore + repo.namespaces.remove(prefix) # type: ignore From b51f4f3e048476f8238271d1567494970bee04f0 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Tue, 4 Nov 2025 16:08:22 +1000 Subject: [PATCH 26/54] chore: doc improvements --- rdflib/contrib/rdf4j/client.py | 6 ++++-- rdflib/contrib/rdf4j/util.py | 2 ++ 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index ed604e580..bb74f8d44 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -376,13 +376,15 @@ def get( To query just the default graph, use [`DATASET_DEFAULT_GRAPH_ID`][rdflib.graph.DATASET_DEFAULT_GRAPH_ID]. - infer: Specifies whether inferred statements should be included in the result. + infer: Specifies whether inferred statements should be included in the + result. content_type: The content type of the response. A triple-based format returns a [Graph][rdflib.graph.Graph], while a quad-based format returns a [`Dataset`][rdflib.graph.Dataset]. Returns: - A [`Graph`][rdflib.graph.Graph] or [`Dataset`][rdflib.graph.Dataset] object. 
+ A [`Graph`][rdflib.graph.Graph] or [`Dataset`][rdflib.graph.Dataset] object + with the repository namespace prefixes bound to it. Raises: httpx.RequestError: On network/connection issues. diff --git a/rdflib/contrib/rdf4j/util.py b/rdflib/contrib/rdf4j/util.py index 1dff0e56f..d6eefc1ec 100644 --- a/rdflib/contrib/rdf4j/util.py +++ b/rdflib/contrib/rdf4j/util.py @@ -1,3 +1,5 @@ +"""RDF4J utility functions.""" + from __future__ import annotations import io From c7a05c832fa4a9c9b0cbcd3f61a16e0adfcf15d6 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Tue, 4 Nov 2025 16:25:57 +1000 Subject: [PATCH 27/54] feat: the repository's namespace prefixes are now bound to the return object of Repository.get() method --- rdflib/contrib/rdf4j/client.py | 8 ++++-- .../test_e2e/test_repo_namespace.py | 4 ++- .../test_unit/repository/test_repo_get.py | 5 ++++ .../repository/test_repo_namespace_get.py | 25 ++++++++++++++++++- 4 files changed, 38 insertions(+), 4 deletions(-) diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index bb74f8d44..c775292a0 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -412,8 +412,12 @@ def get( ] try: if content_type in triple_formats: - return Graph().parse(data=response.text, format=content_type) - return Dataset().parse(data=response.text, format=content_type) + retval = Graph().parse(data=response.text, format=content_type) + else: + retval = Dataset().parse(data=response.text, format=content_type) + for result in self.namespaces.list(): + retval.bind(result.prefix, result.namespace, replace=True) + return retval except Exception as err: raise RDFLibParserError(f"Error parsing RDF: {err}") from err except (httpx.RequestError, httpx.HTTPStatusError): diff --git a/test/test_rdf4j/test_e2e/test_repo_namespace.py b/test/test_rdf4j/test_e2e/test_repo_namespace.py index 4f7f774d2..5ea93bad8 100644 --- a/test/test_rdf4j/test_e2e/test_repo_namespace.py +++ 
b/test/test_rdf4j/test_e2e/test_repo_namespace.py @@ -31,7 +31,9 @@ def test_e2e_repo_namespace_crud(repo: Repository): # Update an existing prefix (overwrite) repo.namespaces.set("test", "http://example.org/test-updated/") assert set(repo.namespaces.list()) == { - NamespaceListingResult(prefix="test", namespace="http://example.org/test-updated/"), + NamespaceListingResult( + prefix="test", namespace="http://example.org/test-updated/" + ), NamespaceListingResult(prefix="test2", namespace="http://example.org/test2/"), } assert repo.namespaces.get("test") == "http://example.org/test-updated/" diff --git a/test/test_rdf4j/test_unit/repository/test_repo_get.py b/test/test_rdf4j/test_unit/repository/test_repo_get.py index 913c44de8..ec151e27c 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_get.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_get.py @@ -8,6 +8,7 @@ from rdflib import Dataset, Graph from rdflib.contrib.rdf4j.client import ( + NamespaceManager, ObjectType, PredicateType, Repository, @@ -70,6 +71,7 @@ def test_repo_content_type( mock_response = Mock(spec=httpx.Response, text=data) mock_httpx_get = Mock(return_value=mock_response) monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) + monkeypatch.setattr(NamespaceManager, "list", lambda _: []) result = repo.get(content_type=content_type) headers = {"Accept": content_type or "application/n-quads"} @@ -109,6 +111,7 @@ def test_repo_get_graph_name( mock_response = Mock(spec=httpx.Response, text="") mock_httpx_get = Mock(return_value=mock_response) monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) + monkeypatch.setattr(NamespaceManager, "list", lambda _: []) headers = { "Accept": "application/n-quads", } @@ -135,6 +138,7 @@ def test_repo_get_infer( mock_response = Mock(spec=httpx.Response, text="") mock_httpx_get = Mock(return_value=mock_response) monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) + monkeypatch.setattr(NamespaceManager, "list", lambda _: []) headers = { 
"Accept": "application/n-quads", } @@ -189,6 +193,7 @@ def test_repo_get_spo( mock_response = Mock(spec=httpx.Response, text="") mock_httpx_get = Mock(return_value=mock_response) monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) + monkeypatch.setattr(NamespaceManager, "list", lambda _: []) headers = { "Accept": "application/n-quads", } diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py index 03c670922..8f0f66ee2 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py @@ -5,7 +5,12 @@ import httpx import pytest -from rdflib.contrib.rdf4j.client import Repository +from rdflib import Dataset, URIRef +from rdflib.contrib.rdf4j.client import ( + NamespaceListingResult, + NamespaceManager, + Repository, +) @pytest.mark.parametrize( @@ -47,3 +52,21 @@ def test_repo_namespace_get_error( monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) with pytest.raises(ValueError): repo.namespaces.get(prefix) # type: ignore + + +def test_repo_get_with_namespace_binding( + repo: Repository, monkeypatch: pytest.MonkeyPatch +): + mock_response = Mock(spec=httpx.Response, text="") + mock_httpx_get = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) + monkeypatch.setattr( + NamespaceManager, + "list", + lambda _: [ + NamespaceListingResult(prefix="test", namespace="http://example.org/test/") + ], + ) + ds = repo.get() + assert isinstance(ds, Dataset) + assert ("test", URIRef("http://example.org/test/")) in set(ds.namespaces()) From b9596c26bc9577cf1c847e6ec4332171ee2d8594 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Wed, 5 Nov 2025 15:45:15 +1000 Subject: [PATCH 28/54] feat: add Repository GraphStoreManager --- rdflib/contrib/rdf4j/client.py | 157 +++++++++++++++++- .../test_e2e/test_repo_management.py | 6 +- .../repository/test_repo_graph_store_add.py | 
46 +++++ .../repository/test_repo_graph_store_clear.py | 42 +++++ .../repository/test_repo_graph_store_get.py | 47 ++++++ .../test_repo_graph_store_overwrite.py | 46 +++++ .../test_unit/repository/test_repo_graphs.py | 4 +- 7 files changed, 340 insertions(+), 8 deletions(-) create mode 100644 test/test_rdf4j/test_unit/repository/test_repo_graph_store_add.py create mode 100644 test/test_rdf4j/test_unit/repository/test_repo_graph_store_clear.py create mode 100644 test/test_rdf4j/test_unit/repository/test_repo_graph_store_get.py create mode 100644 test/test_rdf4j/test_unit/repository/test_repo_graph_store_overwrite.py diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index c775292a0..17a99486d 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -24,7 +24,7 @@ build_spo_param, rdf_payload_to_stream, ) -from rdflib.graph import Dataset, Graph +from rdflib.graph import DATASET_DEFAULT_GRAPH_ID, Dataset, Graph from rdflib.term import IdentifiedNode, Literal, URIRef SubjectType = t.Union[IdentifiedNode, None] @@ -149,7 +149,8 @@ def set(self, prefix: str, namespace: str): """Set the namespace URI for a given prefix. !!! note - If the prefix was previously mapped to a different namespace, this will be overwritten. + If the prefix was previously mapped to a different namespace, this will be + overwritten. Parameters: prefix: The prefix to set. @@ -193,6 +194,146 @@ def remove(self, prefix: str): raise +class GraphStoreManager: + """An RDF4J Graph Store Protocol Client. + + Parameters: + identifier: The identifier of the repository. + http_client: The httpx.Client instance. 
+ """ + + def __init__(self, identifier: str, http_client: httpx.Client): + self._identifier = identifier + self._http_client = http_client + self._content_type = "application/n-triples" + + @property + def http_client(self): + return self._http_client + + @property + def identifier(self): + """Repository identifier.""" + return self._identifier + + @staticmethod + def _build_graph_name_params(graph_name: URIRef | str): + params = {} + if isinstance(graph_name, URIRef) and graph_name == DATASET_DEFAULT_GRAPH_ID: + params["default"] = "" + else: + params["graph"] = str(graph_name) + return params + + def get(self, graph_name: URIRef | str) -> Graph: + """Fetch all statements in the specified graph. + + Parameters: + graph_name: The graph name of the graph. + + For the default graph, use + [`DATASET_DEFAULT_GRAPH_ID`][rdflib.graph.DATASET_DEFAULT_GRAPH_ID]. + + Returns: + A [`Graph`][rdflib.graph.Graph] object containing all statements in the + graph. + """ + if not graph_name: + raise ValueError("Graph name must be provided.") + headers = { + "Accept": self._content_type, + } + params = self._build_graph_name_params(graph_name) + + response = self.http_client.get( + f"/repositories/{self.identifier}/rdf-graphs/service", + headers=headers, + params=params, + ) + response.raise_for_status() + + return Graph(identifier=graph_name).parse( + data=response.text, format=self._content_type + ) + + def add(self, graph_name: str, data: str | bytes | BinaryIO | Graph): + """Add statements to the specified graph. + + Parameters: + graph_name: The graph name of the graph. + + For the default graph, use + [`DATASET_DEFAULT_GRAPH_ID`][rdflib.graph.DATASET_DEFAULT_GRAPH_ID]. + + data: The RDF data to add. 
+ """ + if not graph_name: + raise ValueError("Graph name must be provided.") + stream, should_close = rdf_payload_to_stream(data) + headers = { + "Content-Type": self._content_type, + } + params = self._build_graph_name_params(graph_name) + try: + response = self.http_client.post( + f"/repositories/{self.identifier}/rdf-graphs/service", + headers=headers, + params=params, + content=stream, + ) + response.raise_for_status() + finally: + if should_close: + stream.close() + + def overwrite(self, graph_name: str, data: str | bytes | BinaryIO | Graph): + """Overwrite statements in the specified graph. + + Parameters: + graph_name: The graph name of the graph. + + For the default graph, use + [`DATASET_DEFAULT_GRAPH_ID`][rdflib.graph.DATASET_DEFAULT_GRAPH_ID]. + + data: The RDF data to overwrite with. + """ + if not graph_name: + raise ValueError("Graph name must be provided.") + stream, should_close = rdf_payload_to_stream(data) + headers = { + "Content-Type": self._content_type, + } + params = self._build_graph_name_params(graph_name) + try: + response = self.http_client.put( + f"/repositories/{self.identifier}/rdf-graphs/service", + headers=headers, + params=params, + content=stream, + ) + response.raise_for_status() + finally: + if should_close: + stream.close() + + def clear(self, graph_name: str): + """Clear all statements in the specified graph. + + Parameters: + graph_name: The graph name of the graph. + + For the default graph, use + [`DATASET_DEFAULT_GRAPH_ID`][rdflib.graph.DATASET_DEFAULT_GRAPH_ID]. + """ + if not graph_name: + raise ValueError("Graph name must be provided.") + params = self._build_graph_name_params(graph_name) + response = self.http_client.delete( + f"/repositories/{self.identifier}/rdf-graphs/service", params=params + ) + response.raise_for_status() + + @dataclass(frozen=True) class RepositoryListingResult: """RDF4J repository listing result. 
@@ -224,6 +365,7 @@ def __init__(self, identifier: str, http_client: httpx.Client): self._identifier = identifier self._http_client = http_client self._namespace_manager: NamespaceManager | None = None + self._graph_store_manager: GraphStoreManager | None = None @property def http_client(self): @@ -243,6 +385,15 @@ def namespaces(self) -> NamespaceManager: ) return self._namespace_manager + @property + def graphs(self) -> GraphStoreManager: + """Graph store manager for the repository.""" + if self._graph_store_manager is None: + self._graph_store_manager = GraphStoreManager( + self.identifier, self.http_client + ) + return self._graph_store_manager + def health(self) -> bool: """Repository health check. @@ -316,7 +467,7 @@ def size( except (httpx.RequestError, httpx.HTTPStatusError): raise - def graphs(self) -> list[IdentifiedNode]: + def graph_names(self) -> list[IdentifiedNode]: """Get a list of all graph names in the repository. Returns: diff --git a/test/test_rdf4j/test_e2e/test_repo_management.py b/test/test_rdf4j/test_e2e/test_repo_management.py index a9601315f..55352f2fe 100644 --- a/test/test_rdf4j/test_e2e/test_repo_management.py +++ b/test/test_rdf4j/test_e2e/test_repo_management.py @@ -73,7 +73,7 @@ def test_repo_manager_crud(client: RDF4JClient): with open(pathlib.Path(__file__).parent.parent / "data/quads-2.nq", "rb") as file: repo.overwrite(file) assert repo.size() == 1 - graphs = repo.graphs() + graphs = repo.graph_names() assert len(graphs) == 1 assert any(value in graphs for value in [URIRef("urn:graph:a3")]) ds = repo.get() @@ -90,7 +90,7 @@ def test_repo_manager_crud(client: RDF4JClient): assert repo.size() == 2 ds = repo.get() assert len(ds) == 2 - graphs = repo.graphs() + graphs = repo.graph_names() assert len(graphs) == 2 assert any( value in graphs for value in [URIRef("urn:graph:a"), URIRef("urn:graph:b")] @@ -131,7 +131,7 @@ def test_repo_manager_crud(client: RDF4JClient): assert repo.size() == 2 ds = repo.get() assert len(ds) == 2 - graphs 
= repo.graphs() + graphs = repo.graph_names() assert len(graphs) == 2 assert any( value in graphs diff --git a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_add.py b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_add.py new file mode 100644 index 000000000..fc2fdc5e2 --- /dev/null +++ b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_add.py @@ -0,0 +1,46 @@ +from __future__ import annotations + +from unittest.mock import ANY, Mock + +import httpx +import pytest + +from rdflib import URIRef +from rdflib.contrib.rdf4j.client import ( + Repository, +) + + +@pytest.mark.parametrize( + "graph_name, expected_params", + [ + ["http://example.com/graph", {"graph": "http://example.com/graph"}], + [URIRef("http://example.com/graph"), {"graph": "http://example.com/graph"}], + ], +) +def test_repo_graph_store_add( + repo: Repository, + monkeypatch: pytest.MonkeyPatch, + graph_name: str | URIRef, + expected_params: dict[str, str], +): + data = " ." + mock_response = Mock(spec=httpx.Response, text=data) + mock_httpx_post = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "post", mock_httpx_post) + repo.graphs.add(graph_name, data) + headers = {"Content-Type": "application/n-triples"} + mock_httpx_post.assert_called_once_with( + "/repositories/test-repo/rdf-graphs/service", + headers=headers, + params=expected_params, + content=ANY, + ) + + +@pytest.mark.parametrize("graph_name", [None, ""]) +def test_repo_graph_store_add_invalid_graph_name( + repo: Repository, graph_name: str | None +): + with pytest.raises(ValueError): + repo.graphs.add(graph_name, "") # type: ignore diff --git a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_clear.py b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_clear.py new file mode 100644 index 000000000..0e6f7c346 --- /dev/null +++ b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_clear.py @@ -0,0 +1,42 @@ +from __future__ import annotations + +from unittest.mock 
import Mock + +import httpx +import pytest + +from rdflib import URIRef +from rdflib.contrib.rdf4j.client import ( + Repository, +) + + +@pytest.mark.parametrize( + "graph_name, expected_params", + [ + ["http://example.com/graph", {"graph": "http://example.com/graph"}], + [URIRef("http://example.com/graph"), {"graph": "http://example.com/graph"}], + ], +) +def test_repo_graph_store_add( + repo: Repository, + monkeypatch: pytest.MonkeyPatch, + graph_name: str | URIRef, + expected_params: dict[str, str], +): + mock_response = Mock(spec=httpx.Response) + mock_httpx_delete = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "delete", mock_httpx_delete) + repo.graphs.clear(graph_name) + mock_httpx_delete.assert_called_once_with( + "/repositories/test-repo/rdf-graphs/service", + params=expected_params, + ) + + +@pytest.mark.parametrize("graph_name", [None, ""]) +def test_repo_graph_store_clear_invalid_graph_name( + repo: Repository, graph_name: str | None +): + with pytest.raises(ValueError): + repo.graphs.clear(graph_name) # type: ignore diff --git a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_get.py b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_get.py new file mode 100644 index 000000000..03b4d0c7d --- /dev/null +++ b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_get.py @@ -0,0 +1,47 @@ +from __future__ import annotations + +from unittest.mock import Mock + +import httpx +import pytest + +from rdflib import Graph, URIRef +from rdflib.contrib.rdf4j.client import ( + Repository, +) + + +@pytest.mark.parametrize( + "graph_name, expected_params", + [ + ["http://example.com/graph", {"graph": "http://example.com/graph"}], + [URIRef("http://example.com/graph"), {"graph": "http://example.com/graph"}], + ], +) +def test_repo_graph_store_get( + repo: Repository, + monkeypatch: pytest.MonkeyPatch, + graph_name: str | URIRef, + expected_params: dict[str, str], +): + data = " ." 
+ mock_response = Mock(spec=httpx.Response, text=data) + mock_httpx_get = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) + graph = repo.graphs.get(graph_name) + headers = {"Accept": "application/n-triples"} + mock_httpx_get.assert_called_once_with( + "/repositories/test-repo/rdf-graphs/service", + headers=headers, + params=expected_params, + ) + assert isinstance(graph, Graph) + assert graph.isomorphic(Graph().parse(data=data)) + + +@pytest.mark.parametrize("graph_name", [None, ""]) +def test_repo_graph_store_get_invalid_graph_name( + repo: Repository, graph_name: str | None +): + with pytest.raises(ValueError): + repo.graphs.get(graph_name) # type: ignore diff --git a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_overwrite.py b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_overwrite.py new file mode 100644 index 000000000..6ffed5474 --- /dev/null +++ b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_overwrite.py @@ -0,0 +1,46 @@ +from __future__ import annotations + +from unittest.mock import ANY, Mock + +import httpx +import pytest + +from rdflib import URIRef +from rdflib.contrib.rdf4j.client import ( + Repository, +) + + +@pytest.mark.parametrize( + "graph_name, expected_params", + [ + ["http://example.com/graph", {"graph": "http://example.com/graph"}], + [URIRef("http://example.com/graph"), {"graph": "http://example.com/graph"}], + ], +) +def test_repo_graph_store_overwrite( + repo: Repository, + monkeypatch: pytest.MonkeyPatch, + graph_name: str | URIRef, + expected_params: dict[str, str], +): + data = " ." 
+ mock_response = Mock(spec=httpx.Response) + mock_httpx_put = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) + repo.graphs.overwrite(graph_name, data) + headers = {"Content-Type": "application/n-triples"} + mock_httpx_put.assert_called_once_with( + "/repositories/test-repo/rdf-graphs/service", + headers=headers, + params=expected_params, + content=ANY, + ) + + +@pytest.mark.parametrize("graph_name", [None, ""]) +def test_repo_graph_store_overwrite_invalid_graph_name( + repo: Repository, graph_name: str | None +): + with pytest.raises(ValueError): + repo.graphs.overwrite(graph_name, "") # type: ignore diff --git a/test/test_rdf4j/test_unit/repository/test_repo_graphs.py b/test/test_rdf4j/test_unit/repository/test_repo_graphs.py index b5cc6ef13..5401ed909 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_graphs.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_graphs.py @@ -60,7 +60,7 @@ def test_repo_graphs( mock_response = Mock(spec=httpx.Response, json=lambda: response_dict) mock_httpx_get = Mock(return_value=mock_response) monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) - result = repo.graphs() + result = repo.graph_names() mock_httpx_get.assert_called_once_with( "/repositories/test-repo/contexts", headers={"Accept": "application/sparql-results+json"}, @@ -95,7 +95,7 @@ def test_repo_graphs_invalid_response( mock_httpx_get = Mock(return_value=mock_response) monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) with pytest.raises(expected_error): - repo.graphs() + repo.graph_names() mock_httpx_get.assert_called_once_with( "/repositories/test-repo/contexts", headers={"Accept": "application/sparql-results+json"}, From 39b83d87e013279c51c26c86b09578867af81671 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Wed, 5 Nov 2025 16:58:45 +1000 Subject: [PATCH 29/54] test: add e2e test for Repository GraphStoreManager Also fixes a httpx limitation with key-only query params. 
--- rdflib/contrib/rdf4j/client.py | 36 ++++++++---- .../test_e2e/test_repo_graph_store.py | 57 +++++++++++++++++++ .../repository/test_repo_graph_store_get.py | 1 + 3 files changed, 82 insertions(+), 12 deletions(-) create mode 100644 test/test_rdf4j/test_e2e/test_repo_graph_store.py diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index 17a99486d..de1f2fb27 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -220,11 +220,23 @@ def identifier(self): def _build_graph_name_params(graph_name: URIRef | str): params = {} if isinstance(graph_name, URIRef) and graph_name == DATASET_DEFAULT_GRAPH_ID: - params["default"] = "" + # Do nothing; GraphDB does not work with `?default=`, which is the default + # behavior of httpx when setting the param value to an empty string. + # httpx completely omits query parameters whose values are `None`, so that's + # not an option either. + # The workaround is to construct our own query parameter URL when we target + # the default graph. + pass else: params["graph"] = str(graph_name) return params + def _build_url(self, graph_name: URIRef | str): + url = f"/repositories/{self.identifier}/rdf-graphs/service" + if isinstance(graph_name, URIRef) and graph_name == DATASET_DEFAULT_GRAPH_ID: + url += "?default" + return url + def get(self, graph_name: URIRef | str) -> Graph: """Fetch all statements in the specified graph. 
@@ -243,10 +255,10 @@ def get(self, graph_name: URIRef | str) -> Graph: headers = { "Accept": self._content_type, } - params = self._build_graph_name_params(graph_name) + params = self._build_graph_name_params(graph_name) or None response = self.http_client.get( - f"/repositories/{self.identifier}/rdf-graphs/service", + self._build_url(graph_name), headers=headers, params=params, ) @@ -256,7 +268,7 @@ def get(self, graph_name: URIRef | str) -> Graph: data=response.text, format=self._content_type ) - def add(self, graph_name: str, data: str | bytes | BinaryIO | Graph): + def add(self, graph_name: URIRef | str, data: str | bytes | BinaryIO | Graph): """Add statements to the specified graph. Parameters: @@ -273,10 +285,10 @@ def add(self, graph_name: str, data: str | bytes | BinaryIO | Graph): headers = { "Content-Type": self._content_type, } - params = self._build_graph_name_params(graph_name) + params = self._build_graph_name_params(graph_name) or None try: response = self.http_client.post( - f"/repositories/{self.identifier}/rdf-graphs/service", + self._build_url(graph_name), headers=headers, params=params, content=stream, @@ -286,7 +298,7 @@ def add(self, graph_name: str, data: str | bytes | BinaryIO | Graph): if should_close: stream.close() - def overwrite(self, graph_name: str, data: str | bytes | BinaryIO | Graph): + def overwrite(self, graph_name: URIRef | str, data: str | bytes | BinaryIO | Graph): """Overwrite statements in the specified graph. 
Parameters: @@ -303,10 +315,10 @@ def overwrite(self, graph_name: str, data: str | bytes | BinaryIO | Graph): headers = { "Content-Type": self._content_type, } - params = self._build_graph_name_params(graph_name) + params = self._build_graph_name_params(graph_name) or None try: response = self.http_client.put( - f"/repositories/{self.identifier}/rdf-graphs/service", + self._build_url(graph_name), headers=headers, params=params, content=stream, @@ -316,7 +328,7 @@ def overwrite(self, graph_name: str, data: str | bytes | BinaryIO | Graph): if should_close: stream.close() - def clear(self, graph_name: str): + def clear(self, graph_name: URIRef | str): """Clear all statements in the specified graph. Parameters: @@ -327,9 +339,9 @@ def clear(self, graph_name: str): """ if not graph_name: raise ValueError("Graph name must be provided.") - params = self._build_graph_name_params(graph_name) + params = self._build_graph_name_params(graph_name) or None response = self.http_client.delete( - f"/repositories/{self.identifier}/rdf-graphs/service", params=params + self._build_url(graph_name), params=params ) response.raise_for_status() diff --git a/test/test_rdf4j/test_e2e/test_repo_graph_store.py b/test/test_rdf4j/test_e2e/test_repo_graph_store.py new file mode 100644 index 000000000..0cec4c642 --- /dev/null +++ b/test/test_rdf4j/test_e2e/test_repo_graph_store.py @@ -0,0 +1,57 @@ +from pathlib import Path + +import pytest + +from rdflib import Graph, URIRef, Dataset +from rdflib.graph import DATASET_DEFAULT_GRAPH_ID +from rdflib.contrib.rdf4j.client import Repository + + +@pytest.mark.parametrize("graph_name", [ + URIRef("urn:graph:a"), + DATASET_DEFAULT_GRAPH_ID +]) +@pytest.mark.testcontainer +def test_e2e_repo_graph_store_crud(repo: Repository, graph_name: URIRef): + path = str(Path(__file__).parent.parent / "data/quads-2.nq") + repo.overwrite(path, graph_name) + assert repo.size() == 1 + + graph = repo.graphs.get(graph_name) + assert isinstance(graph, Graph) + assert 
len(graph) == 1 + ds = Dataset().parse(path, format="nquads") + expected_graph = Graph().parse(data=ds.serialize(format="ntriples")) + assert len(expected_graph) == 1 + assert graph.isomorphic(expected_graph) + + # Add to the graph + repo.graphs.add( + graph_name, + " .", + ) + assert repo.size() == 2 + graph = repo.graphs.get(graph_name) + assert isinstance(graph, Graph) + assert len(graph) == 2 + expected_graph.add((URIRef("http://example.org/s4"), URIRef("http://example.org/p4"), URIRef("http://example.org/o4"))) + assert graph.isomorphic(expected_graph) + + # Overwrite the graph + repo.graphs.overwrite( + graph_name, + " .", + ) + assert repo.size() == 1 + graph = repo.graphs.get(graph_name) + assert isinstance(graph, Graph) + assert len(graph) == 1 + expected_graph = Graph().parse(data=" .") + assert graph.isomorphic(expected_graph) + + # Clear the graph + repo.graphs.clear(graph_name) + assert repo.size() == 0 + graph = repo.graphs.get(graph_name) + assert isinstance(graph, Graph) + assert len(graph) == 0 diff --git a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_get.py b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_get.py index 03b4d0c7d..fbbf1ad19 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_get.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_get.py @@ -37,6 +37,7 @@ def test_repo_graph_store_get( ) assert isinstance(graph, Graph) assert graph.isomorphic(Graph().parse(data=data)) + assert graph.identifier == URIRef(graph_name) @pytest.mark.parametrize("graph_name", [None, ""]) From b84c4982a94b9629223f540ac79e526759c65e16 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Wed, 5 Nov 2025 17:04:00 +1000 Subject: [PATCH 30/54] chore: remove redundant re-raise of httpx exceptions --- rdflib/contrib/rdf4j/client.py | 317 ++++++++++++--------------------- 1 file changed, 114 insertions(+), 203 deletions(-) diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index 
de1f2fb27..6b003fdb9 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -69,50 +69,38 @@ def list(self) -> list[NamespaceListingResult]: Raises: RepositoryFormatError: If the response format is unrecognized. - httpx.RequestError: On network/connection issues. - httpx.HTTPStatusError: Unhandled status code error. """ headers = { "Accept": "application/sparql-results+json", } + response = self.http_client.get( + f"/repositories/{self.identifier}/namespaces", headers=headers + ) + response.raise_for_status() + try: - response = self.http_client.get( - f"/repositories/{self.identifier}/namespaces", headers=headers - ) - response.raise_for_status() + data = response.json() + results = data["results"]["bindings"] + return [ + NamespaceListingResult( + prefix=row["prefix"]["value"], + namespace=row["namespace"]["value"], + ) + for row in results + ] + except (KeyError, ValueError) as err: + raise RepositoryFormatError(f"Unrecognised response format: {err}") - try: - data = response.json() - results = data["results"]["bindings"] - return [ - NamespaceListingResult( - prefix=row["prefix"]["value"], - namespace=row["namespace"]["value"], - ) - for row in results - ] - except (KeyError, ValueError) as err: - raise RepositoryFormatError(f"Unrecognised response format: {err}") - except (httpx.RequestError, httpx.HTTPStatusError): - raise def clear(self): - """Clear all namespace declarations in the repository. - - Raises: - httpx.RequestError: On network/connection issues. - httpx.HTTPStatusError: Unhandled status code error. 
- """ + """Clear all namespace declarations in the repository.""" headers = { "Accept": "application/sparql-results+json", } - try: - response = self.http_client.delete( - f"/repositories/{self.identifier}/namespaces", headers=headers - ) - response.raise_for_status() - except (httpx.RequestError, httpx.HTTPStatusError): - raise + response = self.http_client.delete( + f"/repositories/{self.identifier}/namespaces", headers=headers + ) + response.raise_for_status() def get(self, prefix: str) -> str | None: """Get the namespace URI for a given prefix. @@ -122,10 +110,6 @@ def get(self, prefix: str) -> str | None: Returns: The namespace URI or `None` if not found. - - Raises: - httpx.RequestError: On network/connection issues. - httpx.HTTPStatusError: Unhandled status code error. """ if not prefix: raise ValueError("Prefix cannot be empty.") @@ -142,8 +126,6 @@ def get(self, prefix: str) -> str | None: if err.response.status_code == 404: return None raise - except httpx.RequestError: - raise def set(self, prefix: str, namespace: str): """Set the namespace URI for a given prefix. @@ -155,10 +137,6 @@ def set(self, prefix: str, namespace: str): Parameters: prefix: The prefix to set. namespace: The namespace URI to set. - - Raises: - httpx.RequestError: On network/connection issues. - httpx.HTTPStatusError: Unhandled status code error. 
""" if not prefix: raise ValueError("Prefix cannot be empty.") @@ -167,15 +145,12 @@ def set(self, prefix: str, namespace: str): headers = { "Content-Type": "text/plain", } - try: - response = self.http_client.put( - f"/repositories/{self.identifier}/namespaces/{prefix}", - headers=headers, - content=namespace, - ) - response.raise_for_status() - except (httpx.RequestError, httpx.HTTPStatusError): - raise + response = self.http_client.put( + f"/repositories/{self.identifier}/namespaces/{prefix}", + headers=headers, + content=namespace, + ) + response.raise_for_status() def remove(self, prefix: str): """Remove the namespace declaration for a given prefix. @@ -185,13 +160,10 @@ def remove(self, prefix: str): """ if not prefix: raise ValueError("Prefix cannot be empty.") - try: - response = self.http_client.delete( - f"/repositories/{self.identifier}/namespaces/{prefix}" - ) - response.raise_for_status() - except (httpx.RequestError, httpx.HTTPStatusError): - raise + response = self.http_client.delete( + f"/repositories/{self.identifier}/namespaces/{prefix}" + ) + response.raise_for_status() class GraphStoreManager: @@ -415,8 +387,6 @@ def health(self) -> bool: Raises: RepositoryNotFoundError: If the repository is not found. RepositoryNotHealthyError: If the repository is not healthy. - httpx.RequestError: On network/connection issues. - httpx.HTTPStatusError: Unhandled status code error. """ headers = { "Content-Type": "application/sparql-query", @@ -436,8 +406,6 @@ def health(self) -> bool: raise RepositoryNotHealthyError( f"Repository {self._identifier} is not healthy. {err.response.status_code} - {err.response.text}" ) - except httpx.RequestError: - raise def size( self, graph_name: IdentifiedNode | Iterable[IdentifiedNode] | str | None = None @@ -457,27 +425,22 @@ def size( Raises: RepositoryFormatError: Fails to parse the repository size. - httpx.RequestError: On network/connection issues. - httpx.HTTPStatusError: Unhandled status code error. 
""" params: dict[str, str] = {} build_context_param(params, graph_name) + response = self.http_client.get( + f"/repositories/{self.identifier}/size", params=params + ) + response.raise_for_status() try: - response = self.http_client.get( - f"/repositories/{self.identifier}/size", params=params - ) - response.raise_for_status() - try: - value = int(response.text) - if value >= 0: - return value - raise ValueError(f"Invalid repository size: {value}") - except ValueError as err: - raise RepositoryFormatError( - f"Failed to parse repository size: {err}" - ) from err - except (httpx.RequestError, httpx.HTTPStatusError): - raise + value = int(response.text) + if value >= 0: + return value + raise ValueError(f"Invalid repository size: {value}") + except ValueError as err: + raise RepositoryFormatError( + f"Failed to parse repository size: {err}" + ) from err def graph_names(self) -> list[IdentifiedNode]: """Get a list of all graph names in the repository. @@ -487,35 +450,30 @@ def graph_names(self) -> list[IdentifiedNode]: Raises: RepositoryFormatError: Fails to parse the repository graph names. - httpx.RequestError: On network/connection issues. - httpx.HTTPStatusError: Unhandled status code error. 
""" + headers = { + "Accept": "application/sparql-results+json", + } + response = self.http_client.get( + f"/repositories/{self.identifier}/contexts", headers=headers + ) + response.raise_for_status() try: - headers = { - "Accept": "application/sparql-results+json", - } - response = self.http_client.get( - f"/repositories/{self.identifier}/contexts", headers=headers - ) - response.raise_for_status() - try: - values: list[IdentifiedNode] = [] - for row in response.json()["results"]["bindings"]: - value = row["contextID"]["value"] - value_type = row["contextID"]["type"] - if value_type == "uri": - values.append(URIRef(value)) - elif value_type == "bnode": - values.append(BNode(value)) - else: - raise ValueError(f"Invalid graph name type: {value_type}") - return values - except Exception as err: - raise RepositoryFormatError( - f"Failed to parse repository graph names: {err}" - ) from err - except (httpx.RequestError, httpx.HTTPStatusError): - raise + values: list[IdentifiedNode] = [] + for row in response.json()["results"]["bindings"]: + value = row["contextID"]["value"] + value_type = row["contextID"]["type"] + if value_type == "uri": + values.append(URIRef(value)) + elif value_type == "bnode": + values.append(BNode(value)) + else: + raise ValueError(f"Invalid graph name type: {value_type}") + return values + except Exception as err: + raise RepositoryFormatError( + f"Failed to parse repository graph names: {err}" + ) from err def get( self, @@ -548,10 +506,6 @@ def get( Returns: A [`Graph`][rdflib.graph.Graph] or [`Dataset`][rdflib.graph.Dataset] object with the repository namespace prefixes bound to it. - - Raises: - httpx.RequestError: On network/connection issues. - httpx.HTTPStatusError: Unhandled status code error. 
""" if content_type is None: content_type = "application/n-quads" @@ -561,30 +515,27 @@ def get( build_spo_param(params, subj, pred, obj) build_infer_param(params, infer=infer) + response = self.http_client.get( + f"/repositories/{self.identifier}/statements", + headers=headers, + params=params, + ) + response.raise_for_status() + triple_formats = [ + "application/n-triples", + "text/turtle", + "application/rdf+xml", + ] try: - response = self.http_client.get( - f"/repositories/{self.identifier}/statements", - headers=headers, - params=params, - ) - response.raise_for_status() - triple_formats = [ - "application/n-triples", - "text/turtle", - "application/rdf+xml", - ] - try: - if content_type in triple_formats: - retval = Graph().parse(data=response.text, format=content_type) - else: - retval = Dataset().parse(data=response.text, format=content_type) - for result in self.namespaces.list(): - retval.bind(result.prefix, result.namespace, replace=True) - return retval - except Exception as err: - raise RDFLibParserError(f"Error parsing RDF: {err}") from err - except (httpx.RequestError, httpx.HTTPStatusError): - raise + if content_type in triple_formats: + retval = Graph().parse(data=response.text, format=content_type) + else: + retval = Dataset().parse(data=response.text, format=content_type) + for result in self.namespaces.list(): + retval.bind(result.prefix, result.namespace, replace=True) + return retval + except Exception as err: + raise RDFLibParserError(f"Error parsing RDF: {err}") from err # TODO: This only covers appending statements to a repository. # We still need to implement sparql update and transaction document. @@ -601,10 +552,6 @@ def upload( base_uri: The base URI to resolve against for any relative URIs in the data. content_type: The content type of the data. Defaults to `application/n-quads` when the value is `None`. - - Raises: - httpx.RequestError: On network/connection issues. - httpx.HTTPStatusError: Unhandled status code error. 
""" stream, should_close = rdf_payload_to_stream(data) try: @@ -619,8 +566,6 @@ def upload( content=stream, ) response.raise_for_status() - except (httpx.RequestError, httpx.HTTPStatusError): - raise finally: if should_close: stream.close() @@ -646,10 +591,6 @@ def overwrite( base_uri: The base URI to resolve against for any relative URIs in the data. content_type: The content type of the data. Defaults to `application/n-quads` when the value is `None`. - - Raises: - httpx.RequestError: On network/connection issues. - httpx.HTTPStatusError: Unhandled status code error. """ stream, should_close = rdf_payload_to_stream(data) @@ -666,8 +607,6 @@ def overwrite( content=stream, ) response.raise_for_status() - except (httpx.RequestError, httpx.HTTPStatusError): - raise finally: if should_close: stream.close() @@ -691,23 +630,16 @@ def delete( To query just the default graph, use [`DATASET_DEFAULT_GRAPH_ID`][rdflib.graph.DATASET_DEFAULT_GRAPH_ID]. - - Raises: - httpx.RequestError: On network/connection issues. - httpx.HTTPStatusError: Unhandled status code error. """ params: dict[str, str] = {} build_context_param(params, graph_name) build_spo_param(params, subj, pred, obj) - try: - response = self.http_client.delete( - f"/repositories/{self.identifier}/statements", - params=params, - ) - response.raise_for_status() - except (httpx.RequestError, httpx.HTTPStatusError): - raise + response = self.http_client.delete( + f"/repositories/{self.identifier}/statements", + params=params, + ) + response.raise_for_status() class RepositoryManager: @@ -728,33 +660,28 @@ def list(self) -> list[RepositoryListingResult]: Raises: RepositoryFormatError: If the response format is unrecognized. - httpx.RequestError: On network/connection issues. - httpx.HTTPStatusError: Unhandled status code error. 
""" headers = { "Accept": "application/sparql-results+json", } - try: - response = self.http_client.get("/repositories", headers=headers) - response.raise_for_status() + response = self.http_client.get("/repositories", headers=headers) + response.raise_for_status() - try: - data = response.json() - results = data["results"]["bindings"] - return [ - RepositoryListingResult( - identifier=repo["id"]["value"], - uri=repo["uri"]["value"], - readable=repo["readable"]["value"], - writable=repo["writable"]["value"], - title=repo.get("title", {}).get("value"), - ) - for repo in results - ] - except (KeyError, ValueError) as err: - raise RepositoryFormatError(f"Unrecognised response format: {err}") - except (httpx.RequestError, httpx.HTTPStatusError): - raise + try: + data = response.json() + results = data["results"]["bindings"] + return [ + RepositoryListingResult( + identifier=repo["id"]["value"], + uri=repo["uri"]["value"], + readable=repo["readable"]["value"], + writable=repo["writable"]["value"], + title=repo.get("title", {}).get("value"), + ) + for repo in results + ] + except (KeyError, ValueError) as err: + raise RepositoryFormatError(f"Unrecognised response format: {err}") def get(self, repository_id: str) -> Repository: """Get a repository by ID. @@ -771,15 +698,10 @@ def get(self, repository_id: str) -> Repository: Raises: RepositoryNotFoundError: If the repository is not found. RepositoryNotHealthyError: If the repository is not healthy. - httpx.RequestError: On network/connection issues. - httpx.HTTPStatusError: Unhandled status code error. """ repo = Repository(repository_id, self.http_client) - try: - repo.health() - return repo - except (RepositoryNotFoundError, RepositoryNotHealthyError, httpx.RequestError): - raise + repo.health() + return repo def create( self, repository_id: str, data: str, content_type: str = "text/turtle" @@ -794,8 +716,6 @@ def create( Raises: RepositoryAlreadyExistsError: If the repository already exists. 
RepositoryNotHealthyError: If the repository is not healthy. - httpx.RequestError: On network/connection issues. - httpx.HTTPStatusError: Unhandled status code error. """ try: headers = {"Content-Type": content_type} @@ -804,8 +724,6 @@ def create( ) response.raise_for_status() return self.get(repository_id) - except httpx.RequestError: - raise except httpx.HTTPStatusError as err: if err.response.status_code == 409: raise RepositoryAlreadyExistsError( @@ -822,8 +740,6 @@ def delete(self, repository_id: str) -> None: Raises: RepositoryNotFoundError: If the repository is not found. RepositoryError: If the repository is not deleted successfully. - httpx.RequestError: On network/connection issues. - httpx.HTTPStatusError: Unhandled status code error. """ try: response = self.http_client.delete(f"/repositories/{repository_id}") @@ -832,8 +748,6 @@ def delete(self, repository_id: str) -> None: raise RepositoryError( f"Unexpected response status code when deleting repository {repository_id}: {response.status_code} - {response.text.strip()}" ) - except httpx.RequestError: - raise except httpx.HTTPStatusError as err: if err.response.status_code == 404: raise RepositoryNotFoundError(f"Repository {repository_id} not found.") @@ -888,14 +802,11 @@ def repositories(self) -> RepositoryManager: @property def protocol(self) -> float: - try: - response = self.http_client.get( - "/protocol", headers={"Accept": "text/plain"} - ) - response.raise_for_status() - return float(response.text.strip()) - except (httpx.RequestError, httpx.HTTPStatusError): - raise + response = self.http_client.get( + "/protocol", headers={"Accept": "text/plain"} + ) + response.raise_for_status() + return float(response.text.strip()) def close(self): """Close the underlying httpx.Client.""" From fb464e426d343f217bb6b269f707653d17ba0b77 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Wed, 5 Nov 2025 17:05:13 +1000 Subject: [PATCH 31/54] docs: add docstring to protocol method --- rdflib/contrib/rdf4j/client.py 
| 5 +++++ 1 file changed, 5 insertions(+) diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index 6b003fdb9..6c32ffc17 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -802,6 +802,11 @@ def repositories(self) -> RepositoryManager: @property def protocol(self) -> float: + """The RDF4J REST API protocol version. + + Returns: + The protocol version number. + """ response = self.http_client.get( "/protocol", headers={"Accept": "text/plain"} ) From 28bf65f6a2ce4604efe64457556daaf655b7789a Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Wed, 5 Nov 2025 17:11:54 +1000 Subject: [PATCH 32/54] style: formatting --- rdflib/contrib/rdf4j/client.py | 9 ++------ .../test_e2e/test_repo_graph_store.py | 23 ++++++++++++------- 2 files changed, 17 insertions(+), 15 deletions(-) diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index 6c32ffc17..f6ac77a5c 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -91,7 +91,6 @@ def list(self) -> list[NamespaceListingResult]: except (KeyError, ValueError) as err: raise RepositoryFormatError(f"Unrecognised response format: {err}") - def clear(self): """Clear all namespace declarations in the repository.""" headers = { @@ -312,9 +311,7 @@ def clear(self, graph_name: URIRef | str): if not graph_name: raise ValueError("Graph name must be provided.") params = self._build_graph_name_params(graph_name) or None - response = self.http_client.delete( - self._build_url(graph_name), params=params - ) + response = self.http_client.delete(self._build_url(graph_name), params=params) response.raise_for_status() @@ -807,9 +804,7 @@ def protocol(self) -> float: Returns: The protocol version number. 
""" - response = self.http_client.get( - "/protocol", headers={"Accept": "text/plain"} - ) + response = self.http_client.get("/protocol", headers={"Accept": "text/plain"}) response.raise_for_status() return float(response.text.strip()) diff --git a/test/test_rdf4j/test_e2e/test_repo_graph_store.py b/test/test_rdf4j/test_e2e/test_repo_graph_store.py index 0cec4c642..ba5f610bc 100644 --- a/test/test_rdf4j/test_e2e/test_repo_graph_store.py +++ b/test/test_rdf4j/test_e2e/test_repo_graph_store.py @@ -2,15 +2,14 @@ import pytest -from rdflib import Graph, URIRef, Dataset -from rdflib.graph import DATASET_DEFAULT_GRAPH_ID +from rdflib import Dataset, Graph, URIRef from rdflib.contrib.rdf4j.client import Repository +from rdflib.graph import DATASET_DEFAULT_GRAPH_ID -@pytest.mark.parametrize("graph_name", [ - URIRef("urn:graph:a"), - DATASET_DEFAULT_GRAPH_ID -]) +@pytest.mark.parametrize( + "graph_name", [URIRef("urn:graph:a"), DATASET_DEFAULT_GRAPH_ID] +) @pytest.mark.testcontainer def test_e2e_repo_graph_store_crud(repo: Repository, graph_name: URIRef): path = str(Path(__file__).parent.parent / "data/quads-2.nq") @@ -34,7 +33,13 @@ def test_e2e_repo_graph_store_crud(repo: Repository, graph_name: URIRef): graph = repo.graphs.get(graph_name) assert isinstance(graph, Graph) assert len(graph) == 2 - expected_graph.add((URIRef("http://example.org/s4"), URIRef("http://example.org/p4"), URIRef("http://example.org/o4"))) + expected_graph.add( + ( + URIRef("http://example.org/s4"), + URIRef("http://example.org/p4"), + URIRef("http://example.org/o4"), + ) + ) assert graph.isomorphic(expected_graph) # Overwrite the graph @@ -46,7 +51,9 @@ def test_e2e_repo_graph_store_crud(repo: Repository, graph_name: URIRef): graph = repo.graphs.get(graph_name) assert isinstance(graph, Graph) assert len(graph) == 1 - expected_graph = Graph().parse(data=" .") + expected_graph = Graph().parse( + data=" ." 
+ ) assert graph.isomorphic(expected_graph) # Clear the graph From 8efe9d07b20dd1aa8368c1399eede24258afc62e Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Thu, 6 Nov 2025 11:12:57 +1000 Subject: [PATCH 33/54] feat: add Repository.query method --- rdflib/contrib/rdf4j/client.py | 42 ++++++ test/test_rdf4j/test_e2e/test_repo_query.py | 56 ++++++++ .../test_unit/repository/test_repo_query.py | 125 ++++++++++++++++++ 3 files changed, 223 insertions(+) create mode 100644 test/test_rdf4j/test_e2e/test_repo_query.py create mode 100644 test/test_rdf4j/test_unit/repository/test_repo_query.py diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index f6ac77a5c..6e9997bb0 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -2,6 +2,7 @@ from __future__ import annotations +import io import typing as t from dataclasses import dataclass from typing import Any, BinaryIO, Iterable @@ -25,6 +26,8 @@ rdf_payload_to_stream, ) from rdflib.graph import DATASET_DEFAULT_GRAPH_ID, Dataset, Graph +from rdflib.plugins.sparql import prepareQuery +from rdflib.query import Result from rdflib.term import IdentifiedNode, Literal, URIRef SubjectType = t.Union[IdentifiedNode, None] @@ -439,6 +442,45 @@ def size( f"Failed to parse repository size: {err}" ) from err + def query(self, query: str, **kwargs): + """Execute a SPARQL query against the repository. + + !!! note + A POST request is used by default. If any keyword arguments are provided, + a GET request is used instead, and the arguments are passed as query parameters. + + Parameters: + query: The SPARQL query to execute. + **kwargs: Additional keyword arguments to include as query parameters + in the request. See + [RDF4J REST API - Execute SPARQL query](https://rdf4j.org/documentation/reference/rest-api/#tag/SPARQL/paths/~1repositories~1%7BrepositoryID%7D/get) + for the list of supported query parameters. 
+ """ + prepared_query = prepareQuery(query) + headers = {"Content-Type": "application/sparql-query"} + if prepared_query.algebra.name in ("SelectQuery", "AskQuery"): + headers["Accept"] = "application/sparql-results+json" + elif prepared_query.algebra.name in ("ConstructQuery", "DescribeQuery"): + headers["Accept"] = "application/n-triples" + else: + raise ValueError(f"Unsupported query type: {prepared_query.algebra.name}") + + if not kwargs: + response = self.http_client.post( + f"/repositories/{self.identifier}", headers=headers, content=query + ) + else: + response = self.http_client.get( + f"/repositories/{self.identifier}", + headers=headers, + params={"query": query, **kwargs}, + ) + response.raise_for_status() + return Result.parse( + io.BytesIO(response.content), + content_type=response.headers["Content-Type"].split(";")[0], + ) + def graph_names(self) -> list[IdentifiedNode]: """Get a list of all graph names in the repository. diff --git a/test/test_rdf4j/test_e2e/test_repo_query.py b/test/test_rdf4j/test_e2e/test_repo_query.py new file mode 100644 index 000000000..7cc1863da --- /dev/null +++ b/test/test_rdf4j/test_e2e/test_repo_query.py @@ -0,0 +1,56 @@ +from pathlib import Path + +from rdflib.contrib.rdf4j.client import Repository +from rdflib.term import URIRef, Variable + + +def test_e2e_repo_query(repo: Repository): + path = str(Path(__file__).parent.parent / "data/quads-1.nq") + repo.overwrite(path) + assert repo.size() == 2 + + query = "select ?s ?p ?o where { ?s ?p ?o }" + results = repo.query(query) + assert len(results) == 2 + s_var = Variable("s") + p_var = Variable("p") + o_var = Variable("o") + subjects = [URIRef("http://example.org/s"), URIRef("http://example.org/s2")] + predicates = [URIRef("http://example.org/p"), URIRef("http://example.org/p2")] + objects = [URIRef("http://example.org/o"), URIRef("http://example.org/o2")] + for row in results.bindings: + assert row.get(s_var) in subjects + assert row.get(p_var) in predicates + assert 
row.get(o_var) in objects + + query = "ask where { ?s ?p ?o }" + results = repo.query(query) + assert results.askAnswer is True + + query = "ask where { }" + results = repo.query(query) + assert results.askAnswer is False + + query = "construct { ?s ?p ?o } where { graph { ?s ?p ?o } }" + results = repo.query(query) + assert len(results.graph) == 1 + assert ( + URIRef("http://example.org/s"), + URIRef("http://example.org/p"), + URIRef("http://example.org/o"), + ) in results.graph + + query = "describe " + results = repo.query(query) + assert len(results.graph) == 1 + assert ( + URIRef("http://example.org/s2"), + URIRef("http://example.org/p2"), + URIRef("http://example.org/o2"), + ) in results.graph + + # Provide a keyword argument "limit" to the query method + # We have 2 statements in the repository, and this should return only one + query = "select ?s ?p ?o where { ?s ?p ?o }" + results = repo.query(query, limit=1) + assert len(results) == 1 diff --git a/test/test_rdf4j/test_unit/repository/test_repo_query.py b/test/test_rdf4j/test_unit/repository/test_repo_query.py new file mode 100644 index 000000000..d6ec1da50 --- /dev/null +++ b/test/test_rdf4j/test_unit/repository/test_repo_query.py @@ -0,0 +1,125 @@ +from __future__ import annotations + +from unittest.mock import Mock + +import httpx +import pytest + +from rdflib import Graph +from rdflib.contrib.rdf4j.client import ( + Repository, +) +from rdflib.term import URIRef, Variable + + +@pytest.mark.parametrize( + "query, accept_header, response_text, expected_result_type", + [ + [ + "select ?s where { ?s ?p ?o }", + "application/sparql-results+json", + """ + { + "head": { + "vars": ["s"] + }, + "results": { + "bindings": [{"s": {"value": "http://example.com/s", "type": "uri"}}] + } + } + """, + "SELECT", + ], + [ + "ask where { ?s ?p ?o }", + "application/sparql-results+json", + '{ "boolean": true }', + "ASK", + ], + [ + "construct { ?s ?p ?o } where { ?s ?p ?o }", + "application/n-triples", + " .", + 
"CONSTRUCT", + ], + [ + "describe ?s", + "application/n-triples", + " .", + "CONSTRUCT", + ], + ], +) +def test_repo_query( + repo: Repository, + monkeypatch: pytest.MonkeyPatch, + query: str, + accept_header: str, + response_text: str, + expected_result_type, +): + mock_response = Mock( + spec=httpx.Response, + content=response_text.encode("utf-8"), + headers={"Content-Type": accept_header}, + ) + mock_httpx_post = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "post", mock_httpx_post) + result = repo.query(query) + assert result.type == expected_result_type + headers = {"Accept": accept_header, "Content-Type": "application/sparql-query"} + mock_httpx_post.assert_called_once_with( + "/repositories/test-repo", + headers=headers, + content=query, + ) + + if expected_result_type == "SELECT": + assert len(result) == 1 + s_var = Variable("s") + assert result.vars == [s_var] + assert result.bindings[0].get(s_var) == URIRef("http://example.com/s") + elif expected_result_type == "ASK": + assert result.askAnswer is True + elif expected_result_type == "CONSTRUCT": + assert len(result.graph) == 1 + assert ( + Graph() + .parse( + data=" ." 
+ ) + .isomorphic(result.graph) + ) + else: + assert False, "Unexpected result type" + + +def test_repo_query_value_error(repo: Repository, monkeypatch: pytest.MonkeyPatch): + mock_query = Mock() + mock_query.algebra.name = "InvalidQueryType" + monkeypatch.setattr( + "rdflib.contrib.rdf4j.client.prepareQuery", lambda _: mock_query + ) + with pytest.raises(ValueError, match="Unsupported query type: InvalidQueryType"): + repo.query("") + + +def test_repo_query_kwargs(repo: Repository, monkeypatch: pytest.MonkeyPatch): + """The query method uses GET if a keyword argument is provided.""" + mock_response = Mock( + spec=httpx.Response, + content=b" .", + headers={"Content-Type": "application/n-triples"}, + ) + mock_httpx_get = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) + query = "construct { ?s ?p ?o } where { ?s ?p ?o }" + repo.query(query, infer="true") + mock_httpx_get.assert_called_once_with( + "/repositories/test-repo", + headers={ + "Accept": "application/n-triples", + "Content-Type": "application/sparql-query", + }, + params={"query": query, "infer": "true"}, + ) From aadee521a34bcfc11d664fe350dc34a200ade008 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Thu, 6 Nov 2025 11:40:49 +1000 Subject: [PATCH 34/54] feat: add Repository.update method --- rdflib/contrib/rdf4j/client.py | 12 ++++++++++ test/test_rdf4j/test_e2e/test_repo_update.py | 13 ++++++++++ .../test_unit/repository/test_repo_update.py | 24 +++++++++++++++++++ 3 files changed, 49 insertions(+) create mode 100644 test/test_rdf4j/test_e2e/test_repo_update.py create mode 100644 test/test_rdf4j/test_unit/repository/test_repo_update.py diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index 6e9997bb0..786fc3d10 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -481,6 +481,18 @@ def query(self, query: str, **kwargs): content_type=response.headers["Content-Type"].split(";")[0], ) + def update(self, 
query: str): + """Execute a SPARQL update operation on the repository. + + Parameters: + query: The SPARQL update query to execute. + """ + headers = {"Content-Type": "application/sparql-update"} + response = self.http_client.post( + f"/repositories/{self.identifier}/statements", headers=headers, content=query + ) + response.raise_for_status() + def graph_names(self) -> list[IdentifiedNode]: """Get a list of all graph names in the repository. diff --git a/test/test_rdf4j/test_e2e/test_repo_update.py b/test/test_rdf4j/test_e2e/test_repo_update.py new file mode 100644 index 000000000..15762c9ee --- /dev/null +++ b/test/test_rdf4j/test_e2e/test_repo_update.py @@ -0,0 +1,13 @@ +from pathlib import Path + +from rdflib.contrib.rdf4j.client import Repository + + +def test_e2e_repo_query(repo: Repository): + path = str(Path(__file__).parent.parent / "data/quads-1.nq") + repo.overwrite(path) + assert repo.size() == 2 + + query = """INSERT DATA { GRAPH { } }""" + repo.update(query) + assert repo.size() == 3 diff --git a/test/test_rdf4j/test_unit/repository/test_repo_update.py b/test/test_rdf4j/test_unit/repository/test_repo_update.py new file mode 100644 index 000000000..75f00509f --- /dev/null +++ b/test/test_rdf4j/test_unit/repository/test_repo_update.py @@ -0,0 +1,24 @@ +from __future__ import annotations + +from unittest.mock import Mock + +import httpx +import pytest + +from rdflib.contrib.rdf4j.client import ( + Repository, +) + + +def test_repo_update(repo: Repository, monkeypatch: pytest.MonkeyPatch): + mock_response = Mock(spec=httpx.Response, status_code=204) + mock_httpx_post = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "post", mock_httpx_post) + repo.update( + "insert data { }" + ) + mock_httpx_post.assert_called_once_with( + "/repositories/test-repo", + headers={"Content-Type": "application/sparql-update"}, + content="insert data { }", + ) From 4c39a79637ce150cab2679305457b94ccd8b71ef Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: 
Thu, 6 Nov 2025 13:29:32 +1000 Subject: [PATCH 35/54] feat: add Repository Transaction with ping and commit --- rdflib/contrib/rdf4j/client.py | 100 +++++++++++++++++- rdflib/contrib/rdf4j/exceptions.py | 16 +++ .../test_e2e/test_repo_transaction.py | 18 ++++ .../repository/test_repo_transaction.py | 85 +++++++++++++++ 4 files changed, 217 insertions(+), 2 deletions(-) create mode 100644 test/test_rdf4j/test_e2e/test_repo_transaction.py create mode 100644 test/test_rdf4j/test_unit/repository/test_repo_transaction.py diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index 786fc3d10..b2723c21f 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -2,6 +2,7 @@ from __future__ import annotations +import contextlib import io import typing as t from dataclasses import dataclass @@ -18,6 +19,9 @@ RepositoryFormatError, RepositoryNotFoundError, RepositoryNotHealthyError, + TransactionClosedError, + TransactionCommitError, + TransactionPingError, ) from rdflib.contrib.rdf4j.util import ( build_context_param, @@ -489,7 +493,9 @@ def update(self, query: str): """ headers = {"Content-Type": "application/sparql-update"} response = self.http_client.post( - f"/repositories/{self.identifier}/statements", headers=headers, content=query + f"/repositories/{self.identifier}/statements", + headers=headers, + content=query, ) response.raise_for_status() @@ -692,9 +698,99 @@ def delete( ) response.raise_for_status() + @contextlib.contextmanager + def transaction(self): + """Create a new transaction for the repository.""" + with Transaction(self.identifier, self.http_client) as txn: + yield txn + + +class Transaction: + """An RDF4J transaction. + + Parameters: + identifier: The identifier of the repository. + http_client: The httpx.Client instance. 
+ """ + + def __init__(self, identifier: str, http_client: httpx.Client): + self._identifier = identifier + self._http_client = http_client + self._url: str | None = self._start_transaction() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if self._url is not None: + self.commit() + + @property + def http_client(self): + return self._http_client + + @property + def identifier(self): + """Repository identifier.""" + return self._identifier + + @property + def url(self): + """The transaction URL.""" + return self._url + + def _raise_for_closed(self): + if self._url is None: + raise TransactionClosedError("The transaction has been closed.") + + def _start_transaction(self) -> str: + response = self.http_client.post( + f"/repositories/{self.identifier}/transactions" + ) + response.raise_for_status() + return response.headers["Location"] + + def _close_transaction(self): + self._url = None + + def commit(self): + """Commit the transaction. + + Raises: + TransactionCommitError: If the transaction commit fails. + TransactionClosedError: If the transaction is closed. + """ + self._raise_for_closed() + params = {"action": "COMMIT"} + response = self.http_client.put(self.url, params=params) + if response.status_code != 200: + raise TransactionCommitError( + f"Transaction commit failed: {response.status_code} - {response.text}" + ) + self._close_transaction() + + def ping(self): + """Ping the transaction. + + Raises: + RepositoryTransactionPingError: If the transaction ping fails. + TransactionClosedError: If the transaction is closed. 
+ """ + self._raise_for_closed() + params = {"action": "PING"} + response = self.http_client.put(self.url, params=params) + if response.status_code != 200: + raise TransactionPingError( + f"Transaction ping failed: {response.status_code} - {response.text}" + ) + class RepositoryManager: - """A client to manage server-level repository operations.""" + """A client to manage server-level repository operations. + + Parameters: + http_client: The httpx.Client instance. + """ def __init__(self, http_client: httpx.Client): self._http_client = http_client diff --git a/rdflib/contrib/rdf4j/exceptions.py b/rdflib/contrib/rdf4j/exceptions.py index a59b6d221..b09e093e1 100644 --- a/rdflib/contrib/rdf4j/exceptions.py +++ b/rdflib/contrib/rdf4j/exceptions.py @@ -27,3 +27,19 @@ class RDF4JUnsupportedProtocolError(Exception): class RDFLibParserError(Exception): """Raised when there is an error parsing the RDF document.""" + + +class RepositoryTransactionError(Exception): + """Raised when there is an error with the transaction.""" + + +class TransactionClosedError(RepositoryTransactionError): + """Raised when the transaction has been closed.""" + + +class TransactionPingError(RepositoryTransactionError): + """Raised when there is an error pinging the transaction.""" + + +class TransactionCommitError(RepositoryTransactionError): + """Raised when there is an error committing the transaction.""" diff --git a/test/test_rdf4j/test_e2e/test_repo_transaction.py b/test/test_rdf4j/test_e2e/test_repo_transaction.py new file mode 100644 index 000000000..3adb2598e --- /dev/null +++ b/test/test_rdf4j/test_e2e/test_repo_transaction.py @@ -0,0 +1,18 @@ +from pathlib import Path + +import pytest + +from rdflib.contrib.rdf4j.client import Repository +from rdflib.contrib.rdf4j.exceptions import TransactionClosedError + + +def test_e2e_repo_transaction(repo: Repository): + path = str(Path(__file__).parent.parent / "data/quads-1.nq") + repo.overwrite(path) + assert repo.size() == 2 + + with 
repo.transaction() as txn: + txn.ping() + + with pytest.raises(TransactionClosedError): + txn.ping() diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction.py new file mode 100644 index 000000000..3175d1dda --- /dev/null +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction.py @@ -0,0 +1,85 @@ +from __future__ import annotations + +from unittest.mock import Mock + +import httpx +import pytest + +from rdflib.contrib.rdf4j.client import ( + Repository, +) +from rdflib.contrib.rdf4j.exceptions import ( + TransactionClosedError, + TransactionPingError, +) + + +def test_repo_transaction_commit( + repo: Repository, + monkeypatch: pytest.MonkeyPatch, +): + transaction_url = "http://example.com/transaction/1" + mock_transaction_create_response = Mock( + spec=httpx.Response, headers={"Location": transaction_url} + ) + mock_httpx_post = Mock(return_value=mock_transaction_create_response) + monkeypatch.setattr(httpx.Client, "post", mock_httpx_post) + with repo.transaction() as txn: + # Ensure the transaction is created. + assert txn.url == transaction_url + mock_httpx_post.assert_called_once_with( + "/repositories/test-repo/transactions", + ) + + # Mock commit response. + mock_transaction_commit_response = Mock(spec=httpx.Response, status_code=200) + mock_httpx_put = Mock(return_value=mock_transaction_commit_response) + monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) + # Explicitly commit. This closes the transaction. + txn.commit() + mock_httpx_put.assert_called_once_with( + transaction_url, + params={"action": "COMMIT"}, + ) + # Ensure it is closed. + assert txn.url is None + with pytest.raises(TransactionClosedError): + txn.ping() + + with repo.transaction() as txn: + txn.ping() + + with pytest.raises(TransactionClosedError): + # Ensure that the context manager closes the transaction. 
+ txn.ping() + + +def test_repo_transaction_ping(repo: Repository, monkeypatch: pytest.MonkeyPatch): + transaction_url = "http://example.com/transaction/1" + mock_transaction_create_response = Mock( + spec=httpx.Response, headers={"Location": transaction_url} + ) + mock_httpx_post = Mock(return_value=mock_transaction_create_response) + monkeypatch.setattr(httpx.Client, "post", mock_httpx_post) + with repo.transaction() as txn: + # Test a successful ping. + mock_ping_response = Mock(spec=httpx.Response, status_code=200) + mock_httpx_put = Mock(return_value=mock_ping_response) + monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) + txn.ping() + mock_httpx_put.assert_called_once_with( + transaction_url, + params={"action": "PING"}, + ) + + # Ensure it raises TransactionPingError. + mock_ping_response = Mock(spec=httpx.Response, status_code=405) + mock_httpx_put = Mock(return_value=mock_ping_response) + monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) + with pytest.raises(TransactionPingError): + txn.ping() + + # Mock successful commit. + mock_commit_response = Mock(spec=httpx.Response, status_code=200) + mock_httpx_put = Mock(return_value=mock_commit_response) + monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) From 323f303b8769fc45cf558d59fe956472b91e639f Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Thu, 6 Nov 2025 13:33:30 +1000 Subject: [PATCH 36/54] refactor: prep Repository.size method to be used by transactions as well --- rdflib/contrib/rdf4j/client.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index b2723c21f..f5d210cba 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -430,10 +430,17 @@ def size( Raises: RepositoryFormatError: Fails to parse the repository size. 
""" + return self._size(graph_name) + + def _size( + self, + graph_name: IdentifiedNode | Iterable[IdentifiedNode] | str | None = None, + url: str | None = None, + ): params: dict[str, str] = {} build_context_param(params, graph_name) response = self.http_client.get( - f"/repositories/{self.identifier}/size", params=params + url or f"/repositories/{self.identifier}/size", params=params ) response.raise_for_status() try: From 4920a3e9b561146aa5db8ab64f515b3c3548ed3d Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Thu, 6 Nov 2025 13:35:30 +1000 Subject: [PATCH 37/54] test: refactor e2e test file names to avoid clash --- .../{test_repo_graph_store.py => test_e2e_repo_graph_store.py} | 0 .../{test_repo_management.py => test_e2e_repo_management.py} | 0 .../{test_repo_namespace.py => test_e2e_repo_namespace.py} | 0 .../test_e2e/{test_repo_query.py => test_e2e_repo_query.py} | 0 .../{test_repo_transaction.py => test_e2e_repo_transaction.py} | 0 .../test_e2e/{test_repo_update.py => test_e2e_repo_update.py} | 0 6 files changed, 0 insertions(+), 0 deletions(-) rename test/test_rdf4j/test_e2e/{test_repo_graph_store.py => test_e2e_repo_graph_store.py} (100%) rename test/test_rdf4j/test_e2e/{test_repo_management.py => test_e2e_repo_management.py} (100%) rename test/test_rdf4j/test_e2e/{test_repo_namespace.py => test_e2e_repo_namespace.py} (100%) rename test/test_rdf4j/test_e2e/{test_repo_query.py => test_e2e_repo_query.py} (100%) rename test/test_rdf4j/test_e2e/{test_repo_transaction.py => test_e2e_repo_transaction.py} (100%) rename test/test_rdf4j/test_e2e/{test_repo_update.py => test_e2e_repo_update.py} (100%) diff --git a/test/test_rdf4j/test_e2e/test_repo_graph_store.py b/test/test_rdf4j/test_e2e/test_e2e_repo_graph_store.py similarity index 100% rename from test/test_rdf4j/test_e2e/test_repo_graph_store.py rename to test/test_rdf4j/test_e2e/test_e2e_repo_graph_store.py diff --git a/test/test_rdf4j/test_e2e/test_repo_management.py 
b/test/test_rdf4j/test_e2e/test_e2e_repo_management.py similarity index 100% rename from test/test_rdf4j/test_e2e/test_repo_management.py rename to test/test_rdf4j/test_e2e/test_e2e_repo_management.py diff --git a/test/test_rdf4j/test_e2e/test_repo_namespace.py b/test/test_rdf4j/test_e2e/test_e2e_repo_namespace.py similarity index 100% rename from test/test_rdf4j/test_e2e/test_repo_namespace.py rename to test/test_rdf4j/test_e2e/test_e2e_repo_namespace.py diff --git a/test/test_rdf4j/test_e2e/test_repo_query.py b/test/test_rdf4j/test_e2e/test_e2e_repo_query.py similarity index 100% rename from test/test_rdf4j/test_e2e/test_repo_query.py rename to test/test_rdf4j/test_e2e/test_e2e_repo_query.py diff --git a/test/test_rdf4j/test_e2e/test_repo_transaction.py b/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py similarity index 100% rename from test/test_rdf4j/test_e2e/test_repo_transaction.py rename to test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py diff --git a/test/test_rdf4j/test_e2e/test_repo_update.py b/test/test_rdf4j/test_e2e/test_e2e_repo_update.py similarity index 100% rename from test/test_rdf4j/test_e2e/test_repo_update.py rename to test/test_rdf4j/test_e2e/test_e2e_repo_update.py From e1de8e4826a14744014755567a8fe6fc5e79b25a Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Thu, 6 Nov 2025 13:38:53 +1000 Subject: [PATCH 38/54] test: fix test --- test/test_rdf4j/test_unit/repository/test_repo_update.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_update.py b/test/test_rdf4j/test_unit/repository/test_repo_update.py index 75f00509f..09696e86b 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_update.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_update.py @@ -18,7 +18,7 @@ def test_repo_update(repo: Repository, monkeypatch: pytest.MonkeyPatch): "insert data { }" ) mock_httpx_post.assert_called_once_with( - "/repositories/test-repo", + 
"/repositories/test-repo/statements", headers={"Content-Type": "application/sparql-update"}, content="insert data { }", ) From 96375cc9db2455c9bc01789f36480de192ee1a2f Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Thu, 6 Nov 2025 13:47:53 +1000 Subject: [PATCH 39/54] Revert "refactor: prep Repository.size method to be used by transactions as well" This reverts commit 323f303b8769fc45cf558d59fe956472b91e639f. --- rdflib/contrib/rdf4j/client.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index f5d210cba..b2723c21f 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -430,17 +430,10 @@ def size( Raises: RepositoryFormatError: Fails to parse the repository size. """ - return self._size(graph_name) - - def _size( - self, - graph_name: IdentifiedNode | Iterable[IdentifiedNode] | str | None = None, - url: str | None = None, - ): params: dict[str, str] = {} build_context_param(params, graph_name) response = self.http_client.get( - url or f"/repositories/{self.identifier}/size", params=params + f"/repositories/{self.identifier}/size", params=params ) response.raise_for_status() try: From f472b30d1f5aad0354d0f58698f81e5fe2b0231e Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Thu, 6 Nov 2025 13:53:18 +1000 Subject: [PATCH 40/54] test: fix test --- test/test_rdf4j/test_unit/util/test_rdf4j_util.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/test/test_rdf4j/test_unit/util/test_rdf4j_util.py b/test/test_rdf4j/test_unit/util/test_rdf4j_util.py index ea4d4e5b6..fcc978bed 100644 --- a/test/test_rdf4j/test_unit/util/test_rdf4j_util.py +++ b/test/test_rdf4j/test_unit/util/test_rdf4j_util.py @@ -2,6 +2,7 @@ import io import typing as t +from pathlib import Path import pytest @@ -37,7 +38,7 @@ def test_build_context_param(graph_name, expected_graph_name_param): "data, expected_value_type, expected_should_close", [ [ - 
open("test/test_rdf4j/test_unit/repository/test_repo_delete.py", "rb"), + open(Path(__file__), "rb"), io.BufferedReader, False, ], From 578f0cf1709413eaa1a62e46320e87a09aa8d4b3 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Thu, 6 Nov 2025 14:06:36 +1000 Subject: [PATCH 41/54] feat: Add Transaction.size method --- rdflib/contrib/rdf4j/client.py | 55 ++++++++++----- .../test_e2e/test_e2e_repo_transaction.py | 2 + .../repository/test_repo_transaction.py | 67 ++++++++++++------- 3 files changed, 82 insertions(+), 42 deletions(-) diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index b2723c21f..d420b360c 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -436,8 +436,12 @@ def size( f"/repositories/{self.identifier}/size", params=params ) response.raise_for_status() + return self._to_size(response.text) + + @staticmethod + def _to_size(size: str): try: - value = int(response.text) + value = int(size) if value >= 0: return value raise ValueError(f"Invalid repository size: {value}") @@ -701,7 +705,7 @@ def delete( @contextlib.contextmanager def transaction(self): """Create a new transaction for the repository.""" - with Transaction(self.identifier, self.http_client) as txn: + with Transaction(self) as txn: yield txn @@ -713,9 +717,8 @@ class Transaction: http_client: The httpx.Client instance. 
""" - def __init__(self, identifier: str, http_client: httpx.Client): - self._identifier = identifier - self._http_client = http_client + def __init__(self, repo: Repository): + self._repo = repo self._url: str | None = self._start_transaction() def __enter__(self): @@ -726,13 +729,9 @@ def __exit__(self, exc_type, exc_val, exc_tb): self.commit() @property - def http_client(self): - return self._http_client - - @property - def identifier(self): - """Repository identifier.""" - return self._identifier + def repo(self): + """The repository instance.""" + return self._repo @property def url(self): @@ -744,8 +743,8 @@ def _raise_for_closed(self): raise TransactionClosedError("The transaction has been closed.") def _start_transaction(self) -> str: - response = self.http_client.post( - f"/repositories/{self.identifier}/transactions" + response = self.repo.http_client.post( + f"/repositories/{self.repo.identifier}/transactions" ) response.raise_for_status() return response.headers["Location"] @@ -762,7 +761,7 @@ def commit(self): """ self._raise_for_closed() params = {"action": "COMMIT"} - response = self.http_client.put(self.url, params=params) + response = self.repo.http_client.put(self.url, params=params) if response.status_code != 200: raise TransactionCommitError( f"Transaction commit failed: {response.status_code} - {response.text}" @@ -778,12 +777,36 @@ def ping(self): """ self._raise_for_closed() params = {"action": "PING"} - response = self.http_client.put(self.url, params=params) + response = self.repo.http_client.put(self.url, params=params) if response.status_code != 200: raise TransactionPingError( f"Transaction ping failed: {response.status_code} - {response.text}" ) + def size(self, graph_name: IdentifiedNode | Iterable[IdentifiedNode] | str | None = None): + """The number of statements in the repository or in the specified graph name. + + Parameters: + graph_name: Graph name(s) to restrict to. + + The default value `None` queries all graphs. 
+ + To query just the default graph, use + [`DATASET_DEFAULT_GRAPH_ID`][rdflib.graph.DATASET_DEFAULT_GRAPH_ID]. + + Returns: + The number of statements. + + Raises: + RepositoryFormatError: Fails to parse the repository size. + """ + self._raise_for_closed() + params = {"action": "SIZE"} + build_context_param(params, graph_name) + response = self.repo.http_client.put(self.url, params=params) + response.raise_for_status() + return self.repo._to_size(response.text) + class RepositoryManager: """A client to manage server-level repository operations. diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py b/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py index 3adb2598e..83a3d587a 100644 --- a/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py +++ b/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py @@ -13,6 +13,8 @@ def test_e2e_repo_transaction(repo: Repository): with repo.transaction() as txn: txn.ping() + assert txn.size() == 2 + assert txn.size("urn:graph:a") == 1 with pytest.raises(TransactionClosedError): txn.ping() diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction.py index 3175d1dda..9efcd422c 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction.py @@ -6,7 +6,7 @@ import pytest from rdflib.contrib.rdf4j.client import ( - Repository, + Repository, Transaction, ) from rdflib.contrib.rdf4j.exceptions import ( TransactionClosedError, @@ -14,6 +14,21 @@ ) +@pytest.fixture +def txn(repo: Repository, monkeypatch: pytest.MonkeyPatch): + transaction_url = "http://example.com/transaction/1" + mock_transaction_create_response = Mock( + spec=httpx.Response, headers={"Location": transaction_url} + ) + mock_httpx_post = Mock(return_value=mock_transaction_create_response) + monkeypatch.setattr(httpx.Client, "post", mock_httpx_post) + with repo.transaction() as txn: + yield txn + 
mock_commit_response = Mock(spec=httpx.Response, status_code=200) + mock_httpx_put = Mock(return_value=mock_commit_response) + monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) + + def test_repo_transaction_commit( repo: Repository, monkeypatch: pytest.MonkeyPatch, @@ -54,32 +69,32 @@ def test_repo_transaction_commit( txn.ping() -def test_repo_transaction_ping(repo: Repository, monkeypatch: pytest.MonkeyPatch): - transaction_url = "http://example.com/transaction/1" - mock_transaction_create_response = Mock( - spec=httpx.Response, headers={"Location": transaction_url} +def test_repo_transaction_ping(txn: Transaction, monkeypatch: pytest.MonkeyPatch): + # Test a successful ping. + mock_ping_response = Mock(spec=httpx.Response, status_code=200) + mock_httpx_put = Mock(return_value=mock_ping_response) + monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) + txn.ping() + mock_httpx_put.assert_called_once_with( + txn.url, + params={"action": "PING"}, ) - mock_httpx_post = Mock(return_value=mock_transaction_create_response) - monkeypatch.setattr(httpx.Client, "post", mock_httpx_post) - with repo.transaction() as txn: - # Test a successful ping. - mock_ping_response = Mock(spec=httpx.Response, status_code=200) - mock_httpx_put = Mock(return_value=mock_ping_response) - monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) + + # Ensure it raises TransactionPingError. + mock_ping_response = Mock(spec=httpx.Response, status_code=405) + mock_httpx_put = Mock(return_value=mock_ping_response) + monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) + with pytest.raises(TransactionPingError): txn.ping() - mock_httpx_put.assert_called_once_with( - transaction_url, - params={"action": "PING"}, - ) - # Ensure it raises TransactionPingError. 
- mock_ping_response = Mock(spec=httpx.Response, status_code=405) - mock_httpx_put = Mock(return_value=mock_ping_response) - monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) - with pytest.raises(TransactionPingError): - txn.ping() - # Mock successful commit. - mock_commit_response = Mock(spec=httpx.Response, status_code=200) - mock_httpx_put = Mock(return_value=mock_commit_response) - monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) +def test_repo_transaction_size(txn: Transaction, monkeypatch: pytest.MonkeyPatch): + mock_response = Mock(spec=httpx.Response, text="10") + mock_httpx_put = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) + size = txn.size() + mock_httpx_put.assert_called_once_with( + txn.url, + params={"action": "SIZE"}, + ) + assert size == 10 From af504f98415f02a5c4da45664ee91f4327a90db9 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Thu, 6 Nov 2025 15:47:33 +1000 Subject: [PATCH 42/54] feat: Add Transaction rollback, add, and query methods --- rdflib/contrib/rdf4j/client.py | 106 +++++++++++++++--- rdflib/contrib/rdf4j/exceptions.py | 4 + rdflib/contrib/rdf4j/util.py | 27 ++++- .../test_e2e/test_e2e_repo_transaction.py | 29 ++++- .../test_unit/repository/conftest.py | 17 +++ .../test_unit/repository/test_repo_query.py | 10 -- .../repository/test_repo_transaction.py | 100 ----------------- .../test_repo_transaction_commit.py | 53 +++++++++ .../repository/test_repo_transaction_ping.py | 32 ++++++ .../repository/test_repo_transaction_query.py | 92 +++++++++++++++ .../test_repo_transaction_rollback.py | 52 +++++++++ .../repository/test_repo_transaction_size.py | 22 ++++ .../test_unit/util/test_rdf4j_util.py | 18 ++- 13 files changed, 429 insertions(+), 133 deletions(-) delete mode 100644 test/test_rdf4j/test_unit/repository/test_repo_transaction.py create mode 100644 test/test_rdf4j/test_unit/repository/test_repo_transaction_commit.py create mode 100644 
test/test_rdf4j/test_unit/repository/test_repo_transaction_ping.py create mode 100644 test/test_rdf4j/test_unit/repository/test_repo_transaction_query.py create mode 100644 test/test_rdf4j/test_unit/repository/test_repo_transaction_rollback.py create mode 100644 test/test_rdf4j/test_unit/repository/test_repo_transaction_size.py diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index d420b360c..9da8571f2 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -22,15 +22,16 @@ TransactionClosedError, TransactionCommitError, TransactionPingError, + TransactionRollbackError, ) from rdflib.contrib.rdf4j.util import ( build_context_param, build_infer_param, + build_sparql_query_accept_header, build_spo_param, rdf_payload_to_stream, ) from rdflib.graph import DATASET_DEFAULT_GRAPH_ID, Dataset, Graph -from rdflib.plugins.sparql import prepareQuery from rdflib.query import Result from rdflib.term import IdentifiedNode, Literal, URIRef @@ -464,14 +465,8 @@ def query(self, query: str, **kwargs): [RDF4J REST API - Execute SPARQL query](https://rdf4j.org/documentation/reference/rest-api/#tag/SPARQL/paths/~1repositories~1%7BrepositoryID%7D/get) for the list of supported query parameters. """ - prepared_query = prepareQuery(query) headers = {"Content-Type": "application/sparql-query"} - if prepared_query.algebra.name in ("SelectQuery", "AskQuery"): - headers["Accept"] = "application/sparql-results+json" - elif prepared_query.algebra.name in ("ConstructQuery", "DescribeQuery"): - headers["Accept"] = "application/n-triples" - else: - raise ValueError(f"Unsupported query type: {prepared_query.algebra.name}") + build_sparql_query_accept_header(query, headers) if not kwargs: response = self.http_client.post( @@ -598,8 +593,6 @@ def get( except Exception as err: raise RDFLibParserError(f"Error parsing RDF: {err}") from err - # TODO: This only covers appending statements to a repository. 
- # We still need to implement sparql update and transaction document. def upload( self, data: str | bytes | BinaryIO | Graph | Dataset, @@ -713,20 +706,26 @@ class Transaction: """An RDF4J transaction. Parameters: - identifier: The identifier of the repository. - http_client: The httpx.Client instance. + repo: The repository instance. """ def __init__(self, repo: Repository): self._repo = repo - self._url: str | None = self._start_transaction() + self._url: str | None = None def __enter__(self): + self._url = self._start_transaction() return self def __exit__(self, exc_type, exc_val, exc_tb): - if self._url is not None: - self.commit() + if not self.is_closed: + if exc_type is None: + self.commit() + else: + self.rollback() + + # Propagate errors. + return False @property def repo(self): @@ -738,8 +737,13 @@ def url(self): """The transaction URL.""" return self._url + @property + def is_closed(self) -> bool: + """Whether the transaction is closed.""" + return self._url is None + def _raise_for_closed(self): - if self._url is None: + if self.is_closed: raise TransactionClosedError("The transaction has been closed.") def _start_transaction(self) -> str: @@ -752,6 +756,10 @@ def _start_transaction(self) -> str: def _close_transaction(self): self._url = None + def open(self): + """Opens a transaction.""" + self._url = self._start_transaction() + def commit(self): """Commit the transaction. @@ -768,6 +776,15 @@ def commit(self): ) self._close_transaction() + def rollback(self): + """Roll back the transaction.""" + response = self.repo.http_client.delete(self.url) + if response.status_code != 204: + raise TransactionRollbackError( + f"Transaction rollback failed: {response.status_code} - {response.text}" + ) + self._close_transaction() + def ping(self): """Ping the transaction. 
@@ -783,7 +800,9 @@ def ping(self): f"Transaction ping failed: {response.status_code} - {response.text}" ) - def size(self, graph_name: IdentifiedNode | Iterable[IdentifiedNode] | str | None = None): + def size( + self, graph_name: IdentifiedNode | Iterable[IdentifiedNode] | str | None = None + ): """The number of statements in the repository or in the specified graph name. Parameters: @@ -807,6 +826,59 @@ def size(self, graph_name: IdentifiedNode | Iterable[IdentifiedNode] | str | Non response.raise_for_status() return self.repo._to_size(response.text) + def query(self, query: str, **kwargs): + """Execute a SPARQL query against the repository. + + Parameters: + query: The SPARQL query to execute. + **kwargs: Additional keyword arguments to include as query parameters + in the request. See + [RDF4J REST API - Execute SPARQL query](https://rdf4j.org/documentation/reference/rest-api/#tag/SPARQL/paths/~1repositories~1%7BrepositoryID%7D/get) + for the list of supported query parameters. + """ + headers: dict[str, str] = {} + build_sparql_query_accept_header(query, headers) + params = {"action": "QUERY", "query": query} + response = self.repo.http_client.put( + self.url, headers=headers, params={**params, **kwargs} + ) + response.raise_for_status() + return Result.parse( + io.BytesIO(response.content), + content_type=response.headers["Content-Type"].split(";")[0], + ) + + def upload( + self, + data: str | bytes | BinaryIO | Graph | Dataset, + base_uri: str | None = None, + content_type: str | None = None, + ): + """Upload and append statements to the repository. + + Parameters: + data: The RDF data to upload. + base_uri: The base URI to resolve against for any relative URIs in the data. + content_type: The content type of the data. Defaults to + `application/n-quads` when the value is `None`. 
+ """ + stream, should_close = rdf_payload_to_stream(data) + try: + headers = {"Content-Type": content_type or "application/n-quads"} + params = {"action": "ADD"} + if base_uri is not None: + params["baseURI"] = base_uri + response = self.repo.http_client.put( + self.url, + headers=headers, + params=params, + content=stream, + ) + response.raise_for_status() + finally: + if should_close: + stream.close() + class RepositoryManager: """A client to manage server-level repository operations. diff --git a/rdflib/contrib/rdf4j/exceptions.py b/rdflib/contrib/rdf4j/exceptions.py index b09e093e1..273ae5e6d 100644 --- a/rdflib/contrib/rdf4j/exceptions.py +++ b/rdflib/contrib/rdf4j/exceptions.py @@ -43,3 +43,7 @@ class TransactionPingError(RepositoryTransactionError): class TransactionCommitError(RepositoryTransactionError): """Raised when there is an error committing the transaction.""" + + +class TransactionRollbackError(RepositoryTransactionError): + """Raised when there is an error rolling back the transaction.""" diff --git a/rdflib/contrib/rdf4j/util.py b/rdflib/contrib/rdf4j/util.py index d6eefc1ec..ea370ad6d 100644 --- a/rdflib/contrib/rdf4j/util.py +++ b/rdflib/contrib/rdf4j/util.py @@ -6,6 +6,7 @@ import typing as t from rdflib.graph import DATASET_DEFAULT_GRAPH_ID, Dataset, Graph +from rdflib.plugins.sparql.processor import prepareQuery from rdflib.term import IdentifiedNode, URIRef if t.TYPE_CHECKING: @@ -21,7 +22,7 @@ def build_context_param( !!! Note This mutates the params dictionary key `context`. - Args: + Parameters: params: The `httpx.Request` parameter dictionary. graph_name: The graph name or iterable of graph names. @@ -52,7 +53,7 @@ def build_spo_param( !!! Note This mutates the params dictionary key `subj`, `pred`, and `obj`. - Args: + Parameters: params: The `httpx.Request` parameter dictionary. subj: The `subj` query parameter value. pred: The `pred` query parameter value. @@ -75,7 +76,7 @@ def build_infer_param( !!! 
Note This mutates the params dictionary key `infer`. - Args: + Parameters: params: The `httpx.Request` parameter dictionary. infer: The `infer` query parameter value. """ @@ -88,7 +89,7 @@ def rdf_payload_to_stream( ) -> tuple[t.BinaryIO, bool]: """Convert an RDF payload into a file-like object. - Args: + Parameters: data: The RDF payload. This can be a python `str`, `bytes`, `BinaryIO`, or a @@ -132,3 +133,21 @@ def rdf_payload_to_stream( should_close = False return stream, should_close + + +def build_sparql_query_accept_header(query: str, headers: dict[str, str]): + """Build the SPARQL query accept header. + + !!! Note + This mutates the headers dictionary key `Accept`. + + Parameters: + query: The SPARQL query. + """ + prepared_query = prepareQuery(query) + if prepared_query.algebra.name in ("SelectQuery", "AskQuery"): + headers["Accept"] = "application/sparql-results+json" + elif prepared_query.algebra.name in ("ConstructQuery", "DescribeQuery"): + headers["Accept"] = "application/n-triples" + else: + raise ValueError(f"Unsupported query type: {prepared_query.algebra.name}") diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py b/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py index 83a3d587a..8aabf08ed 100644 --- a/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py +++ b/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py @@ -2,8 +2,9 @@ import pytest -from rdflib.contrib.rdf4j.client import Repository +from rdflib.contrib.rdf4j.client import Repository, Transaction from rdflib.contrib.rdf4j.exceptions import TransactionClosedError +from rdflib.term import Variable, Literal def test_e2e_repo_transaction(repo: Repository): @@ -16,5 +17,31 @@ def test_e2e_repo_transaction(repo: Repository): assert txn.size() == 2 assert txn.size("urn:graph:a") == 1 + # Open a transaction without a context manager + txn = Transaction(repo) + txn.open() + assert txn.size() == 2 + txn.rollback() + assert txn.url is None + + # Raises an error as the 
transaction is closed. with pytest.raises(TransactionClosedError): txn.ping() + + path = str(Path(__file__).parent.parent / "data/quads-2.nq") + with repo.transaction() as txn: + query = "select (count(*) as ?count) where {?s ?p ?o}" + result = txn.query(query) + # Before upload, the number of statements is 2. + assert result.bindings[0][Variable("count")] == Literal(2) + # Add data. + txn.upload(path) + assert txn.size() == 3 + result = txn.query(query) + # Now it's 3. + assert result.bindings[0][Variable("count")] == Literal(3) + # Repo is still 2 as we've not yet committed. + assert repo.size() == 2 + + # Transaction committed, size is now 3. + assert repo.size() == 3 diff --git a/test/test_rdf4j/test_unit/repository/conftest.py b/test/test_rdf4j/test_unit/repository/conftest.py index 12951e377..0278e9104 100644 --- a/test/test_rdf4j/test_unit/repository/conftest.py +++ b/test/test_rdf4j/test_unit/repository/conftest.py @@ -1,5 +1,7 @@ from __future__ import annotations +from unittest.mock import Mock + import httpx import pytest @@ -26,3 +28,18 @@ def repo(client: RDF4JClient, monkeypatch: pytest.MonkeyPatch): repo = client.repositories.create("test-repo", "") assert repo.identifier == "test-repo" yield repo + + +@pytest.fixture +def txn(repo: Repository, monkeypatch: pytest.MonkeyPatch): + transaction_url = "http://example.com/transaction/1" + mock_transaction_create_response = Mock( + spec=httpx.Response, headers={"Location": transaction_url} + ) + mock_httpx_post = Mock(return_value=mock_transaction_create_response) + monkeypatch.setattr(httpx.Client, "post", mock_httpx_post) + with repo.transaction() as txn: + yield txn + mock_commit_response = Mock(spec=httpx.Response, status_code=200) + mock_httpx_put = Mock(return_value=mock_commit_response) + monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_query.py b/test/test_rdf4j/test_unit/repository/test_repo_query.py index d6ec1da50..9aa3f9343 
100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_query.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_query.py @@ -94,16 +94,6 @@ def test_repo_query( assert False, "Unexpected result type" -def test_repo_query_value_error(repo: Repository, monkeypatch: pytest.MonkeyPatch): - mock_query = Mock() - mock_query.algebra.name = "InvalidQueryType" - monkeypatch.setattr( - "rdflib.contrib.rdf4j.client.prepareQuery", lambda _: mock_query - ) - with pytest.raises(ValueError, match="Unsupported query type: InvalidQueryType"): - repo.query("") - - def test_repo_query_kwargs(repo: Repository, monkeypatch: pytest.MonkeyPatch): """The query method uses GET if a keyword argument is provided.""" mock_response = Mock( diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction.py deleted file mode 100644 index 9efcd422c..000000000 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction.py +++ /dev/null @@ -1,100 +0,0 @@ -from __future__ import annotations - -from unittest.mock import Mock - -import httpx -import pytest - -from rdflib.contrib.rdf4j.client import ( - Repository, Transaction, -) -from rdflib.contrib.rdf4j.exceptions import ( - TransactionClosedError, - TransactionPingError, -) - - -@pytest.fixture -def txn(repo: Repository, monkeypatch: pytest.MonkeyPatch): - transaction_url = "http://example.com/transaction/1" - mock_transaction_create_response = Mock( - spec=httpx.Response, headers={"Location": transaction_url} - ) - mock_httpx_post = Mock(return_value=mock_transaction_create_response) - monkeypatch.setattr(httpx.Client, "post", mock_httpx_post) - with repo.transaction() as txn: - yield txn - mock_commit_response = Mock(spec=httpx.Response, status_code=200) - mock_httpx_put = Mock(return_value=mock_commit_response) - monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) - - -def test_repo_transaction_commit( - repo: Repository, - monkeypatch: 
pytest.MonkeyPatch, -): - transaction_url = "http://example.com/transaction/1" - mock_transaction_create_response = Mock( - spec=httpx.Response, headers={"Location": transaction_url} - ) - mock_httpx_post = Mock(return_value=mock_transaction_create_response) - monkeypatch.setattr(httpx.Client, "post", mock_httpx_post) - with repo.transaction() as txn: - # Ensure the transaction is created. - assert txn.url == transaction_url - mock_httpx_post.assert_called_once_with( - "/repositories/test-repo/transactions", - ) - - # Mock commit response. - mock_transaction_commit_response = Mock(spec=httpx.Response, status_code=200) - mock_httpx_put = Mock(return_value=mock_transaction_commit_response) - monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) - # Explicitly commit. This closes the transaction. - txn.commit() - mock_httpx_put.assert_called_once_with( - transaction_url, - params={"action": "COMMIT"}, - ) - # Ensure it is closed. - assert txn.url is None - with pytest.raises(TransactionClosedError): - txn.ping() - - with repo.transaction() as txn: - txn.ping() - - with pytest.raises(TransactionClosedError): - # Ensure that the context manager closes the transaction. - txn.ping() - - -def test_repo_transaction_ping(txn: Transaction, monkeypatch: pytest.MonkeyPatch): - # Test a successful ping. - mock_ping_response = Mock(spec=httpx.Response, status_code=200) - mock_httpx_put = Mock(return_value=mock_ping_response) - monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) - txn.ping() - mock_httpx_put.assert_called_once_with( - txn.url, - params={"action": "PING"}, - ) - - # Ensure it raises TransactionPingError. 
- mock_ping_response = Mock(spec=httpx.Response, status_code=405) - mock_httpx_put = Mock(return_value=mock_ping_response) - monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) - with pytest.raises(TransactionPingError): - txn.ping() - - -def test_repo_transaction_size(txn: Transaction, monkeypatch: pytest.MonkeyPatch): - mock_response = Mock(spec=httpx.Response, text="10") - mock_httpx_put = Mock(return_value=mock_response) - monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) - size = txn.size() - mock_httpx_put.assert_called_once_with( - txn.url, - params={"action": "SIZE"}, - ) - assert size == 10 diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_commit.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_commit.py new file mode 100644 index 000000000..b05f6c35e --- /dev/null +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_commit.py @@ -0,0 +1,53 @@ +from __future__ import annotations + +from unittest.mock import Mock + +import httpx +import pytest + +from rdflib.contrib.rdf4j.client import ( + Repository, +) +from rdflib.contrib.rdf4j.exceptions import ( + TransactionClosedError, +) + + +def test_repo_transaction_commit( + repo: Repository, + monkeypatch: pytest.MonkeyPatch, +): + transaction_url = "http://example.com/transaction/1" + mock_transaction_create_response = Mock( + spec=httpx.Response, headers={"Location": transaction_url} + ) + mock_httpx_post = Mock(return_value=mock_transaction_create_response) + monkeypatch.setattr(httpx.Client, "post", mock_httpx_post) + with repo.transaction() as txn: + # Ensure the transaction is created. + assert txn.url == transaction_url + mock_httpx_post.assert_called_once_with( + "/repositories/test-repo/transactions", + ) + + # Mock commit response. 
+ mock_transaction_commit_response = Mock(spec=httpx.Response, status_code=200) + mock_httpx_put = Mock(return_value=mock_transaction_commit_response) + monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) + # Explicitly commit. This closes the transaction. + txn.commit() + mock_httpx_put.assert_called_once_with( + transaction_url, + params={"action": "COMMIT"}, + ) + # Ensure it is closed. + assert txn.url is None + with pytest.raises(TransactionClosedError): + txn.ping() + + with repo.transaction() as txn: + txn.ping() + + with pytest.raises(TransactionClosedError): + # Ensure that the context manager closes the transaction. + txn.ping() diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_ping.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_ping.py new file mode 100644 index 000000000..f47de348b --- /dev/null +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_ping.py @@ -0,0 +1,32 @@ +from __future__ import annotations + +from unittest.mock import Mock + +import httpx +import pytest + +from rdflib.contrib.rdf4j.client import ( + Transaction, +) +from rdflib.contrib.rdf4j.exceptions import ( + TransactionPingError, +) + + +def test_repo_transaction_ping(txn: Transaction, monkeypatch: pytest.MonkeyPatch): + # Test a successful ping. + mock_ping_response = Mock(spec=httpx.Response, status_code=200) + mock_httpx_put = Mock(return_value=mock_ping_response) + monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) + txn.ping() + mock_httpx_put.assert_called_once_with( + txn.url, + params={"action": "PING"}, + ) + + # Ensure it raises TransactionPingError. 
+ mock_ping_response = Mock(spec=httpx.Response, status_code=405) + mock_httpx_put = Mock(return_value=mock_ping_response) + monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) + with pytest.raises(TransactionPingError): + txn.ping() diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_query.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_query.py new file mode 100644 index 000000000..a447a30eb --- /dev/null +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_query.py @@ -0,0 +1,92 @@ +from __future__ import annotations + +from unittest.mock import Mock + +import httpx +import pytest + +from rdflib.contrib.rdf4j.client import ( + Transaction, +) +from rdflib.graph import Graph +from rdflib.term import URIRef, Variable + + +@pytest.mark.parametrize( + "query, accept_header, response_text, expected_result_type", + [ + [ + "select ?s where { ?s ?p ?o }", + "application/sparql-results+json", + """ + { + "head": { + "vars": ["s"] + }, + "results": { + "bindings": [{"s": {"value": "http://example.com/s", "type": "uri"}}] + } + } + """, + "SELECT", + ], + [ + "ask where { ?s ?p ?o }", + "application/sparql-results+json", + '{ "boolean": true }', + "ASK", + ], + [ + "construct { ?s ?p ?o } where { ?s ?p ?o }", + "application/n-triples", + " .", + "CONSTRUCT", + ], + [ + "describe ?s", + "application/n-triples", + " .", + "CONSTRUCT", + ], + ], +) +def test_repo_transaction_query( + txn: Transaction, + monkeypatch: pytest.MonkeyPatch, + query: str, + accept_header: str, + response_text: str, + expected_result_type, +): + mock_response = Mock( + spec=httpx.Response, + content=response_text.encode("utf-8"), + headers={"Content-Type": accept_header}, + ) + mock_httpx_put = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) + result = txn.query(query, infer="true") + mock_httpx_put.assert_called_once_with( + txn.url, + params={"action": "QUERY", "query": query, "infer": "true"}, + 
headers={"Accept": accept_header}, + ) + + if expected_result_type == "SELECT": + assert len(result) == 1 + s_var = Variable("s") + assert result.vars == [s_var] + assert result.bindings[0].get(s_var) == URIRef("http://example.com/s") + elif expected_result_type == "ASK": + assert result.askAnswer is True + elif expected_result_type == "CONSTRUCT": + assert len(result.graph) == 1 + assert ( + Graph() + .parse( + data=" ." + ) + .isomorphic(result.graph) + ) + else: + assert False, "Unexpected result type" diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_rollback.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_rollback.py new file mode 100644 index 000000000..9cb92b739 --- /dev/null +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_rollback.py @@ -0,0 +1,52 @@ +from __future__ import annotations + +from unittest.mock import Mock + +import httpx +import pytest + +from rdflib.contrib.rdf4j.client import ( + Repository, +) +from rdflib.contrib.rdf4j.exceptions import ( + TransactionClosedError, + TransactionPingError, +) + + +def test_repo_transaction_rollback( + repo: Repository, + monkeypatch: pytest.MonkeyPatch, +): + transaction_url = "http://example.com/transaction/1" + mock_transaction_create_response = Mock( + spec=httpx.Response, headers={"Location": transaction_url} + ) + mock_httpx_post = Mock(return_value=mock_transaction_create_response) + monkeypatch.setattr(httpx.Client, "post", mock_httpx_post) + with repo.transaction() as txn: + mock_rollback_response = Mock(spec=httpx.Response, status_code=204) + mock_httpx_delete = Mock(return_value=mock_rollback_response) + monkeypatch.setattr(httpx.Client, "delete", mock_httpx_delete) + txn.rollback() + assert txn.url is None + mock_httpx_delete.assert_called_once_with( + transaction_url, + ) + with pytest.raises(TransactionClosedError): + txn.ping() + + mock_rollback_response = Mock(spec=httpx.Response, status_code=204) + mock_httpx_delete = 
Mock(return_value=mock_rollback_response) + monkeypatch.setattr(httpx.Client, "delete", mock_httpx_delete) + with pytest.raises(TransactionPingError): + with repo.transaction() as txn: + mock_response = Mock(spec=httpx.Response, status_code=405) + mock_httpx_put = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) + txn.ping() + + # Confirm transaction rollback is performed automatically. + mock_httpx_delete.assert_called_once_with( + transaction_url, + ) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_size.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_size.py new file mode 100644 index 000000000..45315b0b5 --- /dev/null +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_size.py @@ -0,0 +1,22 @@ +from __future__ import annotations + +from unittest.mock import Mock + +import httpx +import pytest + +from rdflib.contrib.rdf4j.client import ( + Transaction, +) + + +def test_repo_transaction_size(txn: Transaction, monkeypatch: pytest.MonkeyPatch): + mock_response = Mock(spec=httpx.Response, text="10") + mock_httpx_put = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) + size = txn.size() + mock_httpx_put.assert_called_once_with( + txn.url, + params={"action": "SIZE"}, + ) + assert size == 10 diff --git a/test/test_rdf4j/test_unit/util/test_rdf4j_util.py b/test/test_rdf4j/test_unit/util/test_rdf4j_util.py index fcc978bed..7fd742cf5 100644 --- a/test/test_rdf4j/test_unit/util/test_rdf4j_util.py +++ b/test/test_rdf4j/test_unit/util/test_rdf4j_util.py @@ -3,10 +3,17 @@ import io import typing as t from pathlib import Path +from unittest.mock import Mock import pytest -from rdflib.contrib.rdf4j.util import build_context_param, rdf_payload_to_stream +import rdflib.contrib.rdf4j.util +import rdflib.plugins.sparql.processor +from rdflib.contrib.rdf4j.util import ( + build_context_param, + build_sparql_query_accept_header, + 
rdf_payload_to_stream, +) from rdflib.graph import DATASET_DEFAULT_GRAPH_ID, Dataset, Graph from rdflib.term import BNode, URIRef @@ -57,3 +64,12 @@ def test_rdf_payload_to_stream( value, should_close = rdf_payload_to_stream(data) assert isinstance(value, expected_value_type) assert should_close == expected_should_close + + +def test_build_sparql_query_accept_header(monkeypatch: pytest.MonkeyPatch): + mock = Mock() + mock.algebra.name = "InvalidQueryType" + monkeypatch.setattr(rdflib.contrib.rdf4j.util, "prepareQuery", lambda _: mock) + + with pytest.raises(ValueError, match="Unsupported query type: InvalidQueryType"): + build_sparql_query_accept_header("blah", {}) From c7d66004c627f8de1f1b71dfc8796614365dffc2 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Thu, 6 Nov 2025 16:25:33 +1000 Subject: [PATCH 43/54] feat: Add Transaction delete, and also Transaction upload test --- rdflib/contrib/rdf4j/client.py | 45 +++++++++++++++++-- .../test_e2e/test_e2e_repo_transaction.py | 16 ++++++- .../test_repo_transaction_delete.py | 23 ++++++++++ .../test_repo_transaction_upload.py | 19 ++++++++ 4 files changed, 98 insertions(+), 5 deletions(-) create mode 100644 test/test_rdf4j/test_unit/repository/test_repo_transaction_delete.py create mode 100644 test/test_rdf4j/test_unit/repository/test_repo_transaction_upload.py diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index 9da8571f2..e101ad6ea 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -863,11 +863,48 @@ def upload( `application/n-quads` when the value is `None`. 
""" stream, should_close = rdf_payload_to_stream(data) + headers = {"Content-Type": content_type or "application/n-quads"} + params = {"action": "ADD"} + if base_uri is not None: + params["baseURI"] = base_uri + try: + response = self.repo.http_client.put( + self.url, + headers=headers, + params=params, + content=stream, + ) + response.raise_for_status() + finally: + if should_close: + stream.close() + + def delete( + self, + data: str | bytes | BinaryIO | Graph | Dataset, + base_uri: str | None = None, + content_type: str | None = None, + ) -> None: + """Delete statements from the repository. + + !!! Note + This function operates differently to [`Repository.delete`][] as it does + not use filter parameters. Instead, it expects a data payload. + See the notes from [graphdb.js#Deleting](https://github.com/Ontotext-AD/graphdb.js?tab=readme-ov-file#deleting-1) + for more information. + + Parameters: + data: The RDF data to upload. + base_uri: The base URI to resolve against for any relative URIs in the data. + content_type: The content type of the data. Defaults to + `application/n-quads` when the value is `None`. 
+ """ + params: dict[str, str] = {"action": "DELETE"} + stream, should_close = rdf_payload_to_stream(data) + headers = {"Content-Type": content_type or "application/n-quads"} + if base_uri is not None: + params["baseURI"] = base_uri try: - headers = {"Content-Type": content_type or "application/n-quads"} - params = {"action": "ADD"} - if base_uri is not None: - params["baseURI"] = base_uri response = self.repo.http_client.put( self.url, headers=headers, diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py b/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py index 8aabf08ed..e478fca4c 100644 --- a/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py +++ b/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py @@ -4,7 +4,7 @@ from rdflib.contrib.rdf4j.client import Repository, Transaction from rdflib.contrib.rdf4j.exceptions import TransactionClosedError -from rdflib.term import Variable, Literal +from rdflib.term import Literal, Variable def test_e2e_repo_transaction(repo: Repository): @@ -45,3 +45,17 @@ def test_e2e_repo_transaction(repo: Repository): # Transaction committed, size is now 3. assert repo.size() == 3 + + +def test_e2e_repo_transaction_delete(repo: Repository): + path = str(Path(__file__).parent.parent / "data/quads-1.nq") + repo.overwrite(path) + data = " ." 
+ repo.upload(data) + assert repo.size() == 3 + assert repo.size("urn:graph:a2") == 1 + + with repo.transaction() as txn: + txn.delete(data) + assert txn.size() == 2 + assert txn.size("urn:graph:a2") == 0 diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_delete.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_delete.py new file mode 100644 index 000000000..5e2105203 --- /dev/null +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_delete.py @@ -0,0 +1,23 @@ +from __future__ import annotations + +from unittest.mock import ANY, Mock + +import httpx +import pytest + +from rdflib.contrib.rdf4j.client import ( + Transaction, +) + + +def test_repo_transaction_delete(txn: Transaction, monkeypatch: pytest.MonkeyPatch): + mock_response = Mock(spec=httpx.Response) + mock_httpx_put = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) + txn.delete("") + mock_httpx_put.assert_called_once_with( + txn.url, + headers={"Content-Type": "application/n-quads"}, + params={"action": "DELETE"}, + content=ANY, + ) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_upload.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_upload.py new file mode 100644 index 000000000..387ceef40 --- /dev/null +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_upload.py @@ -0,0 +1,19 @@ +from unittest.mock import ANY, Mock + +import httpx +import pytest + +from rdflib.contrib.rdf4j.client import Transaction + + +def test_repo_transaction_upload(txn: Transaction, monkeypatch: pytest.MonkeyPatch): + mock_response = Mock(spec=httpx.Response) + mock_httpx_put = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) + txn.upload("") + mock_httpx_put.assert_called_once_with( + txn.url, + headers={"Content-Type": "application/n-quads"}, + params={"action": "ADD"}, + content=ANY, + ) From 2e456221d3ce4249e0f057b495a99d0192163640 Mon Sep 17 
00:00:00 2001 From: Edmond Chuc Date: Thu, 6 Nov 2025 16:38:33 +1000 Subject: [PATCH 44/54] feat: Add Transaction update --- rdflib/contrib/rdf4j/client.py | 16 ++++++++++++++ .../test_e2e/test_e2e_repo_transaction.py | 12 ++++++++++ .../test_repo_transaction_update.py | 22 +++++++++++++++++++ 3 files changed, 50 insertions(+) create mode 100644 test/test_rdf4j/test_unit/repository/test_repo_transaction_update.py diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index e101ad6ea..22e6da48e 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -848,6 +848,22 @@ def query(self, query: str, **kwargs): content_type=response.headers["Content-Type"].split(";")[0], ) + def update(self, query: str, **kwargs): + """Execute a SPARQL update operation on the repository. + + Parameters: + query: The SPARQL update query to execute. + **kwargs: Additional keyword arguments to include as query parameters + See [RDF4J REST API - Execute a transaction action](https://rdf4j.org/documentation/reference/rest-api/#tag/Transactions/paths/~1repositories~1%7BrepositoryID%7D~1transactions~1%7BtransactionID%7D/put) + for the list of supported query parameters. 
+ """ + params = {"action": "UPDATE", "update": query} + response = self.repo.http_client.put( + self.url, + params={**params, **kwargs}, + ) + response.raise_for_status() + def upload( self, data: str | bytes | BinaryIO | Graph | Dataset, diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py b/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py index e478fca4c..d11bf023a 100644 --- a/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py +++ b/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py @@ -59,3 +59,15 @@ def test_e2e_repo_transaction_delete(repo: Repository): txn.delete(data) assert txn.size() == 2 assert txn.size("urn:graph:a2") == 0 + + +def test_e2e_repo_transaction_update(repo: Repository): + path = str(Path(__file__).parent.parent / "data/quads-1.nq") + repo.overwrite(path) + assert repo.size() == 2 + + query = "INSERT DATA { GRAPH { } }" + with repo.transaction() as txn: + txn.update(query) + assert txn.size() == 3 + assert txn.size("urn:graph:a2") == 1 diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_update.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_update.py new file mode 100644 index 000000000..313d7cb77 --- /dev/null +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_update.py @@ -0,0 +1,22 @@ +from __future__ import annotations + +from unittest.mock import Mock + +import httpx +import pytest + +from rdflib.contrib.rdf4j.client import ( + Transaction, +) + + +def test_repo_update(txn: Transaction, monkeypatch: pytest.MonkeyPatch): + mock_response = Mock(spec=httpx.Response, status_code=204) + mock_httpx_put = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) + query = "insert data { }" + txn.update(query) + mock_httpx_put.assert_called_once_with( + txn.url, + params={"action": "UPDATE", "update": query}, + ) From 0a680fbf821768a80826a2b6f3b87b62b117bd2f Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Thu, 6 Nov 2025 17:03:29 +1000 
Subject: [PATCH 45/54] feat: Add Transaction get --- rdflib/contrib/rdf4j/client.py | 62 +++++ .../test_e2e/test_e2e_repo_transaction.py | 26 +- .../repository/test_repo_transaction_get.py | 231 ++++++++++++++++++ 3 files changed, 318 insertions(+), 1 deletion(-) create mode 100644 test/test_rdf4j/test_unit/repository/test_repo_transaction_get.py diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index 22e6da48e..011965ba6 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -895,6 +895,68 @@ def upload( if should_close: stream.close() + def get( + self, + subj: SubjectType = None, + pred: PredicateType = None, + obj: ObjectType = None, + graph_name: IdentifiedNode | Iterable[IdentifiedNode] | str | None = None, + infer: bool = True, + content_type: str | None = None, + ) -> Graph | Dataset: + """Get RDF statements from the repository matching the filtering parameters. + + Parameters: + subj: Subject of the statement to filter by, or `None` to match all. + pred: Predicate of the statement to filter by, or `None` to match all. + obj: Object of the statement to filter by, or `None` to match all. + graph_name: Graph name(s) to restrict to. + + The default value `None` queries all graphs. + + To query just the default graph, use + [`DATASET_DEFAULT_GRAPH_ID`][rdflib.graph.DATASET_DEFAULT_GRAPH_ID]. + + infer: Specifies whether inferred statements should be included in the + result. + content_type: The content type of the response. + A triple-based format returns a [Graph][rdflib.graph.Graph], while a + quad-based format returns a [`Dataset`][rdflib.graph.Dataset]. + + Returns: + A [`Graph`][rdflib.graph.Graph] or [`Dataset`][rdflib.graph.Dataset] object + with the repository namespace prefixes bound to it. 
+ """ + if content_type is None: + content_type = "application/n-quads" + headers = {"Accept": content_type} + params: dict[str, str] = {"action": "GET"} + build_context_param(params, graph_name) + build_spo_param(params, subj, pred, obj) + build_infer_param(params, infer=infer) + + response = self.repo.http_client.put( + self.url, + headers=headers, + params=params, + ) + response.raise_for_status() + triple_formats = [ + "application/n-triples", + "text/turtle", + "application/rdf+xml", + ] + try: + if content_type in triple_formats: + retval = Graph().parse(data=response.text, format=content_type) + else: + retval = Dataset().parse(data=response.text, format=content_type) + for result in self.repo.namespaces.list(): + retval.bind(result.prefix, result.namespace, replace=True) + return retval + except Exception as err: + raise RDFLibParserError(f"Error parsing RDF: {err}") from err + def delete( self, data: str | bytes | BinaryIO | Graph | Dataset, diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py b/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py index d11bf023a..f62723af1 100644 --- a/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py +++ b/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py @@ -4,7 +4,7 @@ from rdflib.contrib.rdf4j.client import Repository, Transaction from rdflib.contrib.rdf4j.exceptions import TransactionClosedError -from rdflib.term import Literal, Variable +from rdflib.term import Literal, URIRef, Variable def test_e2e_repo_transaction(repo: Repository): @@ -71,3 +71,27 @@ def test_e2e_repo_transaction_update(repo: Repository): txn.update(query) assert txn.size() == 3 assert txn.size("urn:graph:a2") == 1 + + +def test_e2e_repo_transaction_get(repo: Repository): + path = str(Path(__file__).parent.parent / "data/quads-1.nq") + repo.overwrite(path) + assert repo.size() == 2 + + with repo.transaction() as txn: + ds = txn.get() + assert len(ds) == 2 + + repo.upload(str(Path(__file__).parent.parent / 
"data/quads-2.nq")) + repo.upload(str(Path(__file__).parent.parent / "data/quads-3.nq")) + assert repo.size() == 4 + + with repo.transaction() as txn: + ds = txn.get() + assert len(ds) == 4 + + ds = txn.get(graph_name="urn:graph:a") + assert len(ds) == 1 + + ds = txn.get(pred=URIRef("http://example.org/p")) + assert len(ds) == 2 diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_get.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_get.py new file mode 100644 index 000000000..27deee049 --- /dev/null +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_get.py @@ -0,0 +1,231 @@ +from __future__ import annotations + +import typing as t +from unittest.mock import Mock + +import httpx +import pytest + +from rdflib import BNode, Dataset, Graph, IdentifiedNode, URIRef +from rdflib.contrib.rdf4j.client import ( + NamespaceManager, + ObjectType, + PredicateType, + SubjectType, + Transaction, +) +from rdflib.graph import DATASET_DEFAULT_GRAPH_ID + + +def test_repo_transaction_get(txn: Transaction, monkeypatch: pytest.MonkeyPatch): + mock_response = Mock( + spec=httpx.Response, + text=" .", + ) + mock_httpx_put = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) + monkeypatch.setattr(NamespaceManager, "list", lambda _: []) + txn.get(pred=URIRef("http://example.org/p")) + mock_httpx_put.assert_called_once_with( + txn.url, + headers={"Accept": "application/n-quads"}, + params={"action": "GET", "pred": ""}, + ) + + +@pytest.mark.parametrize( + "content_type, data, expected_class_type", + [ + [ + None, + " .", + Dataset, + ], + [ + "application/trig", + " { . 
}", + Dataset, + ], + [ + "application/n-triples", + " .", + Graph, + ], + [ + "text/turtle", + " .", + Graph, + ], + [ + "application/rdf+xml", + """ + + + + + +""", + Graph, + ], + ], +) +def test_repo_transaction_get_content_type( + txn: Transaction, + monkeypatch: pytest.MonkeyPatch, + content_type: str | None, + data: str, + expected_class_type: type, +): + """ + Test that the content type is set correctly on the request and that the response is + parsed correctly. + """ + mock_response = Mock(spec=httpx.Response, text=data) + mock_httpx_put = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) + monkeypatch.setattr(NamespaceManager, "list", lambda _: []) + + result = txn.get(content_type=content_type) + headers = {"Accept": content_type or "application/n-quads"} + params: dict[str, str] = {"action": "GET"} + mock_httpx_put.assert_called_once_with( + txn.url, + headers=headers, + params=params, + ) + assert isinstance(result, expected_class_type) + + +@pytest.mark.parametrize( + "graph_name, expected_graph_name_param", + [ + [DATASET_DEFAULT_GRAPH_ID, "null"], + ["http://example.com/graph", ""], + [URIRef("http://example.com/graph"), ""], + [BNode("some-bnode"), "_:some-bnode"], + [ + [URIRef("http://example.com/graph"), BNode("some-bnode")], + ",_:some-bnode", + ], + [None, None], + ], +) +def test_repo_transaction_get_graph_name( + txn: Transaction, + monkeypatch: pytest.MonkeyPatch, + graph_name: IdentifiedNode | t.Iterable[IdentifiedNode] | str | None, + expected_graph_name_param: str, +): + """ + Test that graph_name is passed as a query parameter and correctly handles the + different type variations. 
+ """ + mock_response = Mock(spec=httpx.Response, text="") + mock_httpx_put = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) + monkeypatch.setattr(NamespaceManager, "list", lambda _: []) + headers = { + "Accept": "application/n-quads", + } + if graph_name is None: + params = {} + else: + params = {"context": expected_graph_name_param} + params["action"] = "GET" + txn.get(graph_name=graph_name) + mock_httpx_put.assert_called_once_with( + txn.url, + headers=headers, + params=params, + ) + + +@pytest.mark.parametrize("infer, expected_value", [[True, KeyError], [False, "false"]]) +def test_repo_transaction_get_infer( + txn: Transaction, + monkeypatch: pytest.MonkeyPatch, + infer: bool, + expected_value: Exception | str, +): + """Test that the "infer" query parameter is set correctly.""" + mock_response = Mock(spec=httpx.Response, text="") + mock_httpx_put = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) + monkeypatch.setattr(NamespaceManager, "list", lambda _: []) + headers = { + "Accept": "application/n-quads", + } + + params = {"action": "GET"} + if isinstance(expected_value, str): + params["infer"] = expected_value + + txn.get(infer=infer) + mock_httpx_put.assert_called_once_with( + txn.url, + headers=headers, + params=params, + ) + + +@pytest.mark.parametrize( + "subj, pred, obj, expected_params", + [ + [ + URIRef("http://example.com/s"), + URIRef("http://example.com/p"), + URIRef("http://example.com/o"), + { + "action": "GET", + "subj": "", + "pred": "", + "obj": "", + }, + ], + [ + None, + None, + None, + { + "action": "GET", + }, + ], + [ + BNode("some-bnode"), + URIRef("http://example.com/p"), + BNode("some-bnode-2"), + { + "action": "GET", + "subj": "_:some-bnode", + "pred": "", + "obj": "_:some-bnode-2", + }, + ], + ], +) +def test_repo_transaction_get_spo( + txn: Transaction, + monkeypatch: pytest.MonkeyPatch, + subj: SubjectType, + pred: PredicateType, + obj: 
ObjectType, + expected_params: dict[str, str], +): + """Test that the subj, pred, and obj query parameters are set correctly.""" + mock_response = Mock(spec=httpx.Response, text="") + mock_httpx_put = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) + monkeypatch.setattr(NamespaceManager, "list", lambda _: []) + headers = { + "Accept": "application/n-quads", + } + + txn.get(subj=subj, pred=pred, obj=obj) + mock_httpx_put.assert_called_once_with( + txn.url, + headers=headers, + params=expected_params, + ) From c67f7c27a32baf4388c280061e25365f74be2e0d Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Fri, 7 Nov 2025 10:59:35 +1000 Subject: [PATCH 46/54] test: update transaction tests for upload and delete --- .../test_repo_transaction_delete.py | 28 ++- .../test_repo_transaction_upload.py | 213 ++++++++++++++++++ 2 files changed, 237 insertions(+), 4 deletions(-) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_delete.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_delete.py index 5e2105203..6a90944d3 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_delete.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_delete.py @@ -10,14 +10,34 @@ ) -def test_repo_transaction_delete(txn: Transaction, monkeypatch: pytest.MonkeyPatch): +@pytest.mark.parametrize( + "base_uri, content_type, expected_headers, expected_params", + [ + [None, None, {"Content-Type": "application/n-quads"}, {"action": "DELETE"}], + [ + "http://example.com/", + "text/turtle", + {"Content-Type": "text/turtle"}, + {"action": "DELETE", "baseURI": "http://example.com/"}, + ], + ], +) +def test_repo_transaction_delete( + txn: Transaction, + monkeypatch: pytest.MonkeyPatch, + base_uri: str | None, + content_type: str | None, + expected_headers: dict[str, str], + expected_params: dict[str, str], +): mock_response = Mock(spec=httpx.Response) mock_httpx_put = Mock(return_value=mock_response) 
monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) - txn.delete("") + txn.delete("", base_uri, content_type) + mock_httpx_put.assert_called_once_with( txn.url, - headers={"Content-Type": "application/n-quads"}, - params={"action": "DELETE"}, + headers=expected_headers, + params=expected_params, content=ANY, ) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_upload.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_upload.py index 387ceef40..fdba18422 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_upload.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_upload.py @@ -1,8 +1,13 @@ +from __future__ import annotations + +import io +import pathlib from unittest.mock import ANY, Mock import httpx import pytest +from rdflib import Dataset, Graph from rdflib.contrib.rdf4j.client import Transaction @@ -17,3 +22,211 @@ def test_repo_transaction_upload(txn: Transaction, monkeypatch: pytest.MonkeyPat params={"action": "ADD"}, content=ANY, ) + + +@pytest.mark.parametrize("class_type", [Graph, Dataset]) +def test_repo_transaction_upload_graph( + txn: Transaction, monkeypatch: pytest.MonkeyPatch, class_type: type[Graph | Dataset] +): + """Test that the upload method handles Graphs and Datasets as data input.""" + file_path = pathlib.Path(__file__).parent.parent.parent / "data/quads-1.nq" + mock = Mock() + monkeypatch.setattr(httpx.Client, "put", mock) + headers = { + "Content-Type": "application/n-quads", + } + params: dict[str, str] = {"action": "ADD"} + graph = class_type().parse(file_path) + txn.upload(graph) + mock.assert_called_once_with( + txn.url, + headers=headers, + params=params, + content=ANY, + ) + call_args = mock.call_args + content = call_args.kwargs["content"] + assert isinstance(content, io.BytesIO) + assert content.closed + + +def test_repo_transaction_upload_file_path( + txn: Transaction, monkeypatch: pytest.MonkeyPatch +): + """Test that a file path is treated as a file to be 
read and closed when done.""" + file_path = pathlib.Path(__file__).parent.parent.parent / "data/quads-1.nq" + mock = Mock() + monkeypatch.setattr(httpx.Client, "put", mock) + headers = { + "Content-Type": "application/n-quads", + } + params: dict[str, str] = {"action": "ADD"} + txn.upload(str(file_path), content_type="application/n-quads") + mock.assert_called_once_with( + txn.url, + headers=headers, + params=params, + content=ANY, + ) + call_args = mock.call_args + content = call_args.kwargs["content"] + assert hasattr(content, "read") + assert hasattr(content, "name") + assert content.name == str(file_path) + assert content.closed + + +def test_repo_transaction_upload_buffered_reader( + txn: Transaction, monkeypatch: pytest.MonkeyPatch +): + """Test that a file-like object is read and not closed when done.""" + file_path = pathlib.Path(__file__).parent.parent.parent / "data/quads-1.nq" + mock = Mock() + monkeypatch.setattr(httpx.Client, "put", mock) + with open(file_path, "rb") as file: + headers = { + "Content-Type": "application/n-quads", + } + params: dict[str, str] = {"action": "ADD"} + txn.upload(file, content_type="application/n-quads") + mock.assert_called_once_with( + txn.url, + headers=headers, + params=params, + content=file, + ) + call_args = mock.call_args + content = call_args.kwargs["content"] + assert not content.closed + + +@pytest.mark.parametrize( + "data", + [ + " .", + b" .", + ], +) +def test_repo_transaction_upload_data( + txn: Transaction, data: str | bytes, monkeypatch: pytest.MonkeyPatch +): + """Test that str and bytes data is treated as content.""" + mock = Mock() + monkeypatch.setattr(httpx.Client, "put", mock) + headers = { + "Content-Type": "application/n-quads", + } + params: dict[str, str] = {"action": "ADD"} + txn.upload(data, content_type="application/n-quads") + mock.assert_called_once_with( + txn.url, + headers=headers, + params=params, + content=ANY, + ) + call_args = mock.call_args + content = call_args.kwargs["content"] + 
assert isinstance(content, io.BytesIO) + assert not content.closed + + +@pytest.mark.parametrize( + "base_uri, expected_params", + [ + ["", {"baseURI": "", "action": "ADD"}], + ["http://example.com", {"baseURI": "http://example.com", "action": "ADD"}], + [None, {"action": "ADD"}], + ], +) +def test_repo_transaction_upload_base_uri( + txn: Transaction, + base_uri: str | None, + expected_params: dict[str, str], + monkeypatch: pytest.MonkeyPatch, +): + """Test that base_uri is passed as a query parameter.""" + mock = Mock() + monkeypatch.setattr(httpx.Client, "put", mock) + headers = { + "Content-Type": "application/n-quads", + } + txn.upload("", base_uri=base_uri, content_type="application/n-quads") + mock.assert_called_once_with( + txn.url, + headers=headers, + params=expected_params, + content=ANY, + ) + + +def test_repo_transaction_upload_nonexistent_file_path( + txn: Transaction, monkeypatch: pytest.MonkeyPatch +): + """Test that a string that looks like a file path but doesn't exist is treated as content.""" + mock = Mock() + monkeypatch.setattr(httpx.Client, "put", mock) + headers = { + "Content-Type": "application/n-quads", + } + params: dict[str, str] = {"action": "ADD"} + nonexistent_path = "/nonexistent/path/file.nq" + txn.upload(nonexistent_path, content_type="application/n-quads") + mock.assert_called_once_with( + txn.url, + headers=headers, + params=params, + content=ANY, + ) + call_args = mock.call_args + content = call_args.kwargs["content"] + assert isinstance(content, io.BytesIO) + assert not content.closed + + +def test_repo_transaction_upload_string_with_newline( + txn: Transaction, monkeypatch: pytest.MonkeyPatch +): + """Test that a string with newlines is treated as content, not a file path.""" + mock = Mock() + monkeypatch.setattr(httpx.Client, "put", mock) + headers = { + "Content-Type": "application/n-quads", + } + params: dict[str, str] = {"action": "ADD"} + data_with_newline = " .\n ." 
+ txn.upload(data_with_newline, content_type="application/n-quads") + mock.assert_called_once_with( + txn.url, + headers=headers, + params=params, + content=ANY, + ) + call_args = mock.call_args + content = call_args.kwargs["content"] + assert isinstance(content, io.BytesIO) + assert not content.closed + + +def test_repo_transaction_upload_long_string( + txn: Transaction, monkeypatch: pytest.MonkeyPatch +): + """Test that a string longer than 260 characters is treated as content, not a file path.""" + mock = Mock() + monkeypatch.setattr(httpx.Client, "put", mock) + headers = { + "Content-Type": "application/n-quads", + } + params: dict[str, str] = {"action": "ADD"} + # Create a string longer than 260 characters + long_string = "a" * 261 + txn.upload(long_string, content_type="application/n-quads") + mock.assert_called_once_with( + txn.url, + headers=headers, + params=params, + content=ANY, + ) + call_args = mock.call_args + content = call_args.kwargs["content"] + assert isinstance(content, io.BytesIO) + assert not content.closed From c56a5b64945778b7bd204d5fd8f110034cb6c64d Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Fri, 7 Nov 2025 11:16:05 +1000 Subject: [PATCH 47/54] fix: improve error handling --- rdflib/contrib/rdf4j/client.py | 44 ++++++++++++++++++++++++---------- 1 file changed, 32 insertions(+), 12 deletions(-) diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index 011965ba6..3751fbdef 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -479,10 +479,15 @@ def query(self, query: str, **kwargs): params={"query": query, **kwargs}, ) response.raise_for_status() - return Result.parse( - io.BytesIO(response.content), - content_type=response.headers["Content-Type"].split(";")[0], - ) + try: + return Result.parse( + io.BytesIO(response.content), + content_type=response.headers["Content-Type"].split(";")[0], + ) + except KeyError as err: + raise RDFLibParserError( + f"Failed to parse SPARQL query 
result {response.headers.get('Content-Type')}: {err}" + ) from err def update(self, query: str): """Execute a SPARQL update operation on the repository. @@ -722,7 +727,10 @@ def __exit__(self, exc_type, exc_val, exc_tb): if exc_type is None: self.commit() else: - self.rollback() + try: + self.rollback() + except Exception: + pass # Propagate errors. return False @@ -843,10 +851,15 @@ def query(self, query: str, **kwargs): self.url, headers=headers, params={**params, **kwargs} ) response.raise_for_status() - return Result.parse( - io.BytesIO(response.content), - content_type=response.headers["Content-Type"].split(";")[0], - ) + try: + return Result.parse( + io.BytesIO(response.content), + content_type=response.headers["Content-Type"].split(";")[0], + ) + except KeyError as err: + raise RDFLibParserError( + f"Failed to parse SPARQL query result {response.headers.get('Content-Type')}: {err}" + ) from err def update(self, query: str, **kwargs): """Execute a SPARQL update operation on the repository. @@ -1133,12 +1146,19 @@ def __init__( self._http_client = httpx.Client( base_url=base_url, auth=auth, timeout=timeout, **kwargs ) - if self.protocol < 12: + self._repository_manager: RepositoryManager | None = None + try: + protocol_version = self.protocol + except httpx.RequestError as err: self.close() raise RDF4JUnsupportedProtocolError( - f"RDF4J server protocol version {self.protocol} is not supported. Minimum required version is 12." + f"Failed to check protocol version: {err}" + ) from err + if protocol_version < 12: + self.close() + raise RDF4JUnsupportedProtocolError( + f"RDF4J server protocol version {protocol_version} is not supported. Minimum required version is 12." 
) - self._repository_manager: RepositoryManager | None = None def __enter__(self): return self From fc15b9755d9bee2127060820f015622ba0838682 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Fri, 7 Nov 2025 12:09:00 +1000 Subject: [PATCH 48/54] feat: dynamically import httpx by checking its existence with find_spec. Update tests to skip if httpx is not available --- rdflib/contrib/rdf4j/__init__.py | 11 +++++++++-- test/test_rdf4j/test_e2e/conftest.py | 6 +++++- test/test_rdf4j/test_e2e/test_client.py | 6 +++++- test/test_rdf4j/test_e2e/test_e2e_repo_graph_store.py | 5 +++++ test/test_rdf4j/test_e2e/test_e2e_repo_management.py | 6 ++++-- test/test_rdf4j/test_e2e/test_e2e_repo_namespace.py | 5 +++++ test/test_rdf4j/test_e2e/test_e2e_repo_query.py | 7 +++++++ test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py | 5 +++++ test/test_rdf4j/test_e2e/test_e2e_repo_update.py | 7 +++++++ test/test_rdf4j/test_e2e/test_graphdb/conftest.py | 5 +++++ .../test_graphdb/test_graphdb_repo_management.py | 5 +++++ test/test_rdf4j/test_unit/repository/conftest.py | 6 +++++- .../test_unit/repository/test_repo_delete.py | 5 +++++ test/test_rdf4j/test_unit/repository/test_repo_get.py | 5 +++++ .../test_unit/repository/test_repo_graph_store_add.py | 5 +++++ .../repository/test_repo_graph_store_clear.py | 5 +++++ .../test_unit/repository/test_repo_graph_store_get.py | 5 +++++ .../repository/test_repo_graph_store_overwrite.py | 5 +++++ .../test_unit/repository/test_repo_graphs.py | 5 +++++ .../test_unit/repository/test_repo_namespace_clear.py | 5 +++++ .../test_unit/repository/test_repo_namespace_get.py | 5 +++++ .../test_unit/repository/test_repo_namespace_list.py | 5 +++++ .../repository/test_repo_namespace_remove.py | 5 +++++ .../test_unit/repository/test_repo_namespace_set.py | 5 +++++ .../test_unit/repository/test_repo_overwrite.py | 5 +++++ .../test_unit/repository/test_repo_query.py | 5 +++++ .../test_rdf4j/test_unit/repository/test_repo_size.py | 5 +++++ 
.../repository/test_repo_transaction_commit.py | 5 +++++ .../repository/test_repo_transaction_delete.py | 5 +++++ .../test_unit/repository/test_repo_transaction_get.py | 5 +++++ .../repository/test_repo_transaction_ping.py | 5 +++++ .../repository/test_repo_transaction_query.py | 5 +++++ .../repository/test_repo_transaction_rollback.py | 5 +++++ .../repository/test_repo_transaction_size.py | 5 +++++ .../repository/test_repo_transaction_update.py | 5 +++++ .../repository/test_repo_transaction_upload.py | 5 +++++ .../test_unit/repository/test_repo_update.py | 5 +++++ .../test_unit/repository/test_repo_upload.py | 5 +++++ 38 files changed, 197 insertions(+), 7 deletions(-) diff --git a/rdflib/contrib/rdf4j/__init__.py b/rdflib/contrib/rdf4j/__init__.py index df2a3556c..f0a2e8cf9 100644 --- a/rdflib/contrib/rdf4j/__init__.py +++ b/rdflib/contrib/rdf4j/__init__.py @@ -1,3 +1,10 @@ -from .client import RDF4JClient +from importlib.util import find_spec -__all__ = ["RDF4JClient"] +has_httpx = find_spec("httpx") is not None + +if has_httpx: + from .client import RDF4JClient + + __all__ = ["RDF4JClient", "has_httpx"] +else: + __all__ = ["has_httpx"] diff --git a/test/test_rdf4j/test_e2e/conftest.py b/test/test_rdf4j/test_e2e/conftest.py index a960785d4..a578060fa 100644 --- a/test/test_rdf4j/test_e2e/conftest.py +++ b/test/test_rdf4j/test_e2e/conftest.py @@ -5,7 +5,11 @@ from testcontainers.core.image import DockerImage from testcontainers.core.waiting_utils import wait_for_logs -from rdflib.contrib.rdf4j import RDF4JClient +from rdflib.contrib.rdf4j import RDF4JClient, has_httpx + +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) GRAPHDB_PORT = 7200 diff --git a/test/test_rdf4j/test_e2e/test_client.py b/test/test_rdf4j/test_e2e/test_client.py index 45281ee23..9077e22f8 100644 --- a/test/test_rdf4j/test_e2e/test_client.py +++ b/test/test_rdf4j/test_e2e/test_client.py @@ -1,8 +1,12 @@ import pytest -from 
rdflib.contrib.rdf4j import RDF4JClient +from rdflib.contrib.rdf4j import RDF4JClient, has_httpx from rdflib.contrib.rdf4j.exceptions import RDF4JUnsupportedProtocolError +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + @pytest.mark.testcontainer def test_client_close_method(client: RDF4JClient): diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_graph_store.py b/test/test_rdf4j/test_e2e/test_e2e_repo_graph_store.py index ba5f610bc..5ec6492bb 100644 --- a/test/test_rdf4j/test_e2e/test_e2e_repo_graph_store.py +++ b/test/test_rdf4j/test_e2e/test_e2e_repo_graph_store.py @@ -3,9 +3,14 @@ import pytest from rdflib import Dataset, Graph, URIRef +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import Repository from rdflib.graph import DATASET_DEFAULT_GRAPH_ID +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + @pytest.mark.parametrize( "graph_name", [URIRef("urn:graph:a"), DATASET_DEFAULT_GRAPH_ID] diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_management.py b/test/test_rdf4j/test_e2e/test_e2e_repo_management.py index 55352f2fe..0727364df 100644 --- a/test/test_rdf4j/test_e2e/test_e2e_repo_management.py +++ b/test/test_rdf4j/test_e2e/test_e2e_repo_management.py @@ -5,7 +5,7 @@ from rdflib import BNode, Dataset, URIRef from rdflib.compare import isomorphic -from rdflib.contrib.rdf4j import RDF4JClient +from rdflib.contrib.rdf4j import RDF4JClient, has_httpx from rdflib.contrib.rdf4j.client import Repository from rdflib.contrib.rdf4j.exceptions import ( RepositoryAlreadyExistsError, @@ -14,7 +14,9 @@ RepositoryNotHealthyError, ) -# TODO: only run these tests on py39 or greater. Testcontainers not available on py38. 
+pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) @pytest.mark.testcontainer diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_namespace.py b/test/test_rdf4j/test_e2e/test_e2e_repo_namespace.py index 5ea93bad8..950874249 100644 --- a/test/test_rdf4j/test_e2e/test_e2e_repo_namespace.py +++ b/test/test_rdf4j/test_e2e/test_e2e_repo_namespace.py @@ -1,7 +1,12 @@ import pytest +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import NamespaceListingResult, Repository +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + @pytest.mark.testcontainer def test_e2e_repo_namespace_crud(repo: Repository): diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_query.py b/test/test_rdf4j/test_e2e/test_e2e_repo_query.py index 7cc1863da..542e93eca 100644 --- a/test/test_rdf4j/test_e2e/test_e2e_repo_query.py +++ b/test/test_rdf4j/test_e2e/test_e2e_repo_query.py @@ -1,8 +1,15 @@ from pathlib import Path +import pytest + +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import Repository from rdflib.term import URIRef, Variable +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + def test_e2e_repo_query(repo: Repository): path = str(Path(__file__).parent.parent / "data/quads-1.nq") diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py b/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py index f62723af1..fbe0c5444 100644 --- a/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py +++ b/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py @@ -2,10 +2,15 @@ import pytest +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import Repository, Transaction from rdflib.contrib.rdf4j.exceptions import TransactionClosedError from rdflib.term import Literal, URIRef, Variable +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping 
rdf4j tests, httpx not available" +) + def test_e2e_repo_transaction(repo: Repository): path = str(Path(__file__).parent.parent / "data/quads-1.nq") diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_update.py b/test/test_rdf4j/test_e2e/test_e2e_repo_update.py index 15762c9ee..02f2bfebd 100644 --- a/test/test_rdf4j/test_e2e/test_e2e_repo_update.py +++ b/test/test_rdf4j/test_e2e/test_e2e_repo_update.py @@ -1,7 +1,14 @@ from pathlib import Path +import pytest + +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import Repository +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + def test_e2e_repo_query(repo: Repository): path = str(Path(__file__).parent.parent / "data/quads-1.nq") diff --git a/test/test_rdf4j/test_e2e/test_graphdb/conftest.py b/test/test_rdf4j/test_e2e/test_graphdb/conftest.py index 725e16994..90e2e563c 100644 --- a/test/test_rdf4j/test_e2e/test_graphdb/conftest.py +++ b/test/test_rdf4j/test_e2e/test_graphdb/conftest.py @@ -2,6 +2,11 @@ from testcontainers.core.container import DockerContainer from rdflib.contrib.graphdb import GraphDBClient +from rdflib.contrib.rdf4j import has_httpx + +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) @pytest.fixture(scope="function") diff --git a/test/test_rdf4j/test_e2e/test_graphdb/test_graphdb_repo_management.py b/test/test_rdf4j/test_e2e/test_graphdb/test_graphdb_repo_management.py index 6102fa424..753056d4e 100644 --- a/test/test_rdf4j/test_e2e/test_graphdb/test_graphdb_repo_management.py +++ b/test/test_rdf4j/test_e2e/test_graphdb/test_graphdb_repo_management.py @@ -4,12 +4,17 @@ import pytest from rdflib.contrib.graphdb import GraphDBClient +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.exceptions import ( RepositoryAlreadyExistsError, RepositoryNotFoundError, RepositoryNotHealthyError, ) +pytestmark = pytest.mark.skipif( + not has_httpx, 
reason="skipping rdf4j tests, httpx not available" +) + # TODO: consider parameterizing the client (RDF4JClient, GraphDBClient) diff --git a/test/test_rdf4j/test_unit/repository/conftest.py b/test/test_rdf4j/test_unit/repository/conftest.py index 0278e9104..7a93fbc80 100644 --- a/test/test_rdf4j/test_unit/repository/conftest.py +++ b/test/test_rdf4j/test_unit/repository/conftest.py @@ -5,9 +5,13 @@ import httpx import pytest -from rdflib.contrib.rdf4j import RDF4JClient +from rdflib.contrib.rdf4j import RDF4JClient, has_httpx from rdflib.contrib.rdf4j.client import Repository, RepositoryManager +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + @pytest.fixture(scope="function") def client(monkeypatch: pytest.MonkeyPatch): diff --git a/test/test_rdf4j/test_unit/repository/test_repo_delete.py b/test/test_rdf4j/test_unit/repository/test_repo_delete.py index 06e996098..22d8c499e 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_delete.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_delete.py @@ -6,6 +6,7 @@ import httpx import pytest +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import ( ObjectType, PredicateType, @@ -15,6 +16,10 @@ from rdflib.graph import DATASET_DEFAULT_GRAPH_ID from rdflib.term import BNode, IdentifiedNode, URIRef +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + @pytest.mark.parametrize( "subj, pred, obj, expected_params", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_get.py b/test/test_rdf4j/test_unit/repository/test_repo_get.py index ec151e27c..3e3854974 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_get.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_get.py @@ -7,6 +7,7 @@ import pytest from rdflib import Dataset, Graph +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import ( NamespaceManager, ObjectType, @@ -17,6 +18,10 
@@ from rdflib.graph import DATASET_DEFAULT_GRAPH_ID from rdflib.term import BNode, IdentifiedNode, URIRef +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + @pytest.mark.parametrize( "content_type, data, expected_class_type", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_add.py b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_add.py index fc2fdc5e2..60a228475 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_add.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_add.py @@ -6,10 +6,15 @@ import pytest from rdflib import URIRef +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import ( Repository, ) +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + @pytest.mark.parametrize( "graph_name, expected_params", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_clear.py b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_clear.py index 0e6f7c346..e34d25f2d 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_clear.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_clear.py @@ -6,10 +6,15 @@ import pytest from rdflib import URIRef +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import ( Repository, ) +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + @pytest.mark.parametrize( "graph_name, expected_params", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_get.py b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_get.py index fbbf1ad19..712b06ec1 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_get.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_get.py @@ -6,10 +6,15 @@ import pytest from rdflib import Graph, URIRef +from 
rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import ( Repository, ) +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + @pytest.mark.parametrize( "graph_name, expected_params", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_overwrite.py b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_overwrite.py index 6ffed5474..79c5c2cbf 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_overwrite.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_overwrite.py @@ -6,10 +6,15 @@ import pytest from rdflib import URIRef +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import ( Repository, ) +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + @pytest.mark.parametrize( "graph_name, expected_params", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_graphs.py b/test/test_rdf4j/test_unit/repository/test_repo_graphs.py index 5401ed909..6b877a467 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_graphs.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_graphs.py @@ -5,10 +5,15 @@ import httpx import pytest +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import Repository from rdflib.contrib.rdf4j.exceptions import RepositoryFormatError from rdflib.term import BNode, IdentifiedNode, URIRef +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + @pytest.mark.parametrize( "response_dict, expected_result", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_clear.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_clear.py index 73bf61ae7..e01e23910 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_namespace_clear.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_clear.py @@ -5,8 +5,13 @@ import httpx 
import pytest +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import Repository +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + def test_repo_namespace_clear(repo: Repository, monkeypatch: pytest.MonkeyPatch): mock_response = Mock(spec=httpx.Response) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py index 8f0f66ee2..fafb070f0 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py @@ -6,12 +6,17 @@ import pytest from rdflib import Dataset, URIRef +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import ( NamespaceListingResult, NamespaceManager, Repository, ) +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + @pytest.mark.parametrize( "prefix, response_text, response_status_code, expected_value", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_list.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_list.py index b7f52c663..13ba77440 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_namespace_list.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_list.py @@ -5,10 +5,15 @@ import httpx import pytest +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import NamespaceListingResult, Repository from rdflib.contrib.rdf4j.exceptions import RepositoryFormatError from rdflib.term import IdentifiedNode +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + @pytest.mark.parametrize( "response_dict, expected_result", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_remove.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_remove.py index 18a69c96f..808e0fb39 
100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_namespace_remove.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_remove.py @@ -5,8 +5,13 @@ import httpx import pytest +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import Repository +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + @pytest.mark.parametrize( "prefix", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_set.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_set.py index 541d2ed5a..208c32fb1 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_namespace_set.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_set.py @@ -5,8 +5,13 @@ import httpx import pytest +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import Repository +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + @pytest.mark.parametrize( "prefix, namespace", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_overwrite.py b/test/test_rdf4j/test_unit/repository/test_repo_overwrite.py index 755c7db52..5b59707d7 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_overwrite.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_overwrite.py @@ -9,9 +9,14 @@ import pytest from rdflib import BNode, Dataset, Graph, IdentifiedNode, URIRef +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import Repository from rdflib.graph import DATASET_DEFAULT_GRAPH_ID +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + @pytest.mark.parametrize("class_type", [Graph, Dataset]) def test_repo_overwrite_graph( diff --git a/test/test_rdf4j/test_unit/repository/test_repo_query.py b/test/test_rdf4j/test_unit/repository/test_repo_query.py index 9aa3f9343..007e8f8d1 100644 --- 
a/test/test_rdf4j/test_unit/repository/test_repo_query.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_query.py @@ -6,11 +6,16 @@ import pytest from rdflib import Graph +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import ( Repository, ) from rdflib.term import URIRef, Variable +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + @pytest.mark.parametrize( "query, accept_header, response_text, expected_result_type", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_size.py b/test/test_rdf4j/test_unit/repository/test_repo_size.py index 45d890614..2766525e0 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_size.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_size.py @@ -6,6 +6,7 @@ import httpx import pytest +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import ( Repository, ) @@ -13,6 +14,10 @@ from rdflib.graph import DATASET_DEFAULT_GRAPH_ID from rdflib.term import BNode, IdentifiedNode, URIRef +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + @pytest.mark.parametrize( "graph_name, expected_graph_name_param", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_commit.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_commit.py index b05f6c35e..79a2e6abc 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_commit.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_commit.py @@ -5,6 +5,7 @@ import httpx import pytest +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import ( Repository, ) @@ -12,6 +13,10 @@ TransactionClosedError, ) +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + def test_repo_transaction_commit( repo: Repository, diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_delete.py 
b/test/test_rdf4j/test_unit/repository/test_repo_transaction_delete.py index 6a90944d3..386cdce53 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_delete.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_delete.py @@ -5,10 +5,15 @@ import httpx import pytest +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import ( Transaction, ) +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + @pytest.mark.parametrize( "base_uri, content_type, expected_headers, expected_params", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_get.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_get.py index 27deee049..6793d799d 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_get.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_get.py @@ -7,6 +7,7 @@ import pytest from rdflib import BNode, Dataset, Graph, IdentifiedNode, URIRef +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import ( NamespaceManager, ObjectType, @@ -16,6 +17,10 @@ ) from rdflib.graph import DATASET_DEFAULT_GRAPH_ID +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + def test_repo_transaction_get(txn: Transaction, monkeypatch: pytest.MonkeyPatch): mock_response = Mock( diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_ping.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_ping.py index f47de348b..12121c561 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_ping.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_ping.py @@ -5,6 +5,7 @@ import httpx import pytest +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import ( Transaction, ) @@ -12,6 +13,10 @@ TransactionPingError, ) +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping 
rdf4j tests, httpx not available" +) + def test_repo_transaction_ping(txn: Transaction, monkeypatch: pytest.MonkeyPatch): # Test a successful ping. diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_query.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_query.py index a447a30eb..4e8248d0b 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_query.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_query.py @@ -5,12 +5,17 @@ import httpx import pytest +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import ( Transaction, ) from rdflib.graph import Graph from rdflib.term import URIRef, Variable +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + @pytest.mark.parametrize( "query, accept_header, response_text, expected_result_type", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_rollback.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_rollback.py index 9cb92b739..25f7d0171 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_rollback.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_rollback.py @@ -5,6 +5,7 @@ import httpx import pytest +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import ( Repository, ) @@ -13,6 +14,10 @@ TransactionPingError, ) +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + def test_repo_transaction_rollback( repo: Repository, diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_size.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_size.py index 45315b0b5..b1b5f63c3 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_size.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_size.py @@ -5,10 +5,15 @@ import httpx import pytest +from rdflib.contrib.rdf4j import 
has_httpx from rdflib.contrib.rdf4j.client import ( Transaction, ) +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + def test_repo_transaction_size(txn: Transaction, monkeypatch: pytest.MonkeyPatch): mock_response = Mock(spec=httpx.Response, text="10") diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_update.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_update.py index 313d7cb77..1a65e1328 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_update.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_update.py @@ -5,10 +5,15 @@ import httpx import pytest +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import ( Transaction, ) +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + def test_repo_update(txn: Transaction, monkeypatch: pytest.MonkeyPatch): mock_response = Mock(spec=httpx.Response, status_code=204) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_upload.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_upload.py index fdba18422..9572c4154 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_upload.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_upload.py @@ -8,8 +8,13 @@ import pytest from rdflib import Dataset, Graph +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import Transaction +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + def test_repo_transaction_upload(txn: Transaction, monkeypatch: pytest.MonkeyPatch): mock_response = Mock(spec=httpx.Response) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_update.py b/test/test_rdf4j/test_unit/repository/test_repo_update.py index 09696e86b..c4aed86af 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_update.py +++ 
b/test/test_rdf4j/test_unit/repository/test_repo_update.py @@ -5,10 +5,15 @@ import httpx import pytest +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import ( Repository, ) +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + def test_repo_update(repo: Repository, monkeypatch: pytest.MonkeyPatch): mock_response = Mock(spec=httpx.Response, status_code=204) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_upload.py b/test/test_rdf4j/test_unit/repository/test_repo_upload.py index 5486dc946..afe3f10f3 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_upload.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_upload.py @@ -8,8 +8,13 @@ import pytest from rdflib import Dataset, Graph +from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.client import Repository +pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + @pytest.mark.parametrize("class_type", [Graph, Dataset]) def test_repo_upload_graph( From 40f86ac2cd06e3a6b5475d562e90313a6df03983 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Fri, 7 Nov 2025 13:23:06 +1000 Subject: [PATCH 49/54] test: put httpx import behind has_httpx condition --- test/test_rdf4j/test_e2e/conftest.py | 5 ++++- test/test_rdf4j/test_e2e/test_client.py | 7 +++++-- .../test_e2e/test_e2e_repo_graph_store.py | 4 +++- .../test_e2e/test_e2e_repo_management.py | 21 +++++++++++-------- .../test_e2e/test_e2e_repo_namespace.py | 4 +++- .../test_e2e/test_e2e_repo_query.py | 4 +++- .../test_e2e/test_e2e_repo_transaction.py | 6 ++++-- .../test_e2e/test_e2e_repo_update.py | 4 +++- .../test_e2e/test_graphdb/conftest.py | 4 +++- .../test_graphdb_repo_management.py | 16 +++++++------- .../test_unit/repository/conftest.py | 9 +++++--- .../test_unit/repository/test_repo_delete.py | 17 ++++++++------- .../test_unit/repository/test_repo_get.py | 18 +++++++++------- 
.../repository/test_repo_graph_store_add.py | 10 +++++---- .../repository/test_repo_graph_store_clear.py | 10 +++++---- .../repository/test_repo_graph_store_get.py | 10 +++++---- .../test_repo_graph_store_overwrite.py | 10 +++++---- .../test_unit/repository/test_repo_graphs.py | 8 ++++--- .../repository/test_repo_namespace_clear.py | 6 ++++-- .../repository/test_repo_namespace_get.py | 14 +++++++------ .../repository/test_repo_namespace_list.py | 8 ++++--- .../repository/test_repo_namespace_remove.py | 6 ++++-- .../repository/test_repo_namespace_set.py | 6 ++++-- .../repository/test_repo_overwrite.py | 6 ++++-- .../test_unit/repository/test_repo_query.py | 10 +++++---- .../test_unit/repository/test_repo_size.py | 12 ++++++----- .../test_repo_transaction_commit.py | 16 +++++++------- .../test_repo_transaction_delete.py | 10 +++++---- .../repository/test_repo_transaction_get.py | 18 +++++++++------- .../repository/test_repo_transaction_ping.py | 16 +++++++------- .../repository/test_repo_transaction_query.py | 10 +++++---- .../test_repo_transaction_rollback.py | 18 +++++++++------- .../repository/test_repo_transaction_size.py | 10 +++++---- .../test_repo_transaction_update.py | 10 +++++---- .../test_repo_transaction_upload.py | 6 ++++-- .../test_unit/repository/test_repo_update.py | 10 +++++---- .../test_unit/repository/test_repo_upload.py | 6 ++++-- 37 files changed, 222 insertions(+), 143 deletions(-) diff --git a/test/test_rdf4j/test_e2e/conftest.py b/test/test_rdf4j/test_e2e/conftest.py index a578060fa..daf4d48b5 100644 --- a/test/test_rdf4j/test_e2e/conftest.py +++ b/test/test_rdf4j/test_e2e/conftest.py @@ -5,12 +5,15 @@ from testcontainers.core.image import DockerImage from testcontainers.core.waiting_utils import wait_for_logs -from rdflib.contrib.rdf4j import RDF4JClient, has_httpx +from rdflib.contrib.rdf4j import has_httpx pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + from 
rdflib.contrib.rdf4j import RDF4JClient + GRAPHDB_PORT = 7200 diff --git a/test/test_rdf4j/test_e2e/test_client.py b/test/test_rdf4j/test_e2e/test_client.py index 9077e22f8..e5acaa26f 100644 --- a/test/test_rdf4j/test_e2e/test_client.py +++ b/test/test_rdf4j/test_e2e/test_client.py @@ -1,12 +1,15 @@ import pytest -from rdflib.contrib.rdf4j import RDF4JClient, has_httpx -from rdflib.contrib.rdf4j.exceptions import RDF4JUnsupportedProtocolError +from rdflib.contrib.rdf4j import has_httpx pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + from rdflib.contrib.rdf4j import RDF4JClient + from rdflib.contrib.rdf4j.exceptions import RDF4JUnsupportedProtocolError + @pytest.mark.testcontainer def test_client_close_method(client: RDF4JClient): diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_graph_store.py b/test/test_rdf4j/test_e2e/test_e2e_repo_graph_store.py index 5ec6492bb..e3a1028e2 100644 --- a/test/test_rdf4j/test_e2e/test_e2e_repo_graph_store.py +++ b/test/test_rdf4j/test_e2e/test_e2e_repo_graph_store.py @@ -4,13 +4,15 @@ from rdflib import Dataset, Graph, URIRef from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import Repository from rdflib.graph import DATASET_DEFAULT_GRAPH_ID pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + from rdflib.contrib.rdf4j.client import Repository + @pytest.mark.parametrize( "graph_name", [URIRef("urn:graph:a"), DATASET_DEFAULT_GRAPH_ID] diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_management.py b/test/test_rdf4j/test_e2e/test_e2e_repo_management.py index 0727364df..45ac29198 100644 --- a/test/test_rdf4j/test_e2e/test_e2e_repo_management.py +++ b/test/test_rdf4j/test_e2e/test_e2e_repo_management.py @@ -1,23 +1,26 @@ import pathlib -import httpx import pytest from rdflib import BNode, Dataset, URIRef from rdflib.compare import isomorphic -from rdflib.contrib.rdf4j 
import RDF4JClient, has_httpx -from rdflib.contrib.rdf4j.client import Repository -from rdflib.contrib.rdf4j.exceptions import ( - RepositoryAlreadyExistsError, - RepositoryFormatError, - RepositoryNotFoundError, - RepositoryNotHealthyError, -) +from rdflib.contrib.rdf4j import has_httpx pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + import httpx + from rdflib.contrib.rdf4j import RDF4JClient + from rdflib.contrib.rdf4j.client import Repository + from rdflib.contrib.rdf4j.exceptions import ( + RepositoryAlreadyExistsError, + RepositoryFormatError, + RepositoryNotFoundError, + RepositoryNotHealthyError, + ) + @pytest.mark.testcontainer def test_repos(client: RDF4JClient): diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_namespace.py b/test/test_rdf4j/test_e2e/test_e2e_repo_namespace.py index 950874249..01759a0c4 100644 --- a/test/test_rdf4j/test_e2e/test_e2e_repo_namespace.py +++ b/test/test_rdf4j/test_e2e/test_e2e_repo_namespace.py @@ -1,12 +1,14 @@ import pytest from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import NamespaceListingResult, Repository pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + from rdflib.contrib.rdf4j.client import NamespaceListingResult, Repository + @pytest.mark.testcontainer def test_e2e_repo_namespace_crud(repo: Repository): diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_query.py b/test/test_rdf4j/test_e2e/test_e2e_repo_query.py index 542e93eca..2b1b3b2f6 100644 --- a/test/test_rdf4j/test_e2e/test_e2e_repo_query.py +++ b/test/test_rdf4j/test_e2e/test_e2e_repo_query.py @@ -3,13 +3,15 @@ import pytest from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import Repository from rdflib.term import URIRef, Variable pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + from 
rdflib.contrib.rdf4j.client import Repository + def test_e2e_repo_query(repo: Repository): path = str(Path(__file__).parent.parent / "data/quads-1.nq") diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py b/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py index fbe0c5444..7cbe47a7e 100644 --- a/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py +++ b/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py @@ -3,14 +3,16 @@ import pytest from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import Repository, Transaction -from rdflib.contrib.rdf4j.exceptions import TransactionClosedError from rdflib.term import Literal, URIRef, Variable pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + from rdflib.contrib.rdf4j.client import Repository, Transaction + from rdflib.contrib.rdf4j.exceptions import TransactionClosedError + def test_e2e_repo_transaction(repo: Repository): path = str(Path(__file__).parent.parent / "data/quads-1.nq") diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_update.py b/test/test_rdf4j/test_e2e/test_e2e_repo_update.py index 02f2bfebd..60c30be4a 100644 --- a/test/test_rdf4j/test_e2e/test_e2e_repo_update.py +++ b/test/test_rdf4j/test_e2e/test_e2e_repo_update.py @@ -3,12 +3,14 @@ import pytest from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import Repository pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + from rdflib.contrib.rdf4j.client import Repository + def test_e2e_repo_query(repo: Repository): path = str(Path(__file__).parent.parent / "data/quads-1.nq") diff --git a/test/test_rdf4j/test_e2e/test_graphdb/conftest.py b/test/test_rdf4j/test_e2e/test_graphdb/conftest.py index 90e2e563c..c17944027 100644 --- a/test/test_rdf4j/test_e2e/test_graphdb/conftest.py +++ b/test/test_rdf4j/test_e2e/test_graphdb/conftest.py @@ -1,13 +1,15 @@ import 
pytest from testcontainers.core.container import DockerContainer -from rdflib.contrib.graphdb import GraphDBClient from rdflib.contrib.rdf4j import has_httpx pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + from rdflib.contrib.graphdb import GraphDBClient + @pytest.fixture(scope="function") def client(graphdb_container: DockerContainer): diff --git a/test/test_rdf4j/test_e2e/test_graphdb/test_graphdb_repo_management.py b/test/test_rdf4j/test_e2e/test_graphdb/test_graphdb_repo_management.py index 753056d4e..2cd9ac34c 100644 --- a/test/test_rdf4j/test_e2e/test_graphdb/test_graphdb_repo_management.py +++ b/test/test_rdf4j/test_e2e/test_graphdb/test_graphdb_repo_management.py @@ -1,20 +1,22 @@ import pathlib -import httpx import pytest -from rdflib.contrib.graphdb import GraphDBClient from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.exceptions import ( - RepositoryAlreadyExistsError, - RepositoryNotFoundError, - RepositoryNotHealthyError, -) pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + import httpx + from rdflib.contrib.graphdb import GraphDBClient + from rdflib.contrib.rdf4j.exceptions import ( + RepositoryAlreadyExistsError, + RepositoryNotFoundError, + RepositoryNotHealthyError, + ) + # TODO: consider parameterizing the client (RDF4JClient, GraphDBClient) diff --git a/test/test_rdf4j/test_unit/repository/conftest.py b/test/test_rdf4j/test_unit/repository/conftest.py index 7a93fbc80..5048abe06 100644 --- a/test/test_rdf4j/test_unit/repository/conftest.py +++ b/test/test_rdf4j/test_unit/repository/conftest.py @@ -2,16 +2,19 @@ from unittest.mock import Mock -import httpx import pytest -from rdflib.contrib.rdf4j import RDF4JClient, has_httpx -from rdflib.contrib.rdf4j.client import Repository, RepositoryManager +from rdflib.contrib.rdf4j import has_httpx pytestmark = pytest.mark.skipif( not has_httpx, 
reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + import httpx + from rdflib.contrib.rdf4j import RDF4JClient + from rdflib.contrib.rdf4j.client import Repository, RepositoryManager + @pytest.fixture(scope="function") def client(monkeypatch: pytest.MonkeyPatch): diff --git a/test/test_rdf4j/test_unit/repository/test_repo_delete.py b/test/test_rdf4j/test_unit/repository/test_repo_delete.py index 22d8c499e..390c0aa95 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_delete.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_delete.py @@ -3,16 +3,9 @@ import typing as t from unittest.mock import Mock -import httpx import pytest from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import ( - ObjectType, - PredicateType, - Repository, - SubjectType, -) from rdflib.graph import DATASET_DEFAULT_GRAPH_ID from rdflib.term import BNode, IdentifiedNode, URIRef @@ -20,6 +13,16 @@ not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + import httpx + + from rdflib.contrib.rdf4j.client import ( + ObjectType, + PredicateType, + Repository, + SubjectType, + ) + @pytest.mark.parametrize( "subj, pred, obj, expected_params", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_get.py b/test/test_rdf4j/test_unit/repository/test_repo_get.py index 3e3854974..f1084a0e8 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_get.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_get.py @@ -3,18 +3,10 @@ import typing as t from unittest.mock import Mock -import httpx import pytest from rdflib import Dataset, Graph from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import ( - NamespaceManager, - ObjectType, - PredicateType, - Repository, - SubjectType, -) from rdflib.graph import DATASET_DEFAULT_GRAPH_ID from rdflib.term import BNode, IdentifiedNode, URIRef @@ -22,6 +14,16 @@ not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + 
import httpx + from rdflib.contrib.rdf4j.client import ( + NamespaceManager, + ObjectType, + PredicateType, + Repository, + SubjectType, + ) + @pytest.mark.parametrize( "content_type, data, expected_class_type", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_add.py b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_add.py index 60a228475..ef15d3463 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_add.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_add.py @@ -2,19 +2,21 @@ from unittest.mock import ANY, Mock -import httpx import pytest from rdflib import URIRef from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import ( - Repository, -) pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + import httpx + from rdflib.contrib.rdf4j.client import ( + Repository, + ) + @pytest.mark.parametrize( "graph_name, expected_params", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_clear.py b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_clear.py index e34d25f2d..aecead09f 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_clear.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_clear.py @@ -2,19 +2,21 @@ from unittest.mock import Mock -import httpx import pytest from rdflib import URIRef from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import ( - Repository, -) pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + import httpx + from rdflib.contrib.rdf4j.client import ( + Repository, + ) + @pytest.mark.parametrize( "graph_name, expected_params", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_get.py b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_get.py index 712b06ec1..a99c979a3 100644 --- 
a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_get.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_get.py @@ -2,19 +2,21 @@ from unittest.mock import Mock -import httpx import pytest from rdflib import Graph, URIRef from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import ( - Repository, -) pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + import httpx + from rdflib.contrib.rdf4j.client import ( + Repository, + ) + @pytest.mark.parametrize( "graph_name, expected_params", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_overwrite.py b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_overwrite.py index 79c5c2cbf..f6be3fe9e 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_overwrite.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_overwrite.py @@ -2,19 +2,21 @@ from unittest.mock import ANY, Mock -import httpx import pytest from rdflib import URIRef from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import ( - Repository, -) pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + import httpx + from rdflib.contrib.rdf4j.client import ( + Repository, + ) + @pytest.mark.parametrize( "graph_name, expected_params", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_graphs.py b/test/test_rdf4j/test_unit/repository/test_repo_graphs.py index 6b877a467..aab866d35 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_graphs.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_graphs.py @@ -2,18 +2,20 @@ from unittest.mock import Mock -import httpx import pytest from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import Repository -from rdflib.contrib.rdf4j.exceptions import RepositoryFormatError from rdflib.term import BNode, IdentifiedNode, URIRef 
pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + import httpx + from rdflib.contrib.rdf4j.client import Repository + from rdflib.contrib.rdf4j.exceptions import RepositoryFormatError + @pytest.mark.parametrize( "response_dict, expected_result", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_clear.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_clear.py index e01e23910..ff95e7830 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_namespace_clear.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_clear.py @@ -2,16 +2,18 @@ from unittest.mock import Mock -import httpx import pytest from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import Repository pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + import httpx + from rdflib.contrib.rdf4j.client import Repository + def test_repo_namespace_clear(repo: Repository, monkeypatch: pytest.MonkeyPatch): mock_response = Mock(spec=httpx.Response) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py index fafb070f0..15a5d66e2 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py @@ -2,21 +2,23 @@ from unittest.mock import Mock -import httpx import pytest from rdflib import Dataset, URIRef from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import ( - NamespaceListingResult, - NamespaceManager, - Repository, -) pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + import httpx + from rdflib.contrib.rdf4j.client import ( + NamespaceListingResult, + NamespaceManager, + Repository, + ) + @pytest.mark.parametrize( "prefix, response_text, 
response_status_code, expected_value", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_list.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_list.py index 13ba77440..0713fed2f 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_namespace_list.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_list.py @@ -2,18 +2,20 @@ from unittest.mock import Mock -import httpx import pytest from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import NamespaceListingResult, Repository -from rdflib.contrib.rdf4j.exceptions import RepositoryFormatError from rdflib.term import IdentifiedNode pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + import httpx + from rdflib.contrib.rdf4j.client import NamespaceListingResult, Repository + from rdflib.contrib.rdf4j.exceptions import RepositoryFormatError + @pytest.mark.parametrize( "response_dict, expected_result", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_remove.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_remove.py index 808e0fb39..c5e08b566 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_namespace_remove.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_remove.py @@ -2,16 +2,18 @@ from unittest.mock import Mock -import httpx import pytest from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import Repository pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + import httpx + from rdflib.contrib.rdf4j.client import Repository + @pytest.mark.parametrize( "prefix", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_set.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_set.py index 208c32fb1..23177a33c 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_namespace_set.py +++ 
b/test/test_rdf4j/test_unit/repository/test_repo_namespace_set.py @@ -2,16 +2,18 @@ from unittest.mock import Mock -import httpx import pytest from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import Repository pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + import httpx + from rdflib.contrib.rdf4j.client import Repository + @pytest.mark.parametrize( "prefix, namespace", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_overwrite.py b/test/test_rdf4j/test_unit/repository/test_repo_overwrite.py index 5b59707d7..aba9dc8fa 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_overwrite.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_overwrite.py @@ -5,18 +5,20 @@ from typing import Iterable from unittest.mock import ANY, Mock -import httpx import pytest from rdflib import BNode, Dataset, Graph, IdentifiedNode, URIRef from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import Repository from rdflib.graph import DATASET_DEFAULT_GRAPH_ID pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + import httpx + from rdflib.contrib.rdf4j.client import Repository + @pytest.mark.parametrize("class_type", [Graph, Dataset]) def test_repo_overwrite_graph( diff --git a/test/test_rdf4j/test_unit/repository/test_repo_query.py b/test/test_rdf4j/test_unit/repository/test_repo_query.py index 007e8f8d1..65355b7ca 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_query.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_query.py @@ -2,20 +2,22 @@ from unittest.mock import Mock -import httpx import pytest from rdflib import Graph from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import ( - Repository, -) from rdflib.term import URIRef, Variable pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) 
+if has_httpx: + import httpx + from rdflib.contrib.rdf4j.client import ( + Repository, + ) + @pytest.mark.parametrize( "query, accept_header, response_text, expected_result_type", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_size.py b/test/test_rdf4j/test_unit/repository/test_repo_size.py index 2766525e0..b8f8fa22b 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_size.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_size.py @@ -3,14 +3,9 @@ import typing as t from unittest.mock import Mock -import httpx import pytest from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import ( - Repository, -) -from rdflib.contrib.rdf4j.exceptions import RepositoryFormatError from rdflib.graph import DATASET_DEFAULT_GRAPH_ID from rdflib.term import BNode, IdentifiedNode, URIRef @@ -18,6 +13,13 @@ not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + import httpx + from rdflib.contrib.rdf4j.client import ( + Repository, + ) + from rdflib.contrib.rdf4j.exceptions import RepositoryFormatError + @pytest.mark.parametrize( "graph_name, expected_graph_name_param", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_commit.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_commit.py index 79a2e6abc..a61668d1e 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_commit.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_commit.py @@ -2,21 +2,23 @@ from unittest.mock import Mock -import httpx import pytest from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import ( - Repository, -) -from rdflib.contrib.rdf4j.exceptions import ( - TransactionClosedError, -) pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + import httpx + from rdflib.contrib.rdf4j.client import ( + Repository, + ) + from rdflib.contrib.rdf4j.exceptions import ( + 
TransactionClosedError, + ) + def test_repo_transaction_commit( repo: Repository, diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_delete.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_delete.py index 386cdce53..dacaeba78 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_delete.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_delete.py @@ -2,18 +2,20 @@ from unittest.mock import ANY, Mock -import httpx import pytest from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import ( - Transaction, -) pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + import httpx + from rdflib.contrib.rdf4j.client import ( + Transaction, + ) + @pytest.mark.parametrize( "base_uri, content_type, expected_headers, expected_params", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_get.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_get.py index 6793d799d..8b9fe50f9 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_get.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_get.py @@ -3,24 +3,26 @@ import typing as t from unittest.mock import Mock -import httpx import pytest from rdflib import BNode, Dataset, Graph, IdentifiedNode, URIRef from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import ( - NamespaceManager, - ObjectType, - PredicateType, - SubjectType, - Transaction, -) from rdflib.graph import DATASET_DEFAULT_GRAPH_ID pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + import httpx + from rdflib.contrib.rdf4j.client import ( + NamespaceManager, + ObjectType, + PredicateType, + SubjectType, + Transaction, + ) + def test_repo_transaction_get(txn: Transaction, monkeypatch: pytest.MonkeyPatch): mock_response = Mock( diff --git 
a/test/test_rdf4j/test_unit/repository/test_repo_transaction_ping.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_ping.py index 12121c561..32d738fe2 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_ping.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_ping.py @@ -2,21 +2,23 @@ from unittest.mock import Mock -import httpx import pytest from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import ( - Transaction, -) -from rdflib.contrib.rdf4j.exceptions import ( - TransactionPingError, -) pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + import httpx + from rdflib.contrib.rdf4j.client import ( + Transaction, + ) + from rdflib.contrib.rdf4j.exceptions import ( + TransactionPingError, + ) + def test_repo_transaction_ping(txn: Transaction, monkeypatch: pytest.MonkeyPatch): # Test a successful ping. diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_query.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_query.py index 4e8248d0b..1b581c420 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_query.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_query.py @@ -2,13 +2,9 @@ from unittest.mock import Mock -import httpx import pytest from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import ( - Transaction, -) from rdflib.graph import Graph from rdflib.term import URIRef, Variable @@ -16,6 +12,12 @@ not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + import httpx + from rdflib.contrib.rdf4j.client import ( + Transaction, + ) + @pytest.mark.parametrize( "query, accept_header, response_text, expected_result_type", diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_rollback.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_rollback.py index 25f7d0171..491b8e10f 100644 
--- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_rollback.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_rollback.py @@ -2,22 +2,24 @@ from unittest.mock import Mock -import httpx import pytest from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import ( - Repository, -) -from rdflib.contrib.rdf4j.exceptions import ( - TransactionClosedError, - TransactionPingError, -) pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + import httpx + from rdflib.contrib.rdf4j.client import ( + Repository, + ) + from rdflib.contrib.rdf4j.exceptions import ( + TransactionClosedError, + TransactionPingError, + ) + def test_repo_transaction_rollback( repo: Repository, diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_size.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_size.py index b1b5f63c3..6326983c9 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_size.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_size.py @@ -2,18 +2,20 @@ from unittest.mock import Mock -import httpx import pytest from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import ( - Transaction, -) pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + import httpx + from rdflib.contrib.rdf4j.client import ( + Transaction, + ) + def test_repo_transaction_size(txn: Transaction, monkeypatch: pytest.MonkeyPatch): mock_response = Mock(spec=httpx.Response, text="10") diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_update.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_update.py index 1a65e1328..faec3a42d 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_update.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_update.py @@ -2,18 +2,20 @@ from 
unittest.mock import Mock -import httpx import pytest from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import ( - Transaction, -) pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + import httpx + from rdflib.contrib.rdf4j.client import ( + Transaction, + ) + def test_repo_update(txn: Transaction, monkeypatch: pytest.MonkeyPatch): mock_response = Mock(spec=httpx.Response, status_code=204) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_upload.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_upload.py index 9572c4154..dd2a0866a 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_upload.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_upload.py @@ -4,17 +4,19 @@ import pathlib from unittest.mock import ANY, Mock -import httpx import pytest from rdflib import Dataset, Graph from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import Transaction pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + import httpx + from rdflib.contrib.rdf4j.client import Transaction + def test_repo_transaction_upload(txn: Transaction, monkeypatch: pytest.MonkeyPatch): mock_response = Mock(spec=httpx.Response) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_update.py b/test/test_rdf4j/test_unit/repository/test_repo_update.py index c4aed86af..200c4f05a 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_update.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_update.py @@ -2,18 +2,20 @@ from unittest.mock import Mock -import httpx import pytest from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import ( - Repository, -) pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + import httpx + from rdflib.contrib.rdf4j.client 
import ( + Repository, + ) + def test_repo_update(repo: Repository, monkeypatch: pytest.MonkeyPatch): mock_response = Mock(spec=httpx.Response, status_code=204) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_upload.py b/test/test_rdf4j/test_unit/repository/test_repo_upload.py index afe3f10f3..85b357d79 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_upload.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_upload.py @@ -4,17 +4,19 @@ import pathlib from unittest.mock import ANY, Mock -import httpx import pytest from rdflib import Dataset, Graph from rdflib.contrib.rdf4j import has_httpx -from rdflib.contrib.rdf4j.client import Repository pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" ) +if has_httpx: + import httpx + from rdflib.contrib.rdf4j.client import Repository + @pytest.mark.parametrize("class_type", [Graph, Dataset]) def test_repo_upload_graph( From fefbb064cca1c35bf4298832ae71b105f751a78e Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Fri, 7 Nov 2025 13:36:23 +1000 Subject: [PATCH 50/54] test: fix test annotations --- test/test_rdf4j/test_e2e/conftest.py | 61 +++++----- test/test_rdf4j/test_e2e/test_client.py | 17 +-- .../test_e2e/test_e2e_repo_graph_store.py | 2 + .../test_e2e/test_e2e_repo_management.py | 3 + .../test_e2e/test_e2e_repo_namespace.py | 2 + .../test_e2e/test_e2e_repo_query.py | 2 + .../test_e2e/test_e2e_repo_transaction.py | 4 +- .../test_e2e/test_e2e_repo_update.py | 2 + .../test_e2e/test_graphdb/conftest.py | 13 +- .../test_graphdb_repo_management.py | 14 ++- .../test_unit/repository/conftest.py | 1 + .../test_unit/repository/test_repo_get.py | 1 + .../repository/test_repo_graph_store_add.py | 1 + .../repository/test_repo_graph_store_clear.py | 1 + .../repository/test_repo_graph_store_get.py | 1 + .../test_repo_graph_store_overwrite.py | 1 + .../test_unit/repository/test_repo_graphs.py | 3 +- .../repository/test_repo_namespace_clear.py | 1 + 
.../repository/test_repo_namespace_get.py | 1 + .../repository/test_repo_namespace_list.py | 111 +++++++++--------- .../repository/test_repo_namespace_remove.py | 1 + .../repository/test_repo_namespace_set.py | 1 + .../repository/test_repo_overwrite.py | 1 + .../test_unit/repository/test_repo_query.py | 1 + .../test_unit/repository/test_repo_size.py | 3 +- .../test_repo_transaction_commit.py | 5 +- .../test_repo_transaction_delete.py | 1 + .../repository/test_repo_transaction_get.py | 1 + .../repository/test_repo_transaction_ping.py | 5 +- .../repository/test_repo_transaction_query.py | 1 + .../test_repo_transaction_rollback.py | 6 +- .../repository/test_repo_transaction_size.py | 1 + .../test_repo_transaction_update.py | 1 + .../test_repo_transaction_upload.py | 1 + .../test_unit/repository/test_repo_update.py | 1 + .../test_unit/repository/test_repo_upload.py | 1 + 36 files changed, 156 insertions(+), 117 deletions(-) diff --git a/test/test_rdf4j/test_e2e/conftest.py b/test/test_rdf4j/test_e2e/conftest.py index daf4d48b5..90de67eec 100644 --- a/test/test_rdf4j/test_e2e/conftest.py +++ b/test/test_rdf4j/test_e2e/conftest.py @@ -14,33 +14,34 @@ if has_httpx: from rdflib.contrib.rdf4j import RDF4JClient -GRAPHDB_PORT = 7200 - - -@pytest.fixture(scope="function") -def graphdb_container(): - with DockerImage(str(pathlib.Path(__file__).parent / "docker")) as image: - container = DockerContainer(str(image)) - container.with_exposed_ports(GRAPHDB_PORT) - container.start() - wait_for_logs(container, "Started GraphDB") - yield container - container.stop() - - -@pytest.fixture(scope="function") -def client(graphdb_container: DockerContainer): - port = graphdb_container.get_exposed_port(7200) - with RDF4JClient(f"http://localhost:{port}/", auth=("admin", "admin")) as client: - yield client - - -@pytest.fixture(scope="function") -def repo(client: RDF4JClient): - config_path = pathlib.Path(__file__).parent / "repo-configs/test-repo-config.ttl" - with open(config_path) as file: 
- config = file.read() - - repo = client.repositories.create("test-repo", config) - assert repo.identifier == "test-repo" - yield repo + GRAPHDB_PORT = 7200 + + @pytest.fixture(scope="function") + def graphdb_container(): + with DockerImage(str(pathlib.Path(__file__).parent / "docker")) as image: + container = DockerContainer(str(image)) + container.with_exposed_ports(GRAPHDB_PORT) + container.start() + wait_for_logs(container, "Started GraphDB") + yield container + container.stop() + + @pytest.fixture(scope="function") + def client(graphdb_container: DockerContainer): + port = graphdb_container.get_exposed_port(7200) + with RDF4JClient( + f"http://localhost:{port}/", auth=("admin", "admin") + ) as client: + yield client + + @pytest.fixture(scope="function") + def repo(client: RDF4JClient): + config_path = ( + pathlib.Path(__file__).parent / "repo-configs/test-repo-config.ttl" + ) + with open(config_path) as file: + config = file.read() + + repo = client.repositories.create("test-repo", config) + assert repo.identifier == "test-repo" + yield repo diff --git a/test/test_rdf4j/test_e2e/test_client.py b/test/test_rdf4j/test_e2e/test_client.py index e5acaa26f..50976d68b 100644 --- a/test/test_rdf4j/test_e2e/test_client.py +++ b/test/test_rdf4j/test_e2e/test_client.py @@ -1,6 +1,9 @@ +from __future__ import annotations + import pytest from rdflib.contrib.rdf4j import has_httpx +from rdflib.contrib.rdf4j.exceptions import RDF4JUnsupportedProtocolError pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" @@ -8,7 +11,12 @@ if has_httpx: from rdflib.contrib.rdf4j import RDF4JClient - from rdflib.contrib.rdf4j.exceptions import RDF4JUnsupportedProtocolError + + @pytest.mark.testcontainer + def test_client_protocol_error(monkeypatch): + monkeypatch.setattr(RDF4JClient, "protocol", 11) + with pytest.raises(RDF4JUnsupportedProtocolError): + RDF4JClient("http://example.com/") @pytest.mark.testcontainer @@ -20,10 +28,3 @@ def 
test_client_close_method(client: RDF4JClient): @pytest.mark.testcontainer def test_client_protocol(client: RDF4JClient): assert client.protocol >= 12 - - -@pytest.mark.testcontainer -def test_client_protocol_error(monkeypatch): - monkeypatch.setattr(RDF4JClient, "protocol", 11) - with pytest.raises(RDF4JUnsupportedProtocolError): - RDF4JClient("http://example.com/") diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_graph_store.py b/test/test_rdf4j/test_e2e/test_e2e_repo_graph_store.py index e3a1028e2..ec17cafd9 100644 --- a/test/test_rdf4j/test_e2e/test_e2e_repo_graph_store.py +++ b/test/test_rdf4j/test_e2e/test_e2e_repo_graph_store.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from pathlib import Path import pytest diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_management.py b/test/test_rdf4j/test_e2e/test_e2e_repo_management.py index 45ac29198..ed25eb25a 100644 --- a/test/test_rdf4j/test_e2e/test_e2e_repo_management.py +++ b/test/test_rdf4j/test_e2e/test_e2e_repo_management.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import pathlib import pytest @@ -12,6 +14,7 @@ if has_httpx: import httpx + from rdflib.contrib.rdf4j import RDF4JClient from rdflib.contrib.rdf4j.client import Repository from rdflib.contrib.rdf4j.exceptions import ( diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_namespace.py b/test/test_rdf4j/test_e2e/test_e2e_repo_namespace.py index 01759a0c4..93d30a694 100644 --- a/test/test_rdf4j/test_e2e/test_e2e_repo_namespace.py +++ b/test/test_rdf4j/test_e2e/test_e2e_repo_namespace.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import pytest from rdflib.contrib.rdf4j import has_httpx diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_query.py b/test/test_rdf4j/test_e2e/test_e2e_repo_query.py index 2b1b3b2f6..6d28e3aa4 100644 --- a/test/test_rdf4j/test_e2e/test_e2e_repo_query.py +++ b/test/test_rdf4j/test_e2e/test_e2e_repo_query.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from pathlib import Path 
import pytest diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py b/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py index 7cbe47a7e..edece3d16 100644 --- a/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py +++ b/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py @@ -1,8 +1,11 @@ +from __future__ import annotations + from pathlib import Path import pytest from rdflib.contrib.rdf4j import has_httpx +from rdflib.contrib.rdf4j.exceptions import TransactionClosedError from rdflib.term import Literal, URIRef, Variable pytestmark = pytest.mark.skipif( @@ -11,7 +14,6 @@ if has_httpx: from rdflib.contrib.rdf4j.client import Repository, Transaction - from rdflib.contrib.rdf4j.exceptions import TransactionClosedError def test_e2e_repo_transaction(repo: Repository): diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_update.py b/test/test_rdf4j/test_e2e/test_e2e_repo_update.py index 60c30be4a..78f132c0a 100644 --- a/test/test_rdf4j/test_e2e/test_e2e_repo_update.py +++ b/test/test_rdf4j/test_e2e/test_e2e_repo_update.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from pathlib import Path import pytest diff --git a/test/test_rdf4j/test_e2e/test_graphdb/conftest.py b/test/test_rdf4j/test_e2e/test_graphdb/conftest.py index c17944027..97e741ef1 100644 --- a/test/test_rdf4j/test_e2e/test_graphdb/conftest.py +++ b/test/test_rdf4j/test_e2e/test_graphdb/conftest.py @@ -10,9 +10,10 @@ if has_httpx: from rdflib.contrib.graphdb import GraphDBClient - -@pytest.fixture(scope="function") -def client(graphdb_container: DockerContainer): - port = graphdb_container.get_exposed_port(7200) - with GraphDBClient(f"http://localhost:{port}/", auth=("admin", "admin")) as client: - yield client + @pytest.fixture(scope="function") + def client(graphdb_container: DockerContainer): + port = graphdb_container.get_exposed_port(7200) + with GraphDBClient( + f"http://localhost:{port}/", auth=("admin", "admin") + ) as client: + yield client diff --git 
a/test/test_rdf4j/test_e2e/test_graphdb/test_graphdb_repo_management.py b/test/test_rdf4j/test_e2e/test_graphdb/test_graphdb_repo_management.py index 2cd9ac34c..278c9d629 100644 --- a/test/test_rdf4j/test_e2e/test_graphdb/test_graphdb_repo_management.py +++ b/test/test_rdf4j/test_e2e/test_graphdb/test_graphdb_repo_management.py @@ -1,8 +1,15 @@ +from __future__ import annotations + import pathlib import pytest from rdflib.contrib.rdf4j import has_httpx +from rdflib.contrib.rdf4j.exceptions import ( + RepositoryAlreadyExistsError, + RepositoryNotFoundError, + RepositoryNotHealthyError, +) pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" @@ -10,12 +17,9 @@ if has_httpx: import httpx + from rdflib.contrib.graphdb import GraphDBClient - from rdflib.contrib.rdf4j.exceptions import ( - RepositoryAlreadyExistsError, - RepositoryNotFoundError, - RepositoryNotHealthyError, - ) + # TODO: consider parameterizing the client (RDF4JClient, GraphDBClient) diff --git a/test/test_rdf4j/test_unit/repository/conftest.py b/test/test_rdf4j/test_unit/repository/conftest.py index 5048abe06..b74b3017f 100644 --- a/test/test_rdf4j/test_unit/repository/conftest.py +++ b/test/test_rdf4j/test_unit/repository/conftest.py @@ -12,6 +12,7 @@ if has_httpx: import httpx + from rdflib.contrib.rdf4j import RDF4JClient from rdflib.contrib.rdf4j.client import Repository, RepositoryManager diff --git a/test/test_rdf4j/test_unit/repository/test_repo_get.py b/test/test_rdf4j/test_unit/repository/test_repo_get.py index f1084a0e8..b5a46c1e3 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_get.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_get.py @@ -16,6 +16,7 @@ if has_httpx: import httpx + from rdflib.contrib.rdf4j.client import ( NamespaceManager, ObjectType, diff --git a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_add.py b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_add.py index ef15d3463..c9f7675cf 
100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_add.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_add.py @@ -13,6 +13,7 @@ if has_httpx: import httpx + from rdflib.contrib.rdf4j.client import ( Repository, ) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_clear.py b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_clear.py index aecead09f..5cdfbd5ce 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_clear.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_clear.py @@ -13,6 +13,7 @@ if has_httpx: import httpx + from rdflib.contrib.rdf4j.client import ( Repository, ) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_get.py b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_get.py index a99c979a3..dedee433b 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_get.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_get.py @@ -13,6 +13,7 @@ if has_httpx: import httpx + from rdflib.contrib.rdf4j.client import ( Repository, ) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_overwrite.py b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_overwrite.py index f6be3fe9e..6569de25c 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_overwrite.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_overwrite.py @@ -13,6 +13,7 @@ if has_httpx: import httpx + from rdflib.contrib.rdf4j.client import ( Repository, ) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_graphs.py b/test/test_rdf4j/test_unit/repository/test_repo_graphs.py index aab866d35..f554cd9ed 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_graphs.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_graphs.py @@ -5,6 +5,7 @@ import pytest from rdflib.contrib.rdf4j import has_httpx +from rdflib.contrib.rdf4j.exceptions import RepositoryFormatError 
from rdflib.term import BNode, IdentifiedNode, URIRef pytestmark = pytest.mark.skipif( @@ -13,8 +14,8 @@ if has_httpx: import httpx + from rdflib.contrib.rdf4j.client import Repository - from rdflib.contrib.rdf4j.exceptions import RepositoryFormatError @pytest.mark.parametrize( diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_clear.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_clear.py index ff95e7830..5253f2115 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_namespace_clear.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_clear.py @@ -12,6 +12,7 @@ if has_httpx: import httpx + from rdflib.contrib.rdf4j.client import Repository diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py index 15a5d66e2..f2a586dab 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py @@ -13,6 +13,7 @@ if has_httpx: import httpx + from rdflib.contrib.rdf4j.client import ( NamespaceListingResult, NamespaceManager, diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_list.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_list.py index 0713fed2f..88c70a124 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_namespace_list.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_list.py @@ -13,66 +13,67 @@ if has_httpx: import httpx + from rdflib.contrib.rdf4j.client import NamespaceListingResult, Repository from rdflib.contrib.rdf4j.exceptions import RepositoryFormatError - -@pytest.mark.parametrize( - "response_dict, expected_result", - [ - [{"results": {"bindings": []}}, set()], + @pytest.mark.parametrize( + "response_dict, expected_result", [ - { - "results": { - "bindings": [ - { - "prefix": {"value": "test"}, - "namespace": {"value": "http://example.com/test/"}, - }, - { - "prefix": {"value": 
"test2"}, - "namespace": {"value": "http://example.com/test2/"}, - }, - ] - } - }, - { - NamespaceListingResult( - prefix="test", namespace="http://example.com/test/" - ), - NamespaceListingResult( - prefix="test2", namespace="http://example.com/test2/" - ), - }, + [{"results": {"bindings": []}}, set()], + [ + { + "results": { + "bindings": [ + { + "prefix": {"value": "test"}, + "namespace": {"value": "http://example.com/test/"}, + }, + { + "prefix": {"value": "test2"}, + "namespace": {"value": "http://example.com/test2/"}, + }, + ] + } + }, + { + NamespaceListingResult( + prefix="test", namespace="http://example.com/test/" + ), + NamespaceListingResult( + prefix="test2", namespace="http://example.com/test2/" + ), + }, + ], ], - ], -) -def test_repo_namespace_list( - repo: Repository, - monkeypatch: pytest.MonkeyPatch, - response_dict: dict, - expected_result: set[IdentifiedNode], -): - mock_response = Mock(spec=httpx.Response, json=lambda: response_dict) - mock_httpx_get = Mock(return_value=mock_response) - monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) - result = repo.namespaces.list() - assert set(result) == expected_result - mock_httpx_get.assert_called_once_with( - "/repositories/test-repo/namespaces", - headers={"Accept": "application/sparql-results+json"}, ) + def test_repo_namespace_list( + repo: Repository, + monkeypatch: pytest.MonkeyPatch, + response_dict: dict, + expected_result: set[IdentifiedNode], + ): + mock_response = Mock(spec=httpx.Response, json=lambda: response_dict) + mock_httpx_get = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) + result = repo.namespaces.list() + assert set(result) == expected_result + mock_httpx_get.assert_called_once_with( + "/repositories/test-repo/namespaces", + headers={"Accept": "application/sparql-results+json"}, + ) + def test_repo_namespace_list_error( + repo: Repository, monkeypatch: pytest.MonkeyPatch + ): + response_dict: dict[str, str] = {} -def 
test_repo_namespace_list_error(repo: Repository, monkeypatch: pytest.MonkeyPatch): - response_dict: dict[str, str] = {} - - mock_response = Mock(spec=httpx.Response, json=lambda: response_dict) - mock_httpx_get = Mock(return_value=mock_response) - monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) - with pytest.raises(RepositoryFormatError): - repo.namespaces.list() - mock_httpx_get.assert_called_once_with( - "/repositories/test-repo/namespaces", - headers={"Accept": "application/sparql-results+json"}, - ) + mock_response = Mock(spec=httpx.Response, json=lambda: response_dict) + mock_httpx_get = Mock(return_value=mock_response) + monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) + with pytest.raises(RepositoryFormatError): + repo.namespaces.list() + mock_httpx_get.assert_called_once_with( + "/repositories/test-repo/namespaces", + headers={"Accept": "application/sparql-results+json"}, + ) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_remove.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_remove.py index c5e08b566..22c2fad66 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_namespace_remove.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_remove.py @@ -12,6 +12,7 @@ if has_httpx: import httpx + from rdflib.contrib.rdf4j.client import Repository diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_set.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_set.py index 23177a33c..9adc820ce 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_namespace_set.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_set.py @@ -12,6 +12,7 @@ if has_httpx: import httpx + from rdflib.contrib.rdf4j.client import Repository diff --git a/test/test_rdf4j/test_unit/repository/test_repo_overwrite.py b/test/test_rdf4j/test_unit/repository/test_repo_overwrite.py index aba9dc8fa..e378c9d1d 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_overwrite.py +++ 
b/test/test_rdf4j/test_unit/repository/test_repo_overwrite.py @@ -17,6 +17,7 @@ if has_httpx: import httpx + from rdflib.contrib.rdf4j.client import Repository diff --git a/test/test_rdf4j/test_unit/repository/test_repo_query.py b/test/test_rdf4j/test_unit/repository/test_repo_query.py index 65355b7ca..717524b4e 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_query.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_query.py @@ -14,6 +14,7 @@ if has_httpx: import httpx + from rdflib.contrib.rdf4j.client import ( Repository, ) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_size.py b/test/test_rdf4j/test_unit/repository/test_repo_size.py index b8f8fa22b..3370ca9e3 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_size.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_size.py @@ -6,6 +6,7 @@ import pytest from rdflib.contrib.rdf4j import has_httpx +from rdflib.contrib.rdf4j.exceptions import RepositoryFormatError from rdflib.graph import DATASET_DEFAULT_GRAPH_ID from rdflib.term import BNode, IdentifiedNode, URIRef @@ -15,10 +16,10 @@ if has_httpx: import httpx + from rdflib.contrib.rdf4j.client import ( Repository, ) - from rdflib.contrib.rdf4j.exceptions import RepositoryFormatError @pytest.mark.parametrize( diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_commit.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_commit.py index a61668d1e..37dc5472a 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_commit.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_commit.py @@ -5,6 +5,7 @@ import pytest from rdflib.contrib.rdf4j import has_httpx +from rdflib.contrib.rdf4j.exceptions import TransactionClosedError pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" @@ -12,12 +13,10 @@ if has_httpx: import httpx + from rdflib.contrib.rdf4j.client import ( Repository, ) - from rdflib.contrib.rdf4j.exceptions import 
( - TransactionClosedError, - ) def test_repo_transaction_commit( diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_delete.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_delete.py index dacaeba78..432d7b709 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_delete.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_delete.py @@ -12,6 +12,7 @@ if has_httpx: import httpx + from rdflib.contrib.rdf4j.client import ( Transaction, ) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_get.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_get.py index 8b9fe50f9..28e3c3f1d 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_get.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_get.py @@ -15,6 +15,7 @@ if has_httpx: import httpx + from rdflib.contrib.rdf4j.client import ( NamespaceManager, ObjectType, diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_ping.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_ping.py index 32d738fe2..82ef0c130 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_ping.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_ping.py @@ -5,6 +5,7 @@ import pytest from rdflib.contrib.rdf4j import has_httpx +from rdflib.contrib.rdf4j.exceptions import TransactionPingError pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" @@ -12,12 +13,10 @@ if has_httpx: import httpx + from rdflib.contrib.rdf4j.client import ( Transaction, ) - from rdflib.contrib.rdf4j.exceptions import ( - TransactionPingError, - ) def test_repo_transaction_ping(txn: Transaction, monkeypatch: pytest.MonkeyPatch): diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_query.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_query.py index 1b581c420..45bcdcfcc 100644 --- 
a/test/test_rdf4j/test_unit/repository/test_repo_transaction_query.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_query.py @@ -14,6 +14,7 @@ if has_httpx: import httpx + from rdflib.contrib.rdf4j.client import ( Transaction, ) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_rollback.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_rollback.py index 491b8e10f..7fb54b4d9 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_rollback.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_rollback.py @@ -5,6 +5,7 @@ import pytest from rdflib.contrib.rdf4j import has_httpx +from rdflib.contrib.rdf4j.exceptions import TransactionClosedError, TransactionPingError pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" @@ -12,13 +13,10 @@ if has_httpx: import httpx + from rdflib.contrib.rdf4j.client import ( Repository, ) - from rdflib.contrib.rdf4j.exceptions import ( - TransactionClosedError, - TransactionPingError, - ) def test_repo_transaction_rollback( diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_size.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_size.py index 6326983c9..de5b214e8 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_size.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_size.py @@ -12,6 +12,7 @@ if has_httpx: import httpx + from rdflib.contrib.rdf4j.client import ( Transaction, ) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_update.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_update.py index faec3a42d..e14bb59f8 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_update.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_update.py @@ -12,6 +12,7 @@ if has_httpx: import httpx + from rdflib.contrib.rdf4j.client import ( Transaction, ) diff --git 
a/test/test_rdf4j/test_unit/repository/test_repo_transaction_upload.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_upload.py index dd2a0866a..38134a306 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_upload.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_upload.py @@ -15,6 +15,7 @@ if has_httpx: import httpx + from rdflib.contrib.rdf4j.client import Transaction diff --git a/test/test_rdf4j/test_unit/repository/test_repo_update.py b/test/test_rdf4j/test_unit/repository/test_repo_update.py index 200c4f05a..d989204d1 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_update.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_update.py @@ -12,6 +12,7 @@ if has_httpx: import httpx + from rdflib.contrib.rdf4j.client import ( Repository, ) diff --git a/test/test_rdf4j/test_unit/repository/test_repo_upload.py b/test/test_rdf4j/test_unit/repository/test_repo_upload.py index 85b357d79..9049d5bf5 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_upload.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_upload.py @@ -15,6 +15,7 @@ if has_httpx: import httpx + from rdflib.contrib.rdf4j.client import Repository From 1faa1e30ea6957db8c600e458b1aa21331c5b777 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Fri, 7 Nov 2025 13:51:05 +1000 Subject: [PATCH 51/54] fix: add conditional import for GraphDBClient --- rdflib/contrib/graphdb/__init__.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/rdflib/contrib/graphdb/__init__.py b/rdflib/contrib/graphdb/__init__.py index ca6a8ef4d..2c112ef55 100644 --- a/rdflib/contrib/graphdb/__init__.py +++ b/rdflib/contrib/graphdb/__init__.py @@ -1,3 +1,6 @@ -from .client import GraphDBClient +from rdflib.contrib.rdf4j import has_httpx -__all__ = ["GraphDBClient"] +if has_httpx: + from .client import GraphDBClient + + __all__ = ["GraphDBClient"] From 89171df737d0c096cbdff993720287be6e167072 Mon Sep 17 00:00:00 2001 From: Edmond Chuc 
Date: Fri, 7 Nov 2025 13:51:34 +1000 Subject: [PATCH 52/54] test: ignore rdf4j and graphdb client.py for docstring tests --- pyproject.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 4101742e0..0d7582c26 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -213,6 +213,8 @@ addopts = [ "--ignore=admin", "--ignore=devtools", "--ignore=rdflib/extras/external_graph_libs.py", + "--ignore=rdflib/contrib/graphdb/client.py", + "--ignore=rdflib/contrib/rdf4j/client.py", "--ignore-glob=docs/*.py", "--ignore-glob=site/*", "--strict-markers", From 38b31414f002c901a584e20a5530ad78ad3819f7 Mon Sep 17 00:00:00 2001 From: Edmond Chuc <37032744+edmondchuc@users.noreply.github.com> Date: Fri, 14 Nov 2025 17:35:08 +1000 Subject: [PATCH 53/54] feat: RDF4J store (#3316) * fix: handle graph_name when it's a str * feat: wip RDF4JStore Implements: - init/open - close - add - addN - contexts - add_graph - remove_graph - __len__ * feat: RDF4J Store now supports handling namespaces and prefixes * feat: RDF4J Store triples and quads querying * feat: ensure no bnodes are used to cross document/query boundaries * chore: formatting * test: improve e2e test speed by reusing the same container and cleaning up the repo between each tests * feat: add RDF4JStore remove * feat: add RDF4JStore triples_choices tests * feat: add RDF4JStore SPARQL query and update tests * chore: fix mypy issues * test: error handling on client fixture * test: mark testcontainer tests and put test imports behind the has_httpx flag * build: remove upper python bound, bump testcontainers, and revert back to stable v7 poetry.lock * test: put testcontainer tests behind a flag for unsupported python versions * test: install rdf4j extras for python 3.9 and above * ci: skip testcontainer tests on non-linux runners --- .github/workflows/validate.yaml | 12 +- poetry.lock | 1026 ++++++++--------- pyproject.toml | 7 +- rdflib/contrib/rdf4j/client.py | 55 +- rdflib/contrib/rdf4j/util.py | 
31 +- rdflib/plugins/stores/rdf4j.py | 223 ++++ test/test_rdf4j/test_e2e/conftest.py | 41 +- .../test_e2e/test_e2e_rdf4j_store.py | 528 +++++++++ .../test_e2e/test_e2e_repo_management.py | 38 + .../test_e2e/test_e2e_repo_query.py | 1 + .../test_e2e/test_e2e_repo_transaction.py | 4 + .../test_e2e/test_e2e_repo_update.py | 1 + .../test_e2e/test_graphdb/conftest.py | 12 +- .../test_unit/repository/test_repo_delete.py | 19 +- .../test_unit/repository/test_repo_get.py | 19 +- .../repository/test_repo_overwrite.py | 9 +- .../test_unit/repository/test_repo_size.py | 9 +- .../repository/test_repo_transaction_get.py | 20 +- .../test_unit/util/test_rdf4j_util.py | 23 +- tox.ini | 1 + 20 files changed, 1442 insertions(+), 637 deletions(-) create mode 100644 rdflib/plugins/stores/rdf4j.py create mode 100644 test/test_rdf4j/test_e2e/test_e2e_rdf4j_store.py diff --git a/.github/workflows/validate.yaml b/.github/workflows/validate.yaml index 838a26ef3..98bce1274 100644 --- a/.github/workflows/validate.yaml +++ b/.github/workflows/validate.yaml @@ -12,7 +12,6 @@ env: POETRY_CACHE_DIR: ${{ github.workspace }}/.var/cache/pypoetry PIP_CACHE_DIR: ${{ github.workspace }}/.var/cache/pip - concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true @@ -52,7 +51,7 @@ jobs: PREPARATION: "sudo apt-get install -y firejail" extensive-tests: true TOX_TEST_HARNESS: "firejail --net=none --" - TOX_PYTEST_EXTRA_ARGS: "-m 'not webtest'" + TOX_PYTEST_EXTRA_ARGS: "-m 'not (testcontainer or webtest)'" steps: - uses: actions/checkout@v4 - name: Cache XDG_CACHE_HOME @@ -84,6 +83,13 @@ jobs: shell: bash run: | ${{ matrix.PREPARATION }} + - name: Set testcontainer exclusion for non-Linux + if: ${{ matrix.os != 'ubuntu-latest' }} + shell: bash + run: | + if [ -z "${{ matrix.TOX_PYTEST_EXTRA_ARGS }}" ]; then + echo "TOX_PYTEST_EXTRA_ARGS=-m 'not testcontainer'" >> $GITHUB_ENV + fi - name: Run validation shell: bash run: | @@ -97,7 +103,7 @@ jobs: gha:validate env: GITHUB_TOKEN: ${{ 
secrets.GITHUB_TOKEN }} - TOX_PYTEST_EXTRA_ARGS: ${{ matrix.TOX_PYTEST_EXTRA_ARGS }} + TOX_PYTEST_EXTRA_ARGS: ${{ matrix.TOX_PYTEST_EXTRA_ARGS || env.TOX_PYTEST_EXTRA_ARGS }} TOX_TEST_HARNESS: ${{ matrix.TOX_TEST_HARNESS }} TOX_EXTRA_COMMAND: ${{ matrix.TOX_EXTRA_COMMAND }} - uses: actions/upload-artifact@v4 diff --git a/poetry.lock b/poetry.lock index 590f32895..c138e1b9a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,23 +2,25 @@ [[package]] name = "anyio" -version = "4.11.0" -description = "High-level concurrency and networking framework on top of asyncio or Trio" +version = "4.5.2" +description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = true -python-versions = ">=3.9" +python-versions = ">=3.8" files = [ - {file = "anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc"}, - {file = "anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4"}, + {file = "anyio-4.5.2-py3-none-any.whl", hash = "sha256:c011ee36bc1e8ba40e5a81cb9df91925c218fe9b778554e0b56a21e1b5d4716f"}, + {file = "anyio-4.5.2.tar.gz", hash = "sha256:23009af4ed04ce05991845451e11ef02fc7c5ed29179ac9a420e5ad0ac7ddc5b"}, ] [package.dependencies] exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" -typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] -trio = ["trio (>=0.31.0)"] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] +trio = ["trio (>=0.26.1)"] [[package]] name = "babel" @@ -31,23 +33,25 @@ files = 
[ {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, ] +[package.dependencies] +pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} + [package.extras] dev = ["backports.zoneinfo", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata"] [[package]] name = "backrefs" -version = "5.9" +version = "5.7.post1" description = "A wrapper around re and regex that adds additional back references." optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" files = [ - {file = "backrefs-5.9-py310-none-any.whl", hash = "sha256:db8e8ba0e9de81fcd635f440deab5ae5f2591b54ac1ebe0550a2ca063488cd9f"}, - {file = "backrefs-5.9-py311-none-any.whl", hash = "sha256:6907635edebbe9b2dc3de3a2befff44d74f30a4562adbb8b36f21252ea19c5cf"}, - {file = "backrefs-5.9-py312-none-any.whl", hash = "sha256:7fdf9771f63e6028d7fee7e0c497c81abda597ea45d6b8f89e8ad76994f5befa"}, - {file = "backrefs-5.9-py313-none-any.whl", hash = "sha256:cc37b19fa219e93ff825ed1fed8879e47b4d89aa7a1884860e2db64ccd7c676b"}, - {file = "backrefs-5.9-py314-none-any.whl", hash = "sha256:df5e169836cc8acb5e440ebae9aad4bf9d15e226d3bad049cf3f6a5c20cc8dc9"}, - {file = "backrefs-5.9-py39-none-any.whl", hash = "sha256:f48ee18f6252b8f5777a22a00a09a85de0ca931658f1dd96d4406a34f3748c60"}, - {file = "backrefs-5.9.tar.gz", hash = "sha256:808548cb708d66b82ee231f962cb36faaf4f2baab032f2fbb783e9c2fdddaa59"}, + {file = "backrefs-5.7.post1-py310-none-any.whl", hash = "sha256:c5e3fd8fd185607a7cb1fefe878cfb09c34c0be3c18328f12c574245f1c0287e"}, + {file = "backrefs-5.7.post1-py311-none-any.whl", hash = "sha256:712ea7e494c5bf3291156e28954dd96d04dc44681d0e5c030adf2623d5606d51"}, + {file = "backrefs-5.7.post1-py312-none-any.whl", hash = "sha256:a6142201c8293e75bce7577ac29e1a9438c12e730d73a59efdd1b75528d1a6c5"}, + {file = "backrefs-5.7.post1-py38-none-any.whl", hash = 
"sha256:ec61b1ee0a4bfa24267f6b67d0f8c5ffdc8e0d7dc2f18a2685fd1d8d9187054a"}, + {file = "backrefs-5.7.post1-py39-none-any.whl", hash = "sha256:05c04af2bf752bb9a6c9dcebb2aff2fab372d3d9d311f2a138540e307756bd3a"}, + {file = "backrefs-5.7.post1.tar.gz", hash = "sha256:8b0f83b770332ee2f1c8244f4e03c77d127a0fa529328e6a0e77fa25bee99678"}, ] [package.extras] @@ -55,12 +59,12 @@ extras = ["regex"] [[package]] name = "berkeleydb" -version = "18.1.15" +version = "18.1.10" description = "Python bindings for Oracle Berkeley DB" optional = true python-versions = "*" files = [ - {file = "berkeleydb-18.1.15.tar.gz", hash = "sha256:7afa53143d754c6bb2c85656c1325ebae518adcfcd1b59e13cc2abb88ddf758e"}, + {file = "berkeleydb-18.1.10.tar.gz", hash = "sha256:426341a16007a9002d987a6f4d97226f8eafffcb1a0488488053d38a3127c81a"}, ] [[package]] @@ -122,13 +126,13 @@ files = [ [[package]] name = "build" -version = "1.3.0" +version = "1.2.2.post1" description = "A simple, correct Python build frontend" optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" files = [ - {file = "build-1.3.0-py3-none-any.whl", hash = "sha256:7145f0b5061ba90a1500d60bd1b13ca0a8a4cebdd0cc16ed8adf1c0e739f43b4"}, - {file = "build-1.3.0.tar.gz", hash = "sha256:698edd0ea270bde950f53aed21f3a0135672206f3911e0176261a31e0e07b397"}, + {file = "build-1.2.2.post1-py3-none-any.whl", hash = "sha256:1d61c0887fa860c01971625baae8bdd338e517b836a2f70dd1f7aa3a6b2fc5b5"}, + {file = "build-1.2.2.post1.tar.gz", hash = "sha256:b36993e92ca9375a219c99e606a122ff365a760a2d4bba0caa09bd5278b608b7"}, ] [package.dependencies] @@ -139,8 +143,11 @@ pyproject_hooks = "*" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} [package.extras] +docs = ["furo (>=2023.08.17)", "sphinx (>=7.0,<8.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)", "sphinx-issues (>=3.0.0)"] +test = ["build[uv,virtualenv]", "filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures 
(>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "setuptools (>=56.0.0)", "setuptools (>=56.0.0)", "setuptools (>=67.8.0)", "wheel (>=0.36.0)"] +typing = ["build[uv]", "importlib-metadata (>=5.1)", "mypy (>=1.9.0,<1.10.0)", "tomli", "typing-extensions (>=3.7.4.3)"] uv = ["uv (>=0.1.18)"] -virtualenv = ["virtualenv (>=20.11)", "virtualenv (>=20.17)", "virtualenv (>=20.31)"] +virtualenv = ["virtualenv (>=20.0.35)"] [[package]] name = "certifi" @@ -302,115 +309,83 @@ files = [ [[package]] name = "coverage" -version = "7.10.7" +version = "7.6.1" description = "Code coverage measurement for Python" optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" files = [ - {file = "coverage-7.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fc04cc7a3db33664e0c2d10eb8990ff6b3536f6842c9590ae8da4c614b9ed05a"}, - {file = "coverage-7.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e201e015644e207139f7e2351980feb7040e6f4b2c2978892f3e3789d1c125e5"}, - {file = "coverage-7.10.7-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:240af60539987ced2c399809bd34f7c78e8abe0736af91c3d7d0e795df633d17"}, - {file = "coverage-7.10.7-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8421e088bc051361b01c4b3a50fd39a4b9133079a2229978d9d30511fd05231b"}, - {file = "coverage-7.10.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6be8ed3039ae7f7ac5ce058c308484787c86e8437e72b30bf5e88b8ea10f3c87"}, - {file = "coverage-7.10.7-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e28299d9f2e889e6d51b1f043f58d5f997c373cc12e6403b90df95b8b047c13e"}, - {file = "coverage-7.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c4e16bd7761c5e454f4efd36f345286d6f7c5fa111623c355691e2755cae3b9e"}, - {file = "coverage-7.10.7-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:b1c81d0e5e160651879755c9c675b974276f135558cf4ba79fee7b8413a515df"}, - {file = "coverage-7.10.7-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:606cc265adc9aaedcc84f1f064f0e8736bc45814f15a357e30fca7ecc01504e0"}, - {file = "coverage-7.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:10b24412692df990dbc34f8fb1b6b13d236ace9dfdd68df5b28c2e39cafbba13"}, - {file = "coverage-7.10.7-cp310-cp310-win32.whl", hash = "sha256:b51dcd060f18c19290d9b8a9dd1e0181538df2ce0717f562fff6cf74d9fc0b5b"}, - {file = "coverage-7.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:3a622ac801b17198020f09af3eaf45666b344a0d69fc2a6ffe2ea83aeef1d807"}, - {file = "coverage-7.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a609f9c93113be646f44c2a0256d6ea375ad047005d7f57a5c15f614dc1b2f59"}, - {file = "coverage-7.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:65646bb0359386e07639c367a22cf9b5bf6304e8630b565d0626e2bdf329227a"}, - {file = "coverage-7.10.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5f33166f0dfcce728191f520bd2692914ec70fac2713f6bf3ce59c3deacb4699"}, - {file = "coverage-7.10.7-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:35f5e3f9e455bb17831876048355dca0f758b6df22f49258cb5a91da23ef437d"}, - {file = "coverage-7.10.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4da86b6d62a496e908ac2898243920c7992499c1712ff7c2b6d837cc69d9467e"}, - {file = "coverage-7.10.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6b8b09c1fad947c84bbbc95eca841350fad9cbfa5a2d7ca88ac9f8d836c92e23"}, - {file = "coverage-7.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4376538f36b533b46f8971d3a3e63464f2c7905c9800db97361c43a2b14792ab"}, - {file = "coverage-7.10.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:121da30abb574f6ce6ae09840dae322bef734480ceafe410117627aa54f76d82"}, - {file = 
"coverage-7.10.7-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:88127d40df529336a9836870436fc2751c339fbaed3a836d42c93f3e4bd1d0a2"}, - {file = "coverage-7.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ba58bbcd1b72f136080c0bccc2400d66cc6115f3f906c499013d065ac33a4b61"}, - {file = "coverage-7.10.7-cp311-cp311-win32.whl", hash = "sha256:972b9e3a4094b053a4e46832b4bc829fc8a8d347160eb39d03f1690316a99c14"}, - {file = "coverage-7.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:a7b55a944a7f43892e28ad4bc0561dfd5f0d73e605d1aa5c3c976b52aea121d2"}, - {file = "coverage-7.10.7-cp311-cp311-win_arm64.whl", hash = "sha256:736f227fb490f03c6488f9b6d45855f8e0fd749c007f9303ad30efab0e73c05a"}, - {file = "coverage-7.10.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7bb3b9ddb87ef7725056572368040c32775036472d5a033679d1fa6c8dc08417"}, - {file = "coverage-7.10.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:18afb24843cbc175687225cab1138c95d262337f5473512010e46831aa0c2973"}, - {file = "coverage-7.10.7-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:399a0b6347bcd3822be369392932884b8216d0944049ae22925631a9b3d4ba4c"}, - {file = "coverage-7.10.7-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314f2c326ded3f4b09be11bc282eb2fc861184bc95748ae67b360ac962770be7"}, - {file = "coverage-7.10.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c41e71c9cfb854789dee6fc51e46743a6d138b1803fab6cb860af43265b42ea6"}, - {file = "coverage-7.10.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc01f57ca26269c2c706e838f6422e2a8788e41b3e3c65e2f41148212e57cd59"}, - {file = "coverage-7.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a6442c59a8ac8b85812ce33bc4d05bde3fb22321fa8294e2a5b487c3505f611b"}, - {file = "coverage-7.10.7-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:78a384e49f46b80fb4c901d52d92abe098e78768ed829c673fbb53c498bef73a"}, - {file = "coverage-7.10.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:5e1e9802121405ede4b0133aa4340ad8186a1d2526de5b7c3eca519db7bb89fb"}, - {file = "coverage-7.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d41213ea25a86f69efd1575073d34ea11aabe075604ddf3d148ecfec9e1e96a1"}, - {file = "coverage-7.10.7-cp312-cp312-win32.whl", hash = "sha256:77eb4c747061a6af8d0f7bdb31f1e108d172762ef579166ec84542f711d90256"}, - {file = "coverage-7.10.7-cp312-cp312-win_amd64.whl", hash = "sha256:f51328ffe987aecf6d09f3cd9d979face89a617eacdaea43e7b3080777f647ba"}, - {file = "coverage-7.10.7-cp312-cp312-win_arm64.whl", hash = "sha256:bda5e34f8a75721c96085903c6f2197dc398c20ffd98df33f866a9c8fd95f4bf"}, - {file = "coverage-7.10.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:981a651f543f2854abd3b5fcb3263aac581b18209be49863ba575de6edf4c14d"}, - {file = "coverage-7.10.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:73ab1601f84dc804f7812dc297e93cd99381162da39c47040a827d4e8dafe63b"}, - {file = "coverage-7.10.7-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a8b6f03672aa6734e700bbcd65ff050fd19cddfec4b031cc8cf1c6967de5a68e"}, - {file = "coverage-7.10.7-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10b6ba00ab1132a0ce4428ff68cf50a25efd6840a42cdf4239c9b99aad83be8b"}, - {file = "coverage-7.10.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c79124f70465a150e89340de5963f936ee97097d2ef76c869708c4248c63ca49"}, - {file = "coverage-7.10.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:69212fbccdbd5b0e39eac4067e20a4a5256609e209547d86f740d68ad4f04911"}, - {file = "coverage-7.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7ea7c6c9d0d286d04ed3541747e6597cbe4971f22648b68248f7ddcd329207f0"}, - {file = 
"coverage-7.10.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b9be91986841a75042b3e3243d0b3cb0b2434252b977baaf0cd56e960fe1e46f"}, - {file = "coverage-7.10.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:b281d5eca50189325cfe1f365fafade89b14b4a78d9b40b05ddd1fc7d2a10a9c"}, - {file = "coverage-7.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:99e4aa63097ab1118e75a848a28e40d68b08a5e19ce587891ab7fd04475e780f"}, - {file = "coverage-7.10.7-cp313-cp313-win32.whl", hash = "sha256:dc7c389dce432500273eaf48f410b37886be9208b2dd5710aaf7c57fd442c698"}, - {file = "coverage-7.10.7-cp313-cp313-win_amd64.whl", hash = "sha256:cac0fdca17b036af3881a9d2729a850b76553f3f716ccb0360ad4dbc06b3b843"}, - {file = "coverage-7.10.7-cp313-cp313-win_arm64.whl", hash = "sha256:4b6f236edf6e2f9ae8fcd1332da4e791c1b6ba0dc16a2dc94590ceccb482e546"}, - {file = "coverage-7.10.7-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a0ec07fd264d0745ee396b666d47cef20875f4ff2375d7c4f58235886cc1ef0c"}, - {file = "coverage-7.10.7-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:dd5e856ebb7bfb7672b0086846db5afb4567a7b9714b8a0ebafd211ec7ce6a15"}, - {file = "coverage-7.10.7-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f57b2a3c8353d3e04acf75b3fed57ba41f5c0646bbf1d10c7c282291c97936b4"}, - {file = "coverage-7.10.7-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1ef2319dd15a0b009667301a3f84452a4dc6fddfd06b0c5c53ea472d3989fbf0"}, - {file = "coverage-7.10.7-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83082a57783239717ceb0ad584de3c69cf581b2a95ed6bf81ea66034f00401c0"}, - {file = "coverage-7.10.7-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:50aa94fb1fb9a397eaa19c0d5ec15a5edd03a47bf1a3a6111a16b36e190cff65"}, - {file = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:2120043f147bebb41c85b97ac45dd173595ff14f2a584f2963891cbcc3091541"}, - {file = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2fafd773231dd0378fdba66d339f84904a8e57a262f583530f4f156ab83863e6"}, - {file = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:0b944ee8459f515f28b851728ad224fa2d068f1513ef6b7ff1efafeb2185f999"}, - {file = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4b583b97ab2e3efe1b3e75248a9b333bd3f8b0b1b8e5b45578e05e5850dfb2c2"}, - {file = "coverage-7.10.7-cp313-cp313t-win32.whl", hash = "sha256:2a78cd46550081a7909b3329e2266204d584866e8d97b898cd7fb5ac8d888b1a"}, - {file = "coverage-7.10.7-cp313-cp313t-win_amd64.whl", hash = "sha256:33a5e6396ab684cb43dc7befa386258acb2d7fae7f67330ebb85ba4ea27938eb"}, - {file = "coverage-7.10.7-cp313-cp313t-win_arm64.whl", hash = "sha256:86b0e7308289ddde73d863b7683f596d8d21c7d8664ce1dee061d0bcf3fbb4bb"}, - {file = "coverage-7.10.7-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b06f260b16ead11643a5a9f955bd4b5fd76c1a4c6796aeade8520095b75de520"}, - {file = "coverage-7.10.7-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:212f8f2e0612778f09c55dd4872cb1f64a1f2b074393d139278ce902064d5b32"}, - {file = "coverage-7.10.7-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3445258bcded7d4aa630ab8296dea4d3f15a255588dd535f980c193ab6b95f3f"}, - {file = "coverage-7.10.7-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb45474711ba385c46a0bfe696c695a929ae69ac636cda8f532be9e8c93d720a"}, - {file = "coverage-7.10.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:813922f35bd800dca9994c5971883cbc0d291128a5de6b167c7aa697fcf59360"}, - {file = "coverage-7.10.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:93c1b03552081b2a4423091d6fb3787265b8f86af404cff98d1b5342713bdd69"}, - {file = 
"coverage-7.10.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:cc87dd1b6eaf0b848eebb1c86469b9f72a1891cb42ac7adcfbce75eadb13dd14"}, - {file = "coverage-7.10.7-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:39508ffda4f343c35f3236fe8d1a6634a51f4581226a1262769d7f970e73bffe"}, - {file = "coverage-7.10.7-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:925a1edf3d810537c5a3abe78ec5530160c5f9a26b1f4270b40e62cc79304a1e"}, - {file = "coverage-7.10.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2c8b9a0636f94c43cd3576811e05b89aa9bc2d0a85137affc544ae5cb0e4bfbd"}, - {file = "coverage-7.10.7-cp314-cp314-win32.whl", hash = "sha256:b7b8288eb7cdd268b0304632da8cb0bb93fadcfec2fe5712f7b9cc8f4d487be2"}, - {file = "coverage-7.10.7-cp314-cp314-win_amd64.whl", hash = "sha256:1ca6db7c8807fb9e755d0379ccc39017ce0a84dcd26d14b5a03b78563776f681"}, - {file = "coverage-7.10.7-cp314-cp314-win_arm64.whl", hash = "sha256:097c1591f5af4496226d5783d036bf6fd6cd0cbc132e071b33861de756efb880"}, - {file = "coverage-7.10.7-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:a62c6ef0d50e6de320c270ff91d9dd0a05e7250cac2a800b7784bae474506e63"}, - {file = "coverage-7.10.7-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9fa6e4dd51fe15d8738708a973470f67a855ca50002294852e9571cdbd9433f2"}, - {file = "coverage-7.10.7-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8fb190658865565c549b6b4706856d6a7b09302c797eb2cf8e7fe9dabb043f0d"}, - {file = "coverage-7.10.7-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:affef7c76a9ef259187ef31599a9260330e0335a3011732c4b9effa01e1cd6e0"}, - {file = "coverage-7.10.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e16e07d85ca0cf8bafe5f5d23a0b850064e8e945d5677492b06bbe6f09cc699"}, - {file = "coverage-7.10.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:03ffc58aacdf65d2a82bbeb1ffe4d01ead4017a21bfd0454983b88ca73af94b9"}, - {file = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1b4fd784344d4e52647fd7857b2af5b3fbe6c239b0b5fa63e94eb67320770e0f"}, - {file = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:0ebbaddb2c19b71912c6f2518e791aa8b9f054985a0769bdb3a53ebbc765c6a1"}, - {file = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:a2d9a3b260cc1d1dbdb1c582e63ddcf5363426a1a68faa0f5da28d8ee3c722a0"}, - {file = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a3cc8638b2480865eaa3926d192e64ce6c51e3d29c849e09d5b4ad95efae5399"}, - {file = "coverage-7.10.7-cp314-cp314t-win32.whl", hash = "sha256:67f8c5cbcd3deb7a60b3345dffc89a961a484ed0af1f6f73de91705cc6e31235"}, - {file = "coverage-7.10.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e1ed71194ef6dea7ed2d5cb5f7243d4bcd334bfb63e59878519be558078f848d"}, - {file = "coverage-7.10.7-cp314-cp314t-win_arm64.whl", hash = "sha256:7fe650342addd8524ca63d77b2362b02345e5f1a093266787d210c70a50b471a"}, - {file = "coverage-7.10.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fff7b9c3f19957020cac546c70025331113d2e61537f6e2441bc7657913de7d3"}, - {file = "coverage-7.10.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bc91b314cef27742da486d6839b677b3f2793dfe52b51bbbb7cf736d5c29281c"}, - {file = "coverage-7.10.7-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:567f5c155eda8df1d3d439d40a45a6a5f029b429b06648235f1e7e51b522b396"}, - {file = "coverage-7.10.7-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2af88deffcc8a4d5974cf2d502251bc3b2db8461f0b66d80a449c33757aa9f40"}, - {file = "coverage-7.10.7-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c7315339eae3b24c2d2fa1ed7d7a38654cba34a13ef19fbcb9425da46d3dc594"}, - {file = 
"coverage-7.10.7-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:912e6ebc7a6e4adfdbb1aec371ad04c68854cd3bf3608b3514e7ff9062931d8a"}, - {file = "coverage-7.10.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f49a05acd3dfe1ce9715b657e28d138578bc40126760efb962322c56e9ca344b"}, - {file = "coverage-7.10.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:cce2109b6219f22ece99db7644b9622f54a4e915dad65660ec435e89a3ea7cc3"}, - {file = "coverage-7.10.7-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:f3c887f96407cea3916294046fc7dab611c2552beadbed4ea901cbc6a40cc7a0"}, - {file = "coverage-7.10.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:635adb9a4507c9fd2ed65f39693fa31c9a3ee3a8e6dc64df033e8fdf52a7003f"}, - {file = "coverage-7.10.7-cp39-cp39-win32.whl", hash = "sha256:5a02d5a850e2979b0a014c412573953995174743a3f7fa4ea5a6e9a3c5617431"}, - {file = "coverage-7.10.7-cp39-cp39-win_amd64.whl", hash = "sha256:c134869d5ffe34547d14e174c866fd8fe2254918cc0a95e99052903bc1543e07"}, - {file = "coverage-7.10.7-py3-none-any.whl", hash = "sha256:f7941f6f2fe6dd6807a1208737b8a0cbcf1cc6d7b07d24998ad2d63590868260"}, - {file = "coverage-7.10.7.tar.gz", hash = "sha256:f4ab143ab113be368a3e9b795f9cd7906c5ef407d6173fe9675a902e1fffc239"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, + {file = 
"coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, + {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, + {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", 
hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, + {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, + {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, + {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, + {file = 
"coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, + {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, + {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = 
"sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, + {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, + {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, + {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, + {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, + 
{file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, + {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, + {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, + {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, + {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, ] [package.dependencies] @@ -572,13 +547,13 @@ all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2 [[package]] name = "importlib-metadata" -version = "8.7.0" +version = "8.5.0" description = "Read metadata from Python packages" optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" files = [ - {file = "importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"}, - {file = "importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000"}, + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, ] [package.dependencies] @@ -590,7 +565,7 @@ cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib_resources (>=1.3)", "jaraco.test 
(>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] type = ["pytest-mypy"] [[package]] @@ -796,118 +771,89 @@ test = ["coverage[toml] (>=7.2.5)", "mypy (>=1.2.0)", "pytest (>=7.3.0)", "pytes [[package]] name = "markdown" -version = "3.9" +version = "3.7" description = "Python implementation of John Gruber's Markdown." optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" files = [ - {file = "markdown-3.9-py3-none-any.whl", hash = "sha256:9f4d91ed810864ea88a6f32c07ba8bee1346c0cc1f6b1f9f6c822f2a9667d280"}, - {file = "markdown-3.9.tar.gz", hash = "sha256:d2900fe1782bd33bdbbd56859defef70c2e78fc46668f8eb9df3128138f2cb6a"}, + {file = "Markdown-3.7-py3-none-any.whl", hash = "sha256:7eb6df5690b81a1d7942992c97fad2938e956e79df20cbc6186e9c3a77b1c803"}, + {file = "markdown-3.7.tar.gz", hash = "sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2"}, ] [package.dependencies] importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} [package.extras] -docs = ["mdx_gh_links (>=0.2)", "mkdocs (>=1.6)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] +docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] testing = ["coverage", "pyyaml"] [[package]] name = "markupsafe" -version = "3.0.3" +version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false -python-versions = ">=3.9" +python-versions = ">=3.7" files = [ - {file = "markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559"}, - {file = "markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419"}, - {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695"}, - {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591"}, - {file = "markupsafe-3.0.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c"}, - {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f"}, - {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6"}, - {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1"}, - {file = "markupsafe-3.0.3-cp310-cp310-win32.whl", hash = "sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa"}, - {file = "markupsafe-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8"}, - {file = "markupsafe-3.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1"}, - {file = "markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad"}, - {file = 
"markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a"}, - {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50"}, - {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf"}, - {file = "markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f"}, - {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a"}, - {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115"}, - {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a"}, - {file = "markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19"}, - {file = "markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01"}, - {file = "markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c"}, - {file = "markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e"}, - {file = "markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce"}, - {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d"}, - {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d"}, - {file = "markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a"}, - {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b"}, - {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f"}, - {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b"}, - {file = "markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d"}, - {file = "markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c"}, - {file = "markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f"}, - {file = "markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795"}, - {file = "markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219"}, - {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6"}, - {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676"}, - 
{file = "markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9"}, - {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1"}, - {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc"}, - {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12"}, - {file = "markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed"}, - {file = "markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5"}, - {file = "markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485"}, - {file = "markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73"}, - {file = "markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37"}, - {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19"}, - {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025"}, - {file = "markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6"}, - {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f"}, - {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb"}, - {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009"}, - {file = "markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354"}, - {file = "markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218"}, - {file = "markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287"}, - {file = "markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe"}, - {file = "markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026"}, - {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737"}, - {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97"}, - {file = "markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d"}, - {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda"}, - {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf"}, - {file = 
"markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe"}, - {file = "markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9"}, - {file = "markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581"}, - {file = "markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4"}, - {file = "markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab"}, - {file = "markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175"}, - {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634"}, - {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50"}, - {file = "markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e"}, - {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5"}, - {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523"}, - {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc"}, - {file = "markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = 
"sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d"}, - {file = "markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9"}, - {file = "markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa"}, - {file = "markupsafe-3.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15d939a21d546304880945ca1ecb8a039db6b4dc49b2c5a400387cdae6a62e26"}, - {file = "markupsafe-3.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f71a396b3bf33ecaa1626c255855702aca4d3d9fea5e051b41ac59a9c1c41edc"}, - {file = "markupsafe-3.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f4b68347f8c5eab4a13419215bdfd7f8c9b19f2b25520968adfad23eb0ce60c"}, - {file = "markupsafe-3.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8fc20152abba6b83724d7ff268c249fa196d8259ff481f3b1476383f8f24e42"}, - {file = "markupsafe-3.0.3-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:949b8d66bc381ee8b007cd945914c721d9aba8e27f71959d750a46f7c282b20b"}, - {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3537e01efc9d4dccdf77221fb1cb3b8e1a38d5428920e0657ce299b20324d758"}, - {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:591ae9f2a647529ca990bc681daebdd52c8791ff06c2bfa05b65163e28102ef2"}, - {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a320721ab5a1aba0a233739394eb907f8c8da5c98c9181d1161e77a0c8e36f2d"}, - {file = "markupsafe-3.0.3-cp39-cp39-win32.whl", hash = "sha256:df2449253ef108a379b8b5d6b43f4b1a8e81a061d6537becd5582fba5f9196d7"}, - {file = "markupsafe-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:7c3fb7d25180895632e5d3148dbdc29ea38ccb7fd210aa27acbd1201a1902c6e"}, - {file = "markupsafe-3.0.3-cp39-cp39-win_arm64.whl", hash = 
"sha256:38664109c14ffc9e7437e86b4dceb442b0096dfe3541d7864d9cbe1da4cf36c8"}, - {file = "markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + 
{file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] [[package]] @@ -1172,116 +1118,108 @@ files = [ [[package]] name = "networkx" -version = "3.2.1" +version = "3.1" description = "Python package for creating and manipulating graphs and networks" optional = true -python-versions = ">=3.9" +python-versions = ">=3.8" files = [ - {file = "networkx-3.2.1-py3-none-any.whl", hash = 
"sha256:f18c69adc97877c42332c170849c96cefa91881c99a7cb3e95b7c659ebdc1ec2"}, - {file = "networkx-3.2.1.tar.gz", hash = "sha256:9f1bb5cf3409bf324e0a722c20bdb4c20ee39bf1c30ce8ae499c8502b0b5e0c6"}, + {file = "networkx-3.1-py3-none-any.whl", hash = "sha256:4f33f68cb2afcf86f28a45f43efc27a9386b535d567d2127f8f61d51dec58d36"}, + {file = "networkx-3.1.tar.gz", hash = "sha256:de346335408f84de0eada6ff9fafafff9bcda11f0a0dfaa931133debb146ab61"}, ] [package.extras] -default = ["matplotlib (>=3.5)", "numpy (>=1.22)", "pandas (>=1.4)", "scipy (>=1.9,!=1.11.0,!=1.11.1)"] -developer = ["changelist (==0.4)", "mypy (>=1.1)", "pre-commit (>=3.2)", "rtoml"] -doc = ["nb2plots (>=0.7)", "nbconvert (<7.9)", "numpydoc (>=1.6)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.14)", "sphinx (>=7)", "sphinx-gallery (>=0.14)", "texext (>=0.6.7)"] -extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.11)", "sympy (>=1.10)"] -test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] +default = ["matplotlib (>=3.4)", "numpy (>=1.20)", "pandas (>=1.3)", "scipy (>=1.8)"] +developer = ["mypy (>=1.1)", "pre-commit (>=3.2)"] +doc = ["nb2plots (>=0.6)", "numpydoc (>=1.5)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.13)", "sphinx (>=6.1)", "sphinx-gallery (>=0.12)", "texext (>=0.6.7)"] +extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.10)", "sympy (>=1.10)"] +test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] [[package]] name = "orjson" -version = "3.11.4" +version = "3.10.15" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = true -python-versions = ">=3.9" +python-versions = ">=3.8" files = [ - {file = "orjson-3.11.4-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e3aa2118a3ece0d25489cbe48498de8a5d580e42e8d9979f65bf47900a15aba1"}, - {file = "orjson-3.11.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a69ab657a4e6733133a3dca82768f2f8b884043714e8d2b9ba9f52b6efef5c44"}, - {file = "orjson-3.11.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3740bffd9816fc0326ddc406098a3a8f387e42223f5f455f2a02a9f834ead80c"}, - {file = "orjson-3.11.4-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65fd2f5730b1bf7f350c6dc896173d3460d235c4be007af73986d7cd9a2acd23"}, - {file = "orjson-3.11.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fdc3ae730541086158d549c97852e2eea6820665d4faf0f41bf99df41bc11ea"}, - {file = "orjson-3.11.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e10b4d65901da88845516ce9f7f9736f9638d19a1d483b3883dc0182e6e5edba"}, - {file = "orjson-3.11.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb6a03a678085f64b97f9d4a9ae69376ce91a3a9e9b56a82b1580d8e1d501aff"}, - {file = "orjson-3.11.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2c82e4f0b1c712477317434761fbc28b044c838b6b1240d895607441412371ac"}, - {file = "orjson-3.11.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:d58c166a18f44cc9e2bad03a327dc2d1a3d2e85b847133cfbafd6bfc6719bd79"}, - {file = "orjson-3.11.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:94f206766bf1ea30e1382e4890f763bd1eefddc580e08fec1ccdc20ddd95c827"}, - {file = "orjson-3.11.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:41bf25fb39a34cf8edb4398818523277ee7096689db352036a9e8437f2f3ee6b"}, - {file = "orjson-3.11.4-cp310-cp310-win32.whl", hash = "sha256:fa9627eba4e82f99ca6d29bc967f09aba446ee2b5a1ea728949ede73d313f5d3"}, - {file = "orjson-3.11.4-cp310-cp310-win_amd64.whl", hash = "sha256:23ef7abc7fca96632d8174ac115e668c1e931b8fe4dde586e92a500bf1914dcc"}, - {file = "orjson-3.11.4-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:5e59d23cd93ada23ec59a96f215139753fbfe3a4d989549bcb390f8c00370b39"}, - {file = 
"orjson-3.11.4-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:5c3aedecfc1beb988c27c79d52ebefab93b6c3921dbec361167e6559aba2d36d"}, - {file = "orjson-3.11.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da9e5301f1c2caa2a9a4a303480d79c9ad73560b2e7761de742ab39fe59d9175"}, - {file = "orjson-3.11.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8873812c164a90a79f65368f8f96817e59e35d0cc02786a5356f0e2abed78040"}, - {file = "orjson-3.11.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5d7feb0741ebb15204e748f26c9638e6665a5fa93c37a2c73d64f1669b0ddc63"}, - {file = "orjson-3.11.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01ee5487fefee21e6910da4c2ee9eef005bee568a0879834df86f888d2ffbdd9"}, - {file = "orjson-3.11.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d40d46f348c0321df01507f92b95a377240c4ec31985225a6668f10e2676f9a"}, - {file = "orjson-3.11.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95713e5fc8af84d8edc75b785d2386f653b63d62b16d681687746734b4dfc0be"}, - {file = "orjson-3.11.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ad73ede24f9083614d6c4ca9a85fe70e33be7bf047ec586ee2363bc7418fe4d7"}, - {file = "orjson-3.11.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:842289889de515421f3f224ef9c1f1efb199a32d76d8d2ca2706fa8afe749549"}, - {file = "orjson-3.11.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3b2427ed5791619851c52a1261b45c233930977e7de8cf36de05636c708fa905"}, - {file = "orjson-3.11.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3c36e524af1d29982e9b190573677ea02781456b2e537d5840e4538a5ec41907"}, - {file = "orjson-3.11.4-cp311-cp311-win32.whl", hash = "sha256:87255b88756eab4a68ec61837ca754e5d10fa8bc47dc57f75cedfeaec358d54c"}, - {file = "orjson-3.11.4-cp311-cp311-win_amd64.whl", hash = 
"sha256:e2d5d5d798aba9a0e1fede8d853fa899ce2cb930ec0857365f700dffc2c7af6a"}, - {file = "orjson-3.11.4-cp311-cp311-win_arm64.whl", hash = "sha256:6bb6bb41b14c95d4f2702bce9975fda4516f1db48e500102fc4d8119032ff045"}, - {file = "orjson-3.11.4-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:d4371de39319d05d3f482f372720b841c841b52f5385bd99c61ed69d55d9ab50"}, - {file = "orjson-3.11.4-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:e41fd3b3cac850eaae78232f37325ed7d7436e11c471246b87b2cd294ec94853"}, - {file = "orjson-3.11.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:600e0e9ca042878c7fdf189cf1b028fe2c1418cc9195f6cb9824eb6ed99cb938"}, - {file = "orjson-3.11.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7bbf9b333f1568ef5da42bc96e18bf30fd7f8d54e9ae066d711056add508e415"}, - {file = "orjson-3.11.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4806363144bb6e7297b8e95870e78d30a649fdc4e23fc84daa80c8ebd366ce44"}, - {file = "orjson-3.11.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad355e8308493f527d41154e9053b86a5be892b3b359a5c6d5d95cda23601cb2"}, - {file = "orjson-3.11.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a7517482667fb9f0ff1b2f16fe5829296ed7a655d04d68cd9711a4d8a4e708"}, - {file = "orjson-3.11.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97eb5942c7395a171cbfecc4ef6701fc3c403e762194683772df4c54cfbb2210"}, - {file = "orjson-3.11.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:149d95d5e018bdd822e3f38c103b1a7c91f88d38a88aada5c4e9b3a73a244241"}, - {file = "orjson-3.11.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:624f3951181eb46fc47dea3d221554e98784c823e7069edb5dbd0dc826ac909b"}, - {file = "orjson-3.11.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:03bfa548cf35e3f8b3a96c4e8e41f753c686ff3d8e182ce275b1751deddab58c"}, - 
{file = "orjson-3.11.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:525021896afef44a68148f6ed8a8bf8375553d6066c7f48537657f64823565b9"}, - {file = "orjson-3.11.4-cp312-cp312-win32.whl", hash = "sha256:b58430396687ce0f7d9eeb3dd47761ca7d8fda8e9eb92b3077a7a353a75efefa"}, - {file = "orjson-3.11.4-cp312-cp312-win_amd64.whl", hash = "sha256:c6dbf422894e1e3c80a177133c0dda260f81428f9de16d61041949f6a2e5c140"}, - {file = "orjson-3.11.4-cp312-cp312-win_arm64.whl", hash = "sha256:d38d2bc06d6415852224fcc9c0bfa834c25431e466dc319f0edd56cca81aa96e"}, - {file = "orjson-3.11.4-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:2d6737d0e616a6e053c8b4acc9eccea6b6cce078533666f32d140e4f85002534"}, - {file = "orjson-3.11.4-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:afb14052690aa328cc118a8e09f07c651d301a72e44920b887c519b313d892ff"}, - {file = "orjson-3.11.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38aa9e65c591febb1b0aed8da4d469eba239d434c218562df179885c94e1a3ad"}, - {file = "orjson-3.11.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f2cf4dfaf9163b0728d061bebc1e08631875c51cd30bf47cb9e3293bfbd7dcd5"}, - {file = "orjson-3.11.4-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89216ff3dfdde0e4070932e126320a1752c9d9a758d6a32ec54b3b9334991a6a"}, - {file = "orjson-3.11.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9daa26ca8e97fae0ce8aa5d80606ef8f7914e9b129b6b5df9104266f764ce436"}, - {file = "orjson-3.11.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c8b2769dc31883c44a9cd126560327767f848eb95f99c36c9932f51090bfce9"}, - {file = "orjson-3.11.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1469d254b9884f984026bd9b0fa5bbab477a4bfe558bba6848086f6d43eb5e73"}, - {file = "orjson-3.11.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:68e44722541983614e37117209a194e8c3ad07838ccb3127d96863c95ec7f1e0"}, - {file = "orjson-3.11.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:8e7805fda9672c12be2f22ae124dcd7b03928d6c197544fe12174b86553f3196"}, - {file = "orjson-3.11.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:04b69c14615fb4434ab867bf6f38b2d649f6f300af30a6705397e895f7aec67a"}, - {file = "orjson-3.11.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:639c3735b8ae7f970066930e58cf0ed39a852d417c24acd4a25fc0b3da3c39a6"}, - {file = "orjson-3.11.4-cp313-cp313-win32.whl", hash = "sha256:6c13879c0d2964335491463302a6ca5ad98105fc5db3565499dcb80b1b4bd839"}, - {file = "orjson-3.11.4-cp313-cp313-win_amd64.whl", hash = "sha256:09bf242a4af98732db9f9a1ec57ca2604848e16f132e3f72edfd3c5c96de009a"}, - {file = "orjson-3.11.4-cp313-cp313-win_arm64.whl", hash = "sha256:a85f0adf63319d6c1ba06fb0dbf997fced64a01179cf17939a6caca662bf92de"}, - {file = "orjson-3.11.4-cp314-cp314-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:42d43a1f552be1a112af0b21c10a5f553983c2a0938d2bbb8ecd8bc9fb572803"}, - {file = "orjson-3.11.4-cp314-cp314-macosx_15_0_arm64.whl", hash = "sha256:26a20f3fbc6c7ff2cb8e89c4c5897762c9d88cf37330c6a117312365d6781d54"}, - {file = "orjson-3.11.4-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e3f20be9048941c7ffa8fc523ccbd17f82e24df1549d1d1fe9317712d19938e"}, - {file = "orjson-3.11.4-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aac364c758dc87a52e68e349924d7e4ded348dedff553889e4d9f22f74785316"}, - {file = "orjson-3.11.4-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d5c54a6d76e3d741dcc3f2707f8eeb9ba2a791d3adbf18f900219b62942803b1"}, - {file = "orjson-3.11.4-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f28485bdca8617b79d44627f5fb04336897041dfd9fa66d383a49d09d86798bc"}, - {file = 
"orjson-3.11.4-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bfc2a484cad3585e4ba61985a6062a4c2ed5c7925db6d39f1fa267c9d166487f"}, - {file = "orjson-3.11.4-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e34dbd508cb91c54f9c9788923daca129fe5b55c5b4eebe713bf5ed3791280cf"}, - {file = "orjson-3.11.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b13c478fa413d4b4ee606ec8e11c3b2e52683a640b006bb586b3041c2ca5f606"}, - {file = "orjson-3.11.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:724ca721ecc8a831b319dcd72cfa370cc380db0bf94537f08f7edd0a7d4e1780"}, - {file = "orjson-3.11.4-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:977c393f2e44845ce1b540e19a786e9643221b3323dae190668a98672d43fb23"}, - {file = "orjson-3.11.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1e539e382cf46edec157ad66b0b0872a90d829a6b71f17cb633d6c160a223155"}, - {file = "orjson-3.11.4-cp314-cp314-win32.whl", hash = "sha256:d63076d625babab9db5e7836118bdfa086e60f37d8a174194ae720161eb12394"}, - {file = "orjson-3.11.4-cp314-cp314-win_amd64.whl", hash = "sha256:0a54d6635fa3aaa438ae32e8570b9f0de36f3f6562c308d2a2a452e8b0592db1"}, - {file = "orjson-3.11.4-cp314-cp314-win_arm64.whl", hash = "sha256:78b999999039db3cf58f6d230f524f04f75f129ba3d1ca2ed121f8657e575d3d"}, - {file = "orjson-3.11.4-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:405261b0a8c62bcbd8e2931c26fdc08714faf7025f45531541e2b29e544b545b"}, - {file = "orjson-3.11.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af02ff34059ee9199a3546f123a6ab4c86caf1708c79042caf0820dc290a6d4f"}, - {file = "orjson-3.11.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b2eba969ea4203c177c7b38b36c69519e6067ee68c34dc37081fac74c796e10"}, - {file = "orjson-3.11.4-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:0baa0ea43cfa5b008a28d3c07705cf3ada40e5d347f0f44994a64b1b7b4b5350"}, - {file = "orjson-3.11.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80fd082f5dcc0e94657c144f1b2a3a6479c44ad50be216cf0c244e567f5eae19"}, - {file = "orjson-3.11.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e3704d35e47d5bee811fb1cbd8599f0b4009b14d451c4c57be5a7e25eb89a13"}, - {file = "orjson-3.11.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:caa447f2b5356779d914658519c874cf3b7629e99e63391ed519c28c8aea4919"}, - {file = "orjson-3.11.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bba5118143373a86f91dadb8df41d9457498226698ebdf8e11cbb54d5b0e802d"}, - {file = "orjson-3.11.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:622463ab81d19ef3e06868b576551587de8e4d518892d1afab71e0fbc1f9cffc"}, - {file = "orjson-3.11.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3e0a700c4b82144b72946b6629968df9762552ee1344bfdb767fecdd634fbd5a"}, - {file = "orjson-3.11.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6e18a5c15e764e5f3fc569b47872450b4bcea24f2a6354c0a0e95ad21045d5a9"}, - {file = "orjson-3.11.4-cp39-cp39-win32.whl", hash = "sha256:fb1c37c71cad991ef4d89c7a634b5ffb4447dbd7ae3ae13e8f5ee7f1775e7ab1"}, - {file = "orjson-3.11.4-cp39-cp39-win_amd64.whl", hash = "sha256:e2985ce8b8c42d00492d0ed79f2bd2b6460d00f2fa671dfde4bf2e02f49bf5c6"}, - {file = "orjson-3.11.4.tar.gz", hash = "sha256:39485f4ab4c9b30a3943cfe99e1a213c4776fb69e8abd68f66b83d5a0b0fdc6d"}, + {file = "orjson-3.10.15-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:552c883d03ad185f720d0c09583ebde257e41b9521b74ff40e08b7dec4559c04"}, + {file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:616e3e8d438d02e4854f70bfdc03a6bcdb697358dbaa6bcd19cbe24d24ece1f8"}, + {file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:7c2c79fa308e6edb0ffab0a31fd75a7841bf2a79a20ef08a3c6e3b26814c8ca8"}, + {file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cb85490aa6bf98abd20607ab5c8324c0acb48d6da7863a51be48505646c814"}, + {file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763dadac05e4e9d2bc14938a45a2d0560549561287d41c465d3c58aec818b164"}, + {file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a330b9b4734f09a623f74a7490db713695e13b67c959713b78369f26b3dee6bf"}, + {file = "orjson-3.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a61a4622b7ff861f019974f73d8165be1bd9a0855e1cad18ee167acacabeb061"}, + {file = "orjson-3.10.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:acd271247691574416b3228db667b84775c497b245fa275c6ab90dc1ffbbd2b3"}, + {file = "orjson-3.10.15-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:e4759b109c37f635aa5c5cc93a1b26927bfde24b254bcc0e1149a9fada253d2d"}, + {file = "orjson-3.10.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9e992fd5cfb8b9f00bfad2fd7a05a4299db2bbe92e6440d9dd2fab27655b3182"}, + {file = "orjson-3.10.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f95fb363d79366af56c3f26b71df40b9a583b07bbaaf5b317407c4d58497852e"}, + {file = "orjson-3.10.15-cp310-cp310-win32.whl", hash = "sha256:f9875f5fea7492da8ec2444839dcc439b0ef298978f311103d0b7dfd775898ab"}, + {file = "orjson-3.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:17085a6aa91e1cd70ca8533989a18b5433e15d29c574582f76f821737c8d5806"}, + {file = "orjson-3.10.15-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c4cc83960ab79a4031f3119cc4b1a1c627a3dc09df125b27c4201dff2af7eaa6"}, + {file = "orjson-3.10.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ddbeef2481d895ab8be5185f2432c334d6dec1f5d1933a9c83014d188e102cef"}, + {file = 
"orjson-3.10.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9e590a0477b23ecd5b0ac865b1b907b01b3c5535f5e8a8f6ab0e503efb896334"}, + {file = "orjson-3.10.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a6be38bd103d2fd9bdfa31c2720b23b5d47c6796bcb1d1b598e3924441b4298d"}, + {file = "orjson-3.10.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ff4f6edb1578960ed628a3b998fa54d78d9bb3e2eb2cfc5c2a09732431c678d0"}, + {file = "orjson-3.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0482b21d0462eddd67e7fce10b89e0b6ac56570424662b685a0d6fccf581e13"}, + {file = "orjson-3.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bb5cc3527036ae3d98b65e37b7986a918955f85332c1ee07f9d3f82f3a6899b5"}, + {file = "orjson-3.10.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d569c1c462912acdd119ccbf719cf7102ea2c67dd03b99edcb1a3048651ac96b"}, + {file = "orjson-3.10.15-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:1e6d33efab6b71d67f22bf2962895d3dc6f82a6273a965fab762e64fa90dc399"}, + {file = "orjson-3.10.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c33be3795e299f565681d69852ac8c1bc5c84863c0b0030b2b3468843be90388"}, + {file = "orjson-3.10.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:eea80037b9fae5339b214f59308ef0589fc06dc870578b7cce6d71eb2096764c"}, + {file = "orjson-3.10.15-cp311-cp311-win32.whl", hash = "sha256:d5ac11b659fd798228a7adba3e37c010e0152b78b1982897020a8e019a94882e"}, + {file = "orjson-3.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:cf45e0214c593660339ef63e875f32ddd5aa3b4adc15e662cdb80dc49e194f8e"}, + {file = "orjson-3.10.15-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9d11c0714fc85bfcf36ada1179400862da3288fc785c30e8297844c867d7505a"}, + {file = "orjson-3.10.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:dba5a1e85d554e3897fa9fe6fbcff2ed32d55008973ec9a2b992bd9a65d2352d"}, + {file = "orjson-3.10.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7723ad949a0ea502df656948ddd8b392780a5beaa4c3b5f97e525191b102fff0"}, + {file = "orjson-3.10.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6fd9bc64421e9fe9bd88039e7ce8e58d4fead67ca88e3a4014b143cec7684fd4"}, + {file = "orjson-3.10.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dadba0e7b6594216c214ef7894c4bd5f08d7c0135f4dd0145600be4fbcc16767"}, + {file = "orjson-3.10.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48f59114fe318f33bbaee8ebeda696d8ccc94c9e90bc27dbe72153094e26f41"}, + {file = "orjson-3.10.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:035fb83585e0f15e076759b6fedaf0abb460d1765b6a36f48018a52858443514"}, + {file = "orjson-3.10.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d13b7fe322d75bf84464b075eafd8e7dd9eae05649aa2a5354cfa32f43c59f17"}, + {file = "orjson-3.10.15-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:7066b74f9f259849629e0d04db6609db4cf5b973248f455ba5d3bd58a4daaa5b"}, + {file = "orjson-3.10.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:88dc3f65a026bd3175eb157fea994fca6ac7c4c8579fc5a86fc2114ad05705b7"}, + {file = "orjson-3.10.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b342567e5465bd99faa559507fe45e33fc76b9fb868a63f1642c6bc0735ad02a"}, + {file = "orjson-3.10.15-cp312-cp312-win32.whl", hash = "sha256:0a4f27ea5617828e6b58922fdbec67b0aa4bb844e2d363b9244c47fa2180e665"}, + {file = "orjson-3.10.15-cp312-cp312-win_amd64.whl", hash = "sha256:ef5b87e7aa9545ddadd2309efe6824bd3dd64ac101c15dae0f2f597911d46eaa"}, + {file = "orjson-3.10.15-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:bae0e6ec2b7ba6895198cd981b7cca95d1487d0147c8ed751e5632ad16f031a6"}, + {file = 
"orjson-3.10.15-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f93ce145b2db1252dd86af37d4165b6faa83072b46e3995ecc95d4b2301b725a"}, + {file = "orjson-3.10.15-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7c203f6f969210128af3acae0ef9ea6aab9782939f45f6fe02d05958fe761ef9"}, + {file = "orjson-3.10.15-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8918719572d662e18b8af66aef699d8c21072e54b6c82a3f8f6404c1f5ccd5e0"}, + {file = "orjson-3.10.15-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f71eae9651465dff70aa80db92586ad5b92df46a9373ee55252109bb6b703307"}, + {file = "orjson-3.10.15-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e117eb299a35f2634e25ed120c37c641398826c2f5a3d3cc39f5993b96171b9e"}, + {file = "orjson-3.10.15-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:13242f12d295e83c2955756a574ddd6741c81e5b99f2bef8ed8d53e47a01e4b7"}, + {file = "orjson-3.10.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7946922ada8f3e0b7b958cc3eb22cfcf6c0df83d1fe5521b4a100103e3fa84c8"}, + {file = "orjson-3.10.15-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:b7155eb1623347f0f22c38c9abdd738b287e39b9982e1da227503387b81b34ca"}, + {file = "orjson-3.10.15-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:208beedfa807c922da4e81061dafa9c8489c6328934ca2a562efa707e049e561"}, + {file = "orjson-3.10.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eca81f83b1b8c07449e1d6ff7074e82e3fd6777e588f1a6632127f286a968825"}, + {file = "orjson-3.10.15-cp313-cp313-win32.whl", hash = "sha256:c03cd6eea1bd3b949d0d007c8d57049aa2b39bd49f58b4b2af571a5d3833d890"}, + {file = "orjson-3.10.15-cp313-cp313-win_amd64.whl", hash = "sha256:fd56a26a04f6ba5fb2045b0acc487a63162a958ed837648c5781e1fe3316cfbf"}, + {file = "orjson-3.10.15-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = 
"sha256:5e8afd6200e12771467a1a44e5ad780614b86abb4b11862ec54861a82d677746"}, + {file = "orjson-3.10.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da9a18c500f19273e9e104cca8c1f0b40a6470bcccfc33afcc088045d0bf5ea6"}, + {file = "orjson-3.10.15-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb00b7bfbdf5d34a13180e4805d76b4567025da19a197645ca746fc2fb536586"}, + {file = "orjson-3.10.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:33aedc3d903378e257047fee506f11e0833146ca3e57a1a1fb0ddb789876c1e1"}, + {file = "orjson-3.10.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd0099ae6aed5eb1fc84c9eb72b95505a3df4267e6962eb93cdd5af03be71c98"}, + {file = "orjson-3.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c864a80a2d467d7786274fce0e4f93ef2a7ca4ff31f7fc5634225aaa4e9e98c"}, + {file = "orjson-3.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c25774c9e88a3e0013d7d1a6c8056926b607a61edd423b50eb5c88fd7f2823ae"}, + {file = "orjson-3.10.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:e78c211d0074e783d824ce7bb85bf459f93a233eb67a5b5003498232ddfb0e8a"}, + {file = "orjson-3.10.15-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:43e17289ffdbbac8f39243916c893d2ae41a2ea1a9cbb060a56a4d75286351ae"}, + {file = "orjson-3.10.15-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:781d54657063f361e89714293c095f506c533582ee40a426cb6489c48a637b81"}, + {file = "orjson-3.10.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6875210307d36c94873f553786a808af2788e362bd0cf4c8e66d976791e7b528"}, + {file = "orjson-3.10.15-cp38-cp38-win32.whl", hash = "sha256:305b38b2b8f8083cc3d618927d7f424349afce5975b316d33075ef0f73576b60"}, + {file = "orjson-3.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:5dd9ef1639878cc3efffed349543cbf9372bdbd79f478615a1c633fe4e4180d1"}, + {file = 
"orjson-3.10.15-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:ffe19f3e8d68111e8644d4f4e267a069ca427926855582ff01fc012496d19969"}, + {file = "orjson-3.10.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d433bf32a363823863a96561a555227c18a522a8217a6f9400f00ddc70139ae2"}, + {file = "orjson-3.10.15-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:da03392674f59a95d03fa5fb9fe3a160b0511ad84b7a3914699ea5a1b3a38da2"}, + {file = "orjson-3.10.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3a63bb41559b05360ded9132032239e47983a39b151af1201f07ec9370715c82"}, + {file = "orjson-3.10.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3766ac4702f8f795ff3fa067968e806b4344af257011858cc3d6d8721588b53f"}, + {file = "orjson-3.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a1c73dcc8fadbd7c55802d9aa093b36878d34a3b3222c41052ce6b0fc65f8e8"}, + {file = "orjson-3.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b299383825eafe642cbab34be762ccff9fd3408d72726a6b2a4506d410a71ab3"}, + {file = "orjson-3.10.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:abc7abecdbf67a173ef1316036ebbf54ce400ef2300b4e26a7b843bd446c2480"}, + {file = "orjson-3.10.15-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:3614ea508d522a621384c1d6639016a5a2e4f027f3e4a1c93a51867615d28829"}, + {file = "orjson-3.10.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:295c70f9dc154307777ba30fe29ff15c1bcc9dfc5c48632f37d20a607e9ba85a"}, + {file = "orjson-3.10.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:63309e3ff924c62404923c80b9e2048c1f74ba4b615e7584584389ada50ed428"}, + {file = "orjson-3.10.15-cp39-cp39-win32.whl", hash = "sha256:a2f708c62d026fb5340788ba94a55c23df4e1869fec74be455e0b2f5363b8507"}, + {file = "orjson-3.10.15-cp39-cp39-win_amd64.whl", hash = 
"sha256:efcf6c735c3d22ef60c4aa27a5238f1a477df85e9b15f2142f9d669beb2d13fd"}, + {file = "orjson-3.10.15.tar.gz", hash = "sha256:05ca7fe452a2e9d8d9d706a2984c95b9c2ebc5db417ce0b7a49b91d50642a23e"}, ] [[package]] @@ -1323,13 +1261,13 @@ files = [ [[package]] name = "pip" -version = "25.3" +version = "25.0.1" description = "The PyPA recommended tool for installing Python packages." optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" files = [ - {file = "pip-25.3-py3-none-any.whl", hash = "sha256:9655943313a94722b7774661c21049070f6bbb0a1516bf02f7c8d5d9201514cd"}, - {file = "pip-25.3.tar.gz", hash = "sha256:8d0538dbbd7babbd207f261ed969c65de439f6bc9e5dbd3b3b9a77f25d95f343"}, + {file = "pip-25.0.1-py3-none-any.whl", hash = "sha256:c46efd13b6aa8279f33f2864459c8ce587ea6a1a59ee20de055868d8f7688f7f"}, + {file = "pip-25.0.1.tar.gz", hash = "sha256:88f96547ea48b940a3a385494e181e29fb8637898f88d88737c5049780f196ea"}, ] [[package]] @@ -1358,34 +1296,34 @@ testing = ["flit_core (>=2,<4)", "poetry_core (>=1.0.0)", "pytest (>=7.2.0)", "p [[package]] name = "platformdirs" -version = "4.4.0" +version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" files = [ - {file = "platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85"}, - {file = "platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf"}, + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] -docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] -type = ["mypy (>=1.14.1)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" -version = "1.6.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" files = [ - {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, - {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] dev = ["pre-commit", "tox"] -testing = ["coverage", "pytest", "pytest-benchmark"] +testing = ["pytest", "pytest-benchmark"] [[package]] name = "pygments" @@ -1403,13 +1341,13 
@@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pymdown-extensions" -version = "10.16.1" +version = "10.15" description = "Extension pack for Python Markdown." optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" files = [ - {file = "pymdown_extensions-10.16.1-py3-none-any.whl", hash = "sha256:d6ba157a6c03146a7fb122b2b9a121300056384eafeec9c9f9e584adfdb2a32d"}, - {file = "pymdown_extensions-10.16.1.tar.gz", hash = "sha256:aace82bcccba3efc03e25d584e6a22d27a8e17caa3f4dd9f207e49b787aa9a91"}, + {file = "pymdown_extensions-10.15-py3-none-any.whl", hash = "sha256:46e99bb272612b0de3b7e7caf6da8dd5f4ca5212c0b273feb9304e236c484e5f"}, + {file = "pymdown_extensions-10.15.tar.gz", hash = "sha256:0e5994e32155f4b03504f939e501b981d306daf7ec2aa1cd2eb6bd300784f8f7"}, ] [package.dependencies] @@ -1421,13 +1359,13 @@ extra = ["pygments (>=2.19.1)"] [[package]] name = "pyparsing" -version = "3.2.5" -description = "pyparsing - Classes and methods to define and execute parsing grammars" +version = "3.1.4" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false -python-versions = ">=3.9" +python-versions = ">=3.6.8" files = [ - {file = "pyparsing-3.2.5-py3-none-any.whl", hash = "sha256:e38a4f02064cf41fe6593d328d0512495ad1f3d8a91c4f73fc401b3079a59a5e"}, - {file = "pyparsing-3.2.5.tar.gz", hash = "sha256:2df8d5b7b2802ef88e8d016a2eb9c7aeaa923529cd251ed0fe4608275d4105b6"}, + {file = "pyparsing-3.1.4-py3-none-any.whl", hash = "sha256:a6a7ee4235a3f944aa1fa2249307708f893fe5717dc603503c6c7969c070fb7c"}, + {file = "pyparsing-3.1.4.tar.gz", hash = "sha256:f86ec8d1a83f11977c9a6ea7598e8c27fc5cddfa5b07ea2241edbbde1d7bc032"}, ] [package.extras] @@ -1446,26 +1384,25 @@ files = [ [[package]] name = "pytest" -version = "8.4.2" +version = "8.3.5" description = "pytest: simple powerful testing with Python" optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" files = [ - {file = 
"pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79"}, - {file = "pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01"}, + {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, + {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, ] [package.dependencies] -colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1", markers = "python_version < \"3.11\""} -iniconfig = ">=1" -packaging = ">=20" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" pluggy = ">=1.5,<2" -pygments = ">=2.7.2" tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-cov" @@ -1513,6 +1450,17 @@ files = [ [package.extras] cli = ["click (>=5.0)"] +[[package]] +name = "pytz" +version = "2025.2" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"}, + {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, +] + [[package]] name = "pywin32" version = "311" @@ -1626,13 +1574,13 @@ files = [ [[package]] name = "pyyaml-env-tag" -version = "1.1" -description = "A custom YAML tag for referencing environment variables in YAML files." 
+version = "0.1" +description = "A custom YAML tag for referencing environment variables in YAML files. " optional = false -python-versions = ">=3.9" +python-versions = ">=3.6" files = [ - {file = "pyyaml_env_tag-1.1-py3-none-any.whl", hash = "sha256:17109e1a528561e32f026364712fee1264bc2ea6715120891174ed1b980d2e04"}, - {file = "pyyaml_env_tag-1.1.tar.gz", hash = "sha256:2eb38b75a2d21ee0475d6d97ec19c63287a7e140231e4214969d0eac923cd7ff"}, + {file = "pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069"}, + {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"}, ] [package.dependencies] @@ -1640,13 +1588,13 @@ pyyaml = "*" [[package]] name = "requests" -version = "2.32.5" +version = "2.32.4" description = "Python HTTP for Humans." optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" files = [ - {file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"}, - {file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"}, + {file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"}, + {file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"}, ] [package.dependencies] @@ -1727,13 +1675,13 @@ files = [ [[package]] name = "testcontainers" -version = "4.13.2" +version = "4.13.3" description = "Python library for throwaway instances of anything that can run in a Docker container" optional = false -python-versions = "<4.0,>=3.9.2" +python-versions = ">=3.9.2" files = [ - {file = "testcontainers-4.13.2-py3-none-any.whl", hash = "sha256:0209baf8f4274b568cde95bef2cadf7b1d33b375321f793790462e235cd684ee"}, - {file = "testcontainers-4.13.2.tar.gz", hash = 
"sha256:2315f1e21b059427a9d11e8921f85fef322fbe0d50749bcca4eaa11271708ba4"}, + {file = "testcontainers-4.13.3-py3-none-any.whl", hash = "sha256:063278c4805ffa6dd85e56648a9da3036939e6c0ac1001e851c9276b19b05970"}, + {file = "testcontainers-4.13.3.tar.gz", hash = "sha256:9d82a7052c9a53c58b69e1dc31da8e7a715e8b3ec1c4df5027561b47e2efe646"}, ] [package.dependencies] @@ -1748,26 +1696,25 @@ arangodb = ["python-arango (>=7.8,<8.0)"] aws = ["boto3", "httpx"] azurite = ["azure-storage-blob (>=12.19,<13.0)"] chroma = ["chromadb-client (>=1.0.0,<2.0.0)"] -clickhouse = ["clickhouse-driver"] cosmosdb = ["azure-cosmos"] db2 = ["ibm_db_sa", "sqlalchemy"] generic = ["httpx", "redis"] google = ["google-cloud-datastore (>=2)", "google-cloud-pubsub (>=2)"] influxdb = ["influxdb", "influxdb-client"] -k3s = ["kubernetes", "pyyaml"] +k3s = ["kubernetes", "pyyaml (>=6.0.3)"] keycloak = ["python-keycloak"] localstack = ["boto3"] mailpit = ["cryptography"] minio = ["minio"] mongodb = ["pymongo"] -mssql = ["pymssql", "sqlalchemy"] +mssql = ["pymssql (>=2.3.9)", "sqlalchemy"] mysql = ["pymysql[rsa]", "sqlalchemy"] nats = ["nats-py"] neo4j = ["neo4j"] openfga = ["openfga-sdk"] opensearch = ["opensearch-py"] -oracle = ["oracledb", "sqlalchemy"] -oracle-free = ["oracledb", "sqlalchemy"] +oracle = ["oracledb (>=3.4.1)", "sqlalchemy"] +oracle-free = ["oracledb (>=3.4.1)", "sqlalchemy"] qdrant = ["qdrant-client"] rabbitmq = ["pika"] redis = ["redis"] @@ -1843,24 +1790,24 @@ files = [ [[package]] name = "typing-extensions" -version = "4.15.0" -description = "Backported and Experimental Type Hints for Python 3.9+" +version = "4.13.2" +description = "Backported and Experimental Type Hints for Python 3.8+" optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, - {file = "typing_extensions-4.15.0.tar.gz", hash = 
"sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, + {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, + {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, ] [[package]] name = "urllib3" -version = "2.5.0" +version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" files = [ - {file = "urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc"}, - {file = "urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760"}, + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] @@ -1871,41 +1818,46 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "watchdog" -version = "6.0.0" +version = "4.0.2" description = "Filesystem events monitoring" optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" files = [ - {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d1cdb490583ebd691c012b3d6dae011000fe42edb7a82ece80965b42abd61f26"}, - {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc64ab3bdb6a04d69d4023b29422170b74681784ffb9463ed4870cf2f3e66112"}, - {file = "watchdog-6.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c897ac1b55c5a1461e16dae288d22bb2e412ba9807df8397a635d88f671d36c3"}, - {file = "watchdog-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6eb11feb5a0d452ee41f824e271ca311a09e250441c262ca2fd7ebcf2461a06c"}, - {file = "watchdog-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:ef810fbf7b781a5a593894e4f439773830bdecb885e6880d957d5b9382a960d2"}, - {file = "watchdog-6.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afd0fe1b2270917c5e23c2a65ce50c2a4abb63daafb0d419fde368e272a76b7c"}, - {file = "watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948"}, - {file = "watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860"}, - {file = "watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0"}, - {file = "watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c"}, - {file = "watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134"}, - {file = "watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b"}, - {file = "watchdog-6.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e6f0e77c9417e7cd62af82529b10563db3423625c5fce018430b249bf977f9e8"}, - {file = "watchdog-6.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:90c8e78f3b94014f7aaae121e6b909674df5b46ec24d6bebc45c44c56729af2a"}, - {file = "watchdog-6.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e7631a77ffb1f7d2eefa4445ebbee491c720a5661ddf6df3498ebecae5ed375c"}, - {file = "watchdog-6.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c7ac31a19f4545dd92fc25d200694098f42c9a8e391bc00bdd362c5736dbf881"}, - {file = "watchdog-6.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9513f27a1a582d9808cf21a07dae516f0fab1cf2d7683a742c498b93eedabb11"}, - {file = "watchdog-6.0.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7a0e56874cfbc4b9b05c60c8a1926fedf56324bb08cfbc188969777940aef3aa"}, 
- {file = "watchdog-6.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e6439e374fc012255b4ec786ae3c4bc838cd7309a540e5fe0952d03687d8804e"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2"}, - {file = "watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a"}, - {file = "watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680"}, - {file = "watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f"}, - {file = "watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282"}, + {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ede7f010f2239b97cc79e6cb3c249e72962404ae3865860855d5cbe708b0fd22"}, + {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a2cffa171445b0efa0726c561eca9a27d00a1f2b83846dbd5a4f639c4f8ca8e1"}, + {file = 
"watchdog-4.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c50f148b31b03fbadd6d0b5980e38b558046b127dc483e5e4505fcef250f9503"}, + {file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7c7d4bf585ad501c5f6c980e7be9c4f15604c7cc150e942d82083b31a7548930"}, + {file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:914285126ad0b6eb2258bbbcb7b288d9dfd655ae88fa28945be05a7b475a800b"}, + {file = "watchdog-4.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:984306dc4720da5498b16fc037b36ac443816125a3705dfde4fd90652d8028ef"}, + {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1cdcfd8142f604630deef34722d695fb455d04ab7cfe9963055df1fc69e6727a"}, + {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d7ab624ff2f663f98cd03c8b7eedc09375a911794dfea6bf2a359fcc266bff29"}, + {file = "watchdog-4.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:132937547a716027bd5714383dfc40dc66c26769f1ce8a72a859d6a48f371f3a"}, + {file = "watchdog-4.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:cd67c7df93eb58f360c43802acc945fa8da70c675b6fa37a241e17ca698ca49b"}, + {file = "watchdog-4.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bcfd02377be80ef3b6bc4ce481ef3959640458d6feaae0bd43dd90a43da90a7d"}, + {file = "watchdog-4.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:980b71510f59c884d684b3663d46e7a14b457c9611c481e5cef08f4dd022eed7"}, + {file = "watchdog-4.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:aa160781cafff2719b663c8a506156e9289d111d80f3387cf3af49cedee1f040"}, + {file = "watchdog-4.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f6ee8dedd255087bc7fe82adf046f0b75479b989185fb0bdf9a98b612170eac7"}, + {file = "watchdog-4.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0b4359067d30d5b864e09c8597b112fe0a0a59321a0f331498b013fb097406b4"}, + {file = "watchdog-4.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:770eef5372f146997638d737c9a3c597a3b41037cfbc5c41538fc27c09c3a3f9"}, + {file = "watchdog-4.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eeea812f38536a0aa859972d50c76e37f4456474b02bd93674d1947cf1e39578"}, + {file = "watchdog-4.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b2c45f6e1e57ebb4687690c05bc3a2c1fb6ab260550c4290b8abb1335e0fd08b"}, + {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:10b6683df70d340ac3279eff0b2766813f00f35a1d37515d2c99959ada8f05fa"}, + {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f7c739888c20f99824f7aa9d31ac8a97353e22d0c0e54703a547a218f6637eb3"}, + {file = "watchdog-4.0.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c100d09ac72a8a08ddbf0629ddfa0b8ee41740f9051429baa8e31bb903ad7508"}, + {file = "watchdog-4.0.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f5315a8c8dd6dd9425b974515081fc0aadca1d1d61e078d2246509fd756141ee"}, + {file = "watchdog-4.0.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:2d468028a77b42cc685ed694a7a550a8d1771bb05193ba7b24006b8241a571a1"}, + {file = "watchdog-4.0.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f15edcae3830ff20e55d1f4e743e92970c847bcddc8b7509bcd172aa04de506e"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:936acba76d636f70db8f3c66e76aa6cb5136a936fc2a5088b9ce1c7a3508fc83"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_armv7l.whl", hash = "sha256:e252f8ca942a870f38cf785aef420285431311652d871409a64e2a0a52a2174c"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_i686.whl", hash = "sha256:0e83619a2d5d436a7e58a1aea957a3c1ccbf9782c43c0b4fed80580e5e4acd1a"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64.whl", hash = "sha256:88456d65f207b39f1981bf772e473799fcdc10801062c36fd5ad9f9d1d463a73"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:32be97f3b75693a93c683787a87a0dc8db98bb84701539954eef991fb35f5fbc"}, + 
{file = "watchdog-4.0.2-py3-none-manylinux2014_s390x.whl", hash = "sha256:c82253cfc9be68e3e49282831afad2c1f6593af80c0daf1287f6a92657986757"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:c0b14488bd336c5b1845cee83d3e631a1f8b4e9c5091ec539406e4a324f882d8"}, + {file = "watchdog-4.0.2-py3-none-win32.whl", hash = "sha256:0d8a7e523ef03757a5aa29f591437d64d0d894635f8a50f370fe37f913ce4e19"}, + {file = "watchdog-4.0.2-py3-none-win_amd64.whl", hash = "sha256:c344453ef3bf875a535b0488e3ad28e341adbd5a9ffb0f7d62cefacc8824ef2b"}, + {file = "watchdog-4.0.2-py3-none-win_ia64.whl", hash = "sha256:baececaa8edff42cd16558a639a9b0ddf425f93d892e8392a56bf904f5eff22c"}, + {file = "watchdog-4.0.2.tar.gz", hash = "sha256:b4dfbb6c49221be4535623ea4474a4d6ee0a9cef4a80b20c28db4d858b64e270"}, ] [package.extras] @@ -1941,118 +1893,118 @@ test = ["pytest (>=6.0.0)", "setuptools (>=65)"] [[package]] name = "wrapt" -version = "2.0.0" +version = "2.0.1" description = "Module for decorators, wrappers and monkey patching." 
optional = false python-versions = ">=3.8" files = [ - {file = "wrapt-2.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a7cebcee61f21b1e46aa32db8d9d93826d0fbf1ad85defc2ccfb93b4adef1435"}, - {file = "wrapt-2.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:827e6e3a3a560f6ec1f5ee92d4319c21a0549384f896ec692f3201eda31ebd11"}, - {file = "wrapt-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a91075a5383a7cbfe46aed1845ef7c3f027e8e20e7d9a8a75e36ebc9b0dd15e"}, - {file = "wrapt-2.0.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b6a18c813196e18146b8d041e20875bdb0cb09b94ac1d1e1146e0fa87b2deb0d"}, - {file = "wrapt-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec5028d26011a53c76bd91bb6198b30b438c6e0f7adb45f2ad84fe2655b6a104"}, - {file = "wrapt-2.0.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bed9b04900204721a24bcefc652ca267b01c1e8ad8bc8c0cff81558a45a3aadc"}, - {file = "wrapt-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:03442f2b45fa3f2b98a94a1917f52fb34670de8f96c0a009c02dbd512d855a3d"}, - {file = "wrapt-2.0.0-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:17d0b5c42495ba142a1cee52b76414f9210591c84aae94dffda70240753bfb3c"}, - {file = "wrapt-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ee44215e7d13e112a8fc74e12ed1a1f41cab2bc07b11cc703f2398cd114b261c"}, - {file = "wrapt-2.0.0-cp310-cp310-win32.whl", hash = "sha256:fe6eafac3bc3c957ab6597a0c0654a0a308868458d00d218743e5b5fae51951c"}, - {file = "wrapt-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:9e070c3491397fba0445b8977900271eca9656570cca7c900d9b9352186703a0"}, - {file = "wrapt-2.0.0-cp310-cp310-win_arm64.whl", hash = "sha256:806e2e73186eb5e3546f39fb5d0405040e0088db0fc8b2f667fd1863de2b3c99"}, - {file = "wrapt-2.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:b7e221abb6c5387819db9323dac3c875b459695057449634f1111955d753c621"}, - {file = "wrapt-2.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1147a84c8fc852426580af8b6e33138461ddbc65aa459a25ea539374d32069fa"}, - {file = "wrapt-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5d6691d4a711504a0bc10de789842ad6ac627bed22937b10f37a1211a8ab7bb3"}, - {file = "wrapt-2.0.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f460e1eb8e75a17c3918c8e35ba57625721eef2439ef0bcf05304ac278a65e1d"}, - {file = "wrapt-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:12c37784b77bf043bf65cc96c7195a5db474b8e54173208af076bdbb61df7b3e"}, - {file = "wrapt-2.0.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:75e5c049eb583835f7a0e0e311d9dde9bfbaac723a6dd89d052540f9b2809977"}, - {file = "wrapt-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e50bcbd5b65dac21b82319fcf18486e6ac439947e9305034b00704eb7405f553"}, - {file = "wrapt-2.0.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:06b78cb6b9320f57737a52fede882640d93cface98332d1a3df0c5696ec9ae9f"}, - {file = "wrapt-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8c8349ebfc3cd98bc9105e0112dd8c8ac1f3c7cb5601f9d02248cae83a63f748"}, - {file = "wrapt-2.0.0-cp311-cp311-win32.whl", hash = "sha256:028f19ec29e204fe725139d4a8b09f77ecfb64f8f02b7ab5ee822c85e330b68b"}, - {file = "wrapt-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:c6961f05e58d919153ba311b397b7b904b907132b7b8344dde47865d4bb5ec89"}, - {file = "wrapt-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:be7e316c2accd5a31dbcc230de19e2a846a325f8967fdea72704d00e38e6af06"}, - {file = "wrapt-2.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73c6f734aecb1a030d9a265c13a425897e1ea821b73249bb14471445467ca71c"}, - {file = "wrapt-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:b4a7f8023b8ce8a36370154733c747f8d65c8697cb977d8b6efeb89291fff23e"}, - {file = "wrapt-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a1cb62f686c50e9dab5983c68f6c8e9cbf14a6007935e683662898a7d892fa69"}, - {file = "wrapt-2.0.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:43dc0550ae15e33e6bb45a82a5e1b5495be2587fbaa996244b509921810ee49f"}, - {file = "wrapt-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39c5b45b056d630545e40674d1f5e1b51864b3546f25ab6a4a331943de96262e"}, - {file = "wrapt-2.0.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:804e88f824b76240a1b670330637ccfd2d18b9efa3bb4f02eb20b2f64880b324"}, - {file = "wrapt-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c2c476aa3fc2b9899c3f7b20963fac4f952e7edb74a31fc92f7745389a2e3618"}, - {file = "wrapt-2.0.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:8d851e526891216f89fcb7a1820dad9bd503ba3468fb9635ee28e93c781aa98e"}, - {file = "wrapt-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b95733c2360c4a8656ee93c7af78e84c0bd617da04a236d7a456c8faa34e7a2d"}, - {file = "wrapt-2.0.0-cp312-cp312-win32.whl", hash = "sha256:ea56817176834edf143df1109ae8fdaa087be82fdad3492648de0baa8ae82bf2"}, - {file = "wrapt-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:3c7d3bee7be7a2665286103f4d1f15405c8074e6e1f89dac5774f9357c9a3809"}, - {file = "wrapt-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:680f707e1d26acbc60926659799b15659f077df5897a6791c7c598a5d4a211c4"}, - {file = "wrapt-2.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e2ea096db28d5eb64d381af0e93464621ace38a7003a364b6b5ffb7dd713aabe"}, - {file = "wrapt-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c92b5a82d28491e3f14f037e1aae99a27a5e6e0bb161e65f52c0445a3fa7c940"}, - {file = "wrapt-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:81d234718aabe632d179fac52c7f69f0f99fbaac4d4bcd670e62462bbcbfcad7"}, - {file = "wrapt-2.0.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:db2eea83c43f84e4e41dbbb4c1de371a53166e55f900a6b130c3ef51c6345c1a"}, - {file = "wrapt-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:65f50e356c425c061e1e17fe687ff30e294fed9bf3441dc1f13ef73859c2a817"}, - {file = "wrapt-2.0.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:887f2a667e3cbfb19e204032d42ad7dedaa43972e4861dc7a3d51ae951d9b578"}, - {file = "wrapt-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9054829da4be461e3ad3192e4b6bbf1fc18af64c9975ce613aec191924e004dc"}, - {file = "wrapt-2.0.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:b952ffd77133a5a2798ee3feb18e51b0a299d2f440961e5bb7737dbb02e57289"}, - {file = "wrapt-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e25fde03c480061b8234d8ee4863eb5f40a9be4fb258ce105b364de38fc6bcf9"}, - {file = "wrapt-2.0.0-cp313-cp313-win32.whl", hash = "sha256:49e982b7860d325094978292a49e0418833fc7fc42c0dc7cd0b7524d7d06ee74"}, - {file = "wrapt-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:6e5c86389d9964050ce50babe247d172a5e3911d59a64023b90db2b4fa00ae7c"}, - {file = "wrapt-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:b96fdaa4611e05c7231937930567d3c16782be9dbcf03eb9f60d83e57dd2f129"}, - {file = "wrapt-2.0.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:f2c7b7fead096dbf1dcc455b7f59facb05de3f5bfb04f60a69f98cdfe6049e5f"}, - {file = "wrapt-2.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:04c7c8393f25b11c0faa5d907dd9eb462e87e4e7ba55e308a046d7ed37f4bbe2"}, - {file = "wrapt-2.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a93e0f8b376c0735b2f4daf58018b4823614d2b896cb72b6641c4d3dbdca1d75"}, - {file = "wrapt-2.0.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", 
hash = "sha256:b42d13603da4416c43c430dbc6313c8d7ff745c40942f146ed4f6dd02c7d2547"}, - {file = "wrapt-2.0.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c8bbd2472abf8c33480ad2314b1f8fac45d592aba6cc093e8839a7b2045660e6"}, - {file = "wrapt-2.0.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e64a3a1fd9a308ab9b815a2ad7a65b679730629dbf85f8fc3f7f970d634ee5df"}, - {file = "wrapt-2.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d61214525eaf88e0d0edf3d1ad5b5889863c6f88e588c6cdc6aa4ee5d1f10a4a"}, - {file = "wrapt-2.0.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:04f7a5f92c5f7324a1735043cc467b1295a1c5b4e0c1395472b7c44706e3dc61"}, - {file = "wrapt-2.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2356f76cb99b3de5b4e5b8210367fbbb81c7309fe39b622f5d199dd88eb7f765"}, - {file = "wrapt-2.0.0-cp313-cp313t-win32.whl", hash = "sha256:0a921b657a224e40e4bc161b5d33934583b34f0c9c5bdda4e6ac66f9d2fcb849"}, - {file = "wrapt-2.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:c16f6d4eea98080f6659a8a7fc559d4a0a337ee66960659265cad2c8a40f7c0f"}, - {file = "wrapt-2.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:52878edc13dc151c58a9966621d67163a80654bc6cff4b2e1c79fa62d0352b26"}, - {file = "wrapt-2.0.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:79a53d86c2aff7b32cc77267e3a308365d1fcb881e74bc9cbe26f63ee90e37f0"}, - {file = "wrapt-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d731a4f22ed6ffa4cb551b4d2b0c24ff940c27a88edaf8e3490a5ee3a05aef71"}, - {file = "wrapt-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:3e02ab8c0ac766a5a6e81cd3b6cc39200c69051826243182175555872522bd5a"}, - {file = "wrapt-2.0.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:895870602d65d7338edb3b6a717d856632ad9f14f7ff566214e4fb11f0816649"}, - {file = 
"wrapt-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0b9ad4fab76a0086dc364c4f17f39ad289600e73ef5c6e9ab529aff22cac1ac3"}, - {file = "wrapt-2.0.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e7ca0562606d7bad2736b2c18f61295d61f50cd3f4bfc51753df13614dbcce1b"}, - {file = "wrapt-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fe089d9f5a4a3dea0108a8ae34bced114d0c4cca417bada1c5e8f42d98af9050"}, - {file = "wrapt-2.0.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e761f2d2f8dbc80384af3d547b522a80e67db3e319c7b02e7fd97aded0a8a678"}, - {file = "wrapt-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:17ba1bdc52d0c783481850996aa26cea5237720769197335abea2ae6b4c23bc0"}, - {file = "wrapt-2.0.0-cp314-cp314-win32.whl", hash = "sha256:f73318741b141223a4674ba96992aa2291b1b3f7a5e85cb3c2c964f86171eb45"}, - {file = "wrapt-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:8e08d4edb13cafe7b3260f31d4de033f73d3205774540cf583bffaa4bec97db9"}, - {file = "wrapt-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:af01695c2b7bbd8d67b869d8e3de2b123a7bfbee0185bdd138c2775f75373b83"}, - {file = "wrapt-2.0.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:057f02c13cce7b26c79624c06a3e1c2353e6dc9708525232232f6768118042ca"}, - {file = "wrapt-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:79bdd84570267f3f43d609c892ae2d30b91ee4b8614c2cbfd311a2965f1c9bdb"}, - {file = "wrapt-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:93c8b4f4d54fd401a817abbfc9bf482aa72fd447f8adf19ce81d035b3f5c762c"}, - {file = "wrapt-2.0.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5e09ffd31001dce71c2c2a4fc201bdba9a2f9f62b23700cf24af42266e784741"}, - {file = "wrapt-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:d87c285ff04e26083c4b03546e7b74df7ba4f1f32f1dcb92e9ac13c2dbb4c379"}, - {file = "wrapt-2.0.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e52e50ea0a72ea48d1291cf8b8aaedcc99072d9dc5baba6b820486dcf4c67da8"}, - {file = "wrapt-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1fd4c95536975895f32571073446e614d5e2810b666b64955586dcddfd438fd3"}, - {file = "wrapt-2.0.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:d6ebfe9283209220ed9de80a3e9442aab8fc2be5a9bbf8491b99e02ca9349a89"}, - {file = "wrapt-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5d3ebd784804f146b7ea55359beb138e23cc18e5a5cc2cf26ad438723c00ce3a"}, - {file = "wrapt-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:9b15940ae9debc8b40b15dc57e1ce4433f7fb9d3f8761c7fab1ddd94cb999d99"}, - {file = "wrapt-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:7a0efbbc06d3e2077476a04f55859819d23206600b4c33f791359a8e6fa3c362"}, - {file = "wrapt-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:7fec8a9455c029c8cf4ff143a53b6e7c463268d42be6c17efa847ebd2f809965"}, - {file = "wrapt-2.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ac3d8beac68e4863c703b844fcc82693f83f933b37d2a54e9d513b2aab9c76aa"}, - {file = "wrapt-2.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f4b8f8644602803add6848c81b7d214cfd397b1ebab2130dc8530570d888155c"}, - {file = "wrapt-2.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93cb5bff1fcd89b75f869e4f69566a91ab2c9f13e8edf0241fd5777b2fa6d48e"}, - {file = "wrapt-2.0.0-cp38-cp38-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e0eb6d155d02c7525b7ec09856cda5e611fc6eb9ab40d140e1f35f27ac7d5eae"}, - {file = "wrapt-2.0.0-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:309dd467a94ee38a7aa5752bda64e660aeab5723b26200d0b65a375dad9add09"}, - {file = "wrapt-2.0.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = 
"sha256:a55e8edd08e2eece131d90d82cd1521962d9152829b22c56e68539526d605825"}, - {file = "wrapt-2.0.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:1724dd7b84d419c80ba839da81ad78b02ac30df626e5aefcb18e94632a965f13"}, - {file = "wrapt-2.0.0-cp38-cp38-win32.whl", hash = "sha256:f8255c380a79f6752d0b920e69a5d656d863675d9c433eeb5548518ee2c8d9da"}, - {file = "wrapt-2.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:829c8d46465dbae49dba91516f11200a2b5ea91eae8afaccbc035f0b651eb9c4"}, - {file = "wrapt-2.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:094d348ce7e6ce37bf6ed9a6ecc11886c96f447b3ffebc7539ca197daa9a997e"}, - {file = "wrapt-2.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:98223acaa25b1449d993a3f4ffc8b5a03535e4041b37bf6a25459a0c74ee4cfc"}, - {file = "wrapt-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2b79bf04c722035b1c474980dc1a64369feab7b703d6fe67da2d8664ed0bc980"}, - {file = "wrapt-2.0.0-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:623242959cb0c53f76baeb929be79f5f6a9a1673ef51628072b91bf299af2212"}, - {file = "wrapt-2.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:59dc94afc4542c7d9b9447fb2ae1168b5a29064eca4061dbbf3b3c26df268334"}, - {file = "wrapt-2.0.0-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d7c532cc9f0a9e6017f8d3c37f478a3e3a5dffa955ebba556274e5e916c058f7"}, - {file = "wrapt-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9d72c725cefbcc8ebab85c8352e5062ae87b6e323858e934e16b54ced580435a"}, - {file = "wrapt-2.0.0-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:2ca35b83497276c2ca0b072d2c00da2edde4c2a6c8c650eafcd1a006c17ab231"}, - {file = "wrapt-2.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2fc55d0da29318a5da33c2827aef8946bba046ac609a4784a90faff73c511174"}, - {file = "wrapt-2.0.0-cp39-cp39-win32.whl", hash = 
"sha256:9c100b0598f3763274f2033bcc0454de7486409f85bc6da58b49e5971747eb36"}, - {file = "wrapt-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:1316972a72c67936a07dbb48e2464356d91dd9674335aaec087b60094d87750b"}, - {file = "wrapt-2.0.0-cp39-cp39-win_arm64.whl", hash = "sha256:5aad54ff45da9784573099696fd84841c7e559ce312f02afa6aa7e89b58e2c2f"}, - {file = "wrapt-2.0.0-py3-none-any.whl", hash = "sha256:02482fb0df89857e35427dfb844319417e14fae05878f295ee43fa3bf3b15502"}, - {file = "wrapt-2.0.0.tar.gz", hash = "sha256:35a542cc7a962331d0279735c30995b024e852cf40481e384fd63caaa391cbb9"}, + {file = "wrapt-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:64b103acdaa53b7caf409e8d45d39a8442fe6dcfec6ba3f3d141e0cc2b5b4dbd"}, + {file = "wrapt-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:91bcc576260a274b169c3098e9a3519fb01f2989f6d3d386ef9cbf8653de1374"}, + {file = "wrapt-2.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ab594f346517010050126fcd822697b25a7031d815bb4fbc238ccbe568216489"}, + {file = "wrapt-2.0.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:36982b26f190f4d737f04a492a68accbfc6fa042c3f42326fdfbb6c5b7a20a31"}, + {file = "wrapt-2.0.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:23097ed8bc4c93b7bf36fa2113c6c733c976316ce0ee2c816f64ca06102034ef"}, + {file = "wrapt-2.0.1-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8bacfe6e001749a3b64db47bcf0341da757c95959f592823a93931a422395013"}, + {file = "wrapt-2.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8ec3303e8a81932171f455f792f8df500fc1a09f20069e5c16bd7049ab4e8e38"}, + {file = "wrapt-2.0.1-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:3f373a4ab5dbc528a94334f9fe444395b23c2f5332adab9ff4ea82f5a9e33bc1"}, + {file = "wrapt-2.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f49027b0b9503bf6c8cdc297ca55006b80c2f5dd36cecc72c6835ab6e10e8a25"}, + 
{file = "wrapt-2.0.1-cp310-cp310-win32.whl", hash = "sha256:8330b42d769965e96e01fa14034b28a2a7600fbf7e8f0cc90ebb36d492c993e4"}, + {file = "wrapt-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:1218573502a8235bb8a7ecaed12736213b22dcde9feab115fa2989d42b5ded45"}, + {file = "wrapt-2.0.1-cp310-cp310-win_arm64.whl", hash = "sha256:eda8e4ecd662d48c28bb86be9e837c13e45c58b8300e43ba3c9b4fa9900302f7"}, + {file = "wrapt-2.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0e17283f533a0d24d6e5429a7d11f250a58d28b4ae5186f8f47853e3e70d2590"}, + {file = "wrapt-2.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85df8d92158cb8f3965aecc27cf821461bb5f40b450b03facc5d9f0d4d6ddec6"}, + {file = "wrapt-2.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c1be685ac7700c966b8610ccc63c3187a72e33cab53526a27b2a285a662cd4f7"}, + {file = "wrapt-2.0.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:df0b6d3b95932809c5b3fecc18fda0f1e07452d05e2662a0b35548985f256e28"}, + {file = "wrapt-2.0.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4da7384b0e5d4cae05c97cd6f94faaf78cc8b0f791fc63af43436d98c4ab37bb"}, + {file = "wrapt-2.0.1-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ec65a78fbd9d6f083a15d7613b2800d5663dbb6bb96003899c834beaa68b242c"}, + {file = "wrapt-2.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7de3cc939be0e1174969f943f3b44e0d79b6f9a82198133a5b7fc6cc92882f16"}, + {file = "wrapt-2.0.1-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:fb1a5b72cbd751813adc02ef01ada0b0d05d3dcbc32976ce189a1279d80ad4a2"}, + {file = "wrapt-2.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3fa272ca34332581e00bf7773e993d4f632594eb2d1b0b162a9038df0fd971dd"}, + {file = "wrapt-2.0.1-cp311-cp311-win32.whl", hash = "sha256:fc007fdf480c77301ab1afdbb6ab22a5deee8885f3b1ed7afcb7e5e84a0e27be"}, + {file = "wrapt-2.0.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:47434236c396d04875180171ee1f3815ca1eada05e24a1ee99546320d54d1d1b"}, + {file = "wrapt-2.0.1-cp311-cp311-win_arm64.whl", hash = "sha256:837e31620e06b16030b1d126ed78e9383815cbac914693f54926d816d35d8edf"}, + {file = "wrapt-2.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1fdbb34da15450f2b1d735a0e969c24bdb8d8924892380126e2a293d9902078c"}, + {file = "wrapt-2.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3d32794fe940b7000f0519904e247f902f0149edbe6316c710a8562fb6738841"}, + {file = "wrapt-2.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:386fb54d9cd903ee0012c09291336469eb7b244f7183d40dc3e86a16a4bace62"}, + {file = "wrapt-2.0.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7b219cb2182f230676308cdcacd428fa837987b89e4b7c5c9025088b8a6c9faf"}, + {file = "wrapt-2.0.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:641e94e789b5f6b4822bb8d8ebbdfc10f4e4eae7756d648b717d980f657a9eb9"}, + {file = "wrapt-2.0.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fe21b118b9f58859b5ebaa4b130dee18669df4bd111daad082b7beb8799ad16b"}, + {file = "wrapt-2.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:17fb85fa4abc26a5184d93b3efd2dcc14deb4b09edcdb3535a536ad34f0b4dba"}, + {file = "wrapt-2.0.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:b89ef9223d665ab255ae42cc282d27d69704d94be0deffc8b9d919179a609684"}, + {file = "wrapt-2.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a453257f19c31b31ba593c30d997d6e5be39e3b5ad9148c2af5a7314061c63eb"}, + {file = "wrapt-2.0.1-cp312-cp312-win32.whl", hash = "sha256:3e271346f01e9c8b1130a6a3b0e11908049fe5be2d365a5f402778049147e7e9"}, + {file = "wrapt-2.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:2da620b31a90cdefa9cd0c2b661882329e2e19d1d7b9b920189956b76c564d75"}, + {file = "wrapt-2.0.1-cp312-cp312-win_arm64.whl", hash = 
"sha256:aea9c7224c302bc8bfc892b908537f56c430802560e827b75ecbde81b604598b"}, + {file = "wrapt-2.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:47b0f8bafe90f7736151f61482c583c86b0693d80f075a58701dd1549b0010a9"}, + {file = "wrapt-2.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cbeb0971e13b4bd81d34169ed57a6dda017328d1a22b62fda45e1d21dd06148f"}, + {file = "wrapt-2.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:eb7cffe572ad0a141a7886a1d2efa5bef0bf7fe021deeea76b3ab334d2c38218"}, + {file = "wrapt-2.0.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c8d60527d1ecfc131426b10d93ab5d53e08a09c5fa0175f6b21b3252080c70a9"}, + {file = "wrapt-2.0.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c654eafb01afac55246053d67a4b9a984a3567c3808bb7df2f8de1c1caba2e1c"}, + {file = "wrapt-2.0.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:98d873ed6c8b4ee2418f7afce666751854d6d03e3c0ec2a399bb039cd2ae89db"}, + {file = "wrapt-2.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c9e850f5b7fc67af856ff054c71690d54fa940c3ef74209ad9f935b4f66a0233"}, + {file = "wrapt-2.0.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:e505629359cb5f751e16e30cf3f91a1d3ddb4552480c205947da415d597f7ac2"}, + {file = "wrapt-2.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2879af909312d0baf35f08edeea918ee3af7ab57c37fe47cb6a373c9f2749c7b"}, + {file = "wrapt-2.0.1-cp313-cp313-win32.whl", hash = "sha256:d67956c676be5a24102c7407a71f4126d30de2a569a1c7871c9f3cabc94225d7"}, + {file = "wrapt-2.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:9ca66b38dd642bf90c59b6738af8070747b610115a39af2498535f62b5cdc1c3"}, + {file = "wrapt-2.0.1-cp313-cp313-win_arm64.whl", hash = "sha256:5a4939eae35db6b6cec8e7aa0e833dcca0acad8231672c26c2a9ab7a0f8ac9c8"}, + {file = "wrapt-2.0.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = 
"sha256:a52f93d95c8d38fed0669da2ebdb0b0376e895d84596a976c15a9eb45e3eccb3"}, + {file = "wrapt-2.0.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4e54bbf554ee29fcceee24fa41c4d091398b911da6e7f5d7bffda963c9aed2e1"}, + {file = "wrapt-2.0.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:908f8c6c71557f4deaa280f55d0728c3bca0960e8c3dd5ceeeafb3c19942719d"}, + {file = "wrapt-2.0.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e2f84e9af2060e3904a32cea9bb6db23ce3f91cfd90c6b426757cf7cc01c45c7"}, + {file = "wrapt-2.0.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e3612dc06b436968dfb9142c62e5dfa9eb5924f91120b3c8ff501ad878f90eb3"}, + {file = "wrapt-2.0.1-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6d2d947d266d99a1477cd005b23cbd09465276e302515e122df56bb9511aca1b"}, + {file = "wrapt-2.0.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:7d539241e87b650cbc4c3ac9f32c8d1ac8a54e510f6dca3f6ab60dcfd48c9b10"}, + {file = "wrapt-2.0.1-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:4811e15d88ee62dbf5c77f2c3ff3932b1e3ac92323ba3912f51fc4016ce81ecf"}, + {file = "wrapt-2.0.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c1c91405fcf1d501fa5d55df21e58ea49e6b879ae829f1039faaf7e5e509b41e"}, + {file = "wrapt-2.0.1-cp313-cp313t-win32.whl", hash = "sha256:e76e3f91f864e89db8b8d2a8311d57df93f01ad6bb1e9b9976d1f2e83e18315c"}, + {file = "wrapt-2.0.1-cp313-cp313t-win_amd64.whl", hash = "sha256:83ce30937f0ba0d28818807b303a412440c4b63e39d3d8fc036a94764b728c92"}, + {file = "wrapt-2.0.1-cp313-cp313t-win_arm64.whl", hash = "sha256:4b55cacc57e1dc2d0991dbe74c6419ffd415fb66474a02335cb10efd1aa3f84f"}, + {file = "wrapt-2.0.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:5e53b428f65ece6d9dad23cb87e64506392b720a0b45076c05354d27a13351a1"}, + {file = "wrapt-2.0.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = 
"sha256:ad3ee9d0f254851c71780966eb417ef8e72117155cff04821ab9b60549694a55"}, + {file = "wrapt-2.0.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:d7b822c61ed04ee6ad64bc90d13368ad6eb094db54883b5dde2182f67a7f22c0"}, + {file = "wrapt-2.0.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7164a55f5e83a9a0b031d3ffab4d4e36bbec42e7025db560f225489fa929e509"}, + {file = "wrapt-2.0.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e60690ba71a57424c8d9ff28f8d006b7ad7772c22a4af432188572cd7fa004a1"}, + {file = "wrapt-2.0.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3cd1a4bd9a7a619922a8557e1318232e7269b5fb69d4ba97b04d20450a6bf970"}, + {file = "wrapt-2.0.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b4c2e3d777e38e913b8ce3a6257af72fb608f86a1df471cb1d4339755d0a807c"}, + {file = "wrapt-2.0.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:3d366aa598d69416b5afedf1faa539fac40c1d80a42f6b236c88c73a3c8f2d41"}, + {file = "wrapt-2.0.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c235095d6d090aa903f1db61f892fffb779c1eaeb2a50e566b52001f7a0f66ed"}, + {file = "wrapt-2.0.1-cp314-cp314-win32.whl", hash = "sha256:bfb5539005259f8127ea9c885bdc231978c06b7a980e63a8a61c8c4c979719d0"}, + {file = "wrapt-2.0.1-cp314-cp314-win_amd64.whl", hash = "sha256:4ae879acc449caa9ed43fc36ba08392b9412ee67941748d31d94e3cedb36628c"}, + {file = "wrapt-2.0.1-cp314-cp314-win_arm64.whl", hash = "sha256:8639b843c9efd84675f1e100ed9e99538ebea7297b62c4b45a7042edb84db03e"}, + {file = "wrapt-2.0.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:9219a1d946a9b32bb23ccae66bdb61e35c62773ce7ca6509ceea70f344656b7b"}, + {file = "wrapt-2.0.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:fa4184e74197af3adad3c889a1af95b53bb0466bced92ea99a0c014e48323eec"}, + {file = "wrapt-2.0.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = 
"sha256:c5ef2f2b8a53b7caee2f797ef166a390fef73979b15778a4a153e4b5fedce8fa"}, + {file = "wrapt-2.0.1-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e042d653a4745be832d5aa190ff80ee4f02c34b21f4b785745eceacd0907b815"}, + {file = "wrapt-2.0.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2afa23318136709c4b23d87d543b425c399887b4057936cd20386d5b1422b6fa"}, + {file = "wrapt-2.0.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6c72328f668cf4c503ffcf9434c2b71fdd624345ced7941bc6693e61bbe36bef"}, + {file = "wrapt-2.0.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3793ac154afb0e5b45d1233cb94d354ef7a983708cc3bb12563853b1d8d53747"}, + {file = "wrapt-2.0.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:fec0d993ecba3991645b4857837277469c8cc4c554a7e24d064d1ca291cfb81f"}, + {file = "wrapt-2.0.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:949520bccc1fa227274da7d03bf238be15389cd94e32e4297b92337df9b7a349"}, + {file = "wrapt-2.0.1-cp314-cp314t-win32.whl", hash = "sha256:be9e84e91d6497ba62594158d3d31ec0486c60055c49179edc51ee43d095f79c"}, + {file = "wrapt-2.0.1-cp314-cp314t-win_amd64.whl", hash = "sha256:61c4956171c7434634401db448371277d07032a81cc21c599c22953374781395"}, + {file = "wrapt-2.0.1-cp314-cp314t-win_arm64.whl", hash = "sha256:35cdbd478607036fee40273be8ed54a451f5f23121bd9d4be515158f9498f7ad"}, + {file = "wrapt-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:90897ea1cf0679763b62e79657958cd54eae5659f6360fc7d2ccc6f906342183"}, + {file = "wrapt-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:50844efc8cdf63b2d90cd3d62d4947a28311e6266ce5235a219d21b195b4ec2c"}, + {file = "wrapt-2.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49989061a9977a8cbd6d20f2efa813f24bf657c6990a42967019ce779a878dbf"}, + {file = "wrapt-2.0.1-cp38-cp38-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = 
"sha256:09c7476ab884b74dce081ad9bfd07fe5822d8600abade571cb1f66d5fc915af6"}, + {file = "wrapt-2.0.1-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1a8a09a004ef100e614beec82862d11fc17d601092c3599afd22b1f36e4137e"}, + {file = "wrapt-2.0.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:89a82053b193837bf93c0f8a57ded6e4b6d88033a499dadff5067e912c2a41e9"}, + {file = "wrapt-2.0.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:f26f8e2ca19564e2e1fdbb6a0e47f36e0efbab1acc31e15471fad88f828c75f6"}, + {file = "wrapt-2.0.1-cp38-cp38-win32.whl", hash = "sha256:115cae4beed3542e37866469a8a1f2b9ec549b4463572b000611e9946b86e6f6"}, + {file = "wrapt-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:c4012a2bd37059d04f8209916aa771dfb564cccb86079072bdcd48a308b6a5c5"}, + {file = "wrapt-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:68424221a2dc00d634b54f92441914929c5ffb1c30b3b837343978343a3512a3"}, + {file = "wrapt-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6bd1a18f5a797fe740cb3d7a0e853a8ce6461cc62023b630caec80171a6b8097"}, + {file = "wrapt-2.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fb3a86e703868561c5cad155a15c36c716e1ab513b7065bd2ac8ed353c503333"}, + {file = "wrapt-2.0.1-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5dc1b852337c6792aa111ca8becff5bacf576bf4a0255b0f05eb749da6a1643e"}, + {file = "wrapt-2.0.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c046781d422f0830de6329fa4b16796096f28a92c8aef3850674442cdcb87b7f"}, + {file = "wrapt-2.0.1-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f73f9f7a0ebd0db139253d27e5fc8d2866ceaeef19c30ab5d69dcbe35e1a6981"}, + {file = "wrapt-2.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b667189cf8efe008f55bbda321890bef628a67ab4147ebf90d182f2dadc78790"}, + {file = "wrapt-2.0.1-cp39-cp39-musllinux_1_2_riscv64.whl", hash = 
"sha256:a9a83618c4f0757557c077ef71d708ddd9847ed66b7cc63416632af70d3e2308"}, + {file = "wrapt-2.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1e9b121e9aeb15df416c2c960b8255a49d44b4038016ee17af03975992d03931"}, + {file = "wrapt-2.0.1-cp39-cp39-win32.whl", hash = "sha256:1f186e26ea0a55f809f232e92cc8556a0977e00183c3ebda039a807a42be1494"}, + {file = "wrapt-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:bf4cb76f36be5de950ce13e22e7fdf462b35b04665a12b64f3ac5c1bbbcf3728"}, + {file = "wrapt-2.0.1-cp39-cp39-win_arm64.whl", hash = "sha256:d6cc985b9c8b235bd933990cdbf0f891f8e010b65a3911f7a55179cd7b0fc57b"}, + {file = "wrapt-2.0.1-py3-none-any.whl", hash = "sha256:4d2ce1bf1a48c5277d7969259232b57645aae5686dba1eaeade39442277afbca"}, + {file = "wrapt-2.0.1.tar.gz", hash = "sha256:9c9c635e78497cacb81e84f8b11b23e0aacac7a136e73b8e5b2109a1d9fc468f"}, ] [package.extras] @@ -2060,13 +2012,13 @@ dev = ["pytest", "setuptools"] [[package]] name = "zipp" -version = "3.23.0" +version = "3.20.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" files = [ - {file = "zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e"}, - {file = "zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166"}, + {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, + {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, ] [package.extras] @@ -2074,7 +2026,7 @@ check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", 
"more_itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] [extras] @@ -2087,5 +2039,5 @@ rdf4j = ["httpx"] [metadata] lock-version = "2.0" -python-versions = ">=3.9.2, <4.0" -content-hash = "1470834243bf70c42b04cee45e5fe0097d128b26469a1a80d348706e1979d97c" +python-versions = ">=3.8.1" +content-hash = "a9f7643c46c8fa5f969aa0554e147cc9d1ebc19284aabf9ed19991fa96f105c7" diff --git a/pyproject.toml b/pyproject.toml index 0d7582c26..4924a5af5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,10 +41,7 @@ rdfs2dot = 'rdflib.tools.rdfs2dot:main' rdfgraphisomorphism = 'rdflib.tools.graphisomorphism:main' [tool.poetry.dependencies] -# TODO: temporarily add new python version constraints for testcontainers -# We can remove the upper bound once testcontainers releases a new version -# https://github.com/testcontainers/testcontainers-python/pull/909 -python = ">=3.9.2, <4.0" +python = ">=3.8.1" isodate = {version=">=0.7.2,<1.0.0", python = "<3.11"} pyparsing = ">=2.1.0,<4" berkeleydb = {version = "^18.1.0", optional = true} @@ -67,7 +64,7 @@ coverage = {version = "^7.0.1", extras = ["toml"]} types-setuptools = ">=68.0.0.3,<72.0.0.0" setuptools = ">=68,<72" wheel = ">=0.42,<0.46" -testcontainers = "^4.13.2" +testcontainers = {version = "^4.13.2", python = ">=3.9.2"} [tool.poetry.group.docs.dependencies] typing-extensions = "^4.11.0" diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index 3751fbdef..7608e4299 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -30,14 +30,16 @@ build_sparql_query_accept_header, build_spo_param, rdf_payload_to_stream, + validate_graph_name, + validate_no_bnodes, ) from rdflib.graph import DATASET_DEFAULT_GRAPH_ID, Dataset, Graph from rdflib.query import Result from rdflib.term import 
IdentifiedNode, Literal, URIRef -SubjectType = t.Union[IdentifiedNode, None] +SubjectType = t.Union[URIRef, None] PredicateType = t.Union[URIRef, None] -ObjectType = t.Union[IdentifiedNode, Literal, None] +ObjectType = t.Union[URIRef, Literal, None] @dataclass(frozen=True) @@ -198,8 +200,14 @@ def identifier(self): @staticmethod def _build_graph_name_params(graph_name: URIRef | str): params = {} - if isinstance(graph_name, URIRef) and graph_name == DATASET_DEFAULT_GRAPH_ID: - # Do nothing; GraphDB does not work with `?default=`, which is the default + if ( + isinstance(graph_name, URIRef) + and graph_name == DATASET_DEFAULT_GRAPH_ID + or isinstance(graph_name, str) + and graph_name == str(DATASET_DEFAULT_GRAPH_ID) + ): + # Do nothing; GraphDB does not work with `?default=` + # (note the trailing equal character), which is the default # behavior of httpx when setting the param value to an empty string. # httpx completely omits query parameters whose values are `None`, so that's # not an option either. 
@@ -231,6 +239,7 @@ def get(self, graph_name: URIRef | str) -> Graph: """ if not graph_name: raise ValueError("Graph name must be provided.") + validate_graph_name(graph_name) headers = { "Accept": self._content_type, } @@ -260,6 +269,7 @@ def add(self, graph_name: URIRef | str, data: str | bytes | BinaryIO | Graph): """ if not graph_name: raise ValueError("Graph name must be provided.") + validate_graph_name(graph_name) stream, should_close = rdf_payload_to_stream(data) headers = { "Content-Type": self._content_type, @@ -290,6 +300,7 @@ def overwrite(self, graph_name: URIRef | str, data: str | bytes | BinaryIO | Gra """ if not graph_name: raise ValueError("Graph name must be provided.") + validate_graph_name(graph_name) stream, should_close = rdf_payload_to_stream(data) headers = { "Content-Type": self._content_type, @@ -318,6 +329,7 @@ def clear(self, graph_name: URIRef | str): """ if not graph_name: raise ValueError("Graph name must be provided.") + validate_graph_name(graph_name) params = self._build_graph_name_params(graph_name) or None response = self.http_client.delete(self._build_url(graph_name), params=params) response.raise_for_status() @@ -412,9 +424,7 @@ def health(self) -> bool: f"Repository {self._identifier} is not healthy. {err.response.status_code} - {err.response.text}" ) - def size( - self, graph_name: IdentifiedNode | Iterable[IdentifiedNode] | str | None = None - ) -> int: + def size(self, graph_name: URIRef | Iterable[URIRef] | str | None = None) -> int: """The number of statements in the repository or in the specified graph name. Parameters: @@ -431,6 +441,7 @@ def size( Raises: RepositoryFormatError: Fails to parse the repository size. 
""" + validate_graph_name(graph_name) params: dict[str, str] = {} build_context_param(params, graph_name) response = self.http_client.get( @@ -541,12 +552,16 @@ def get( subj: SubjectType = None, pred: PredicateType = None, obj: ObjectType = None, - graph_name: IdentifiedNode | Iterable[IdentifiedNode] | str | None = None, + graph_name: URIRef | Iterable[URIRef] | str | None = None, infer: bool = True, content_type: str | None = None, ) -> Graph | Dataset: """Get RDF statements from the repository matching the filtering parameters. + !!! Note + The terms for `subj`, `pred`, `obj` or `graph_name` cannot be + [`BNodes`][rdflib.term.BNode]. + Parameters: subj: Subject of the statement to filter by, or `None` to match all. pred: Predicate of the statement to filter by, or `None` to match all. @@ -568,6 +583,7 @@ def get( A [`Graph`][rdflib.graph.Graph] or [`Dataset`][rdflib.graph.Dataset] object with the repository namespace prefixes bound to it. """ + validate_no_bnodes(subj, pred, obj, graph_name) if content_type is None: content_type = "application/n-quads" headers = {"Accept": content_type} @@ -632,7 +648,7 @@ def upload( def overwrite( self, data: str | bytes | BinaryIO | Graph | Dataset, - graph_name: IdentifiedNode | Iterable[IdentifiedNode] | str | None = None, + graph_name: URIRef | Iterable[URIRef] | str | None = None, base_uri: str | None = None, content_type: str | None = None, ): @@ -652,7 +668,7 @@ def overwrite( `application/n-quads` when the value is `None`. 
""" stream, should_close = rdf_payload_to_stream(data) - + validate_graph_name(graph_name) try: headers = {"Content-Type": content_type or "application/n-quads"} params: dict[str, str] = {} @@ -675,10 +691,14 @@ def delete( subj: SubjectType = None, pred: PredicateType = None, obj: ObjectType = None, - graph_name: IdentifiedNode | Iterable[IdentifiedNode] | str | None = None, + graph_name: URIRef | Iterable[URIRef] | str | None = None, ) -> None: """Deletes statements from the repository matching the filtering parameters. + !!! Note + The terms for `subj`, `pred`, `obj` or `graph_name` cannot be + [`BNodes`][rdflib.term.BNode]. + Parameters: subj: Subject of the statement to filter by, or `None` to match all. pred: Predicate of the statement to filter by, or `None` to match all. @@ -690,6 +710,7 @@ def delete( To query just the default graph, use [`DATASET_DEFAULT_GRAPH_ID`][rdflib.graph.DATASET_DEFAULT_GRAPH_ID]. """ + validate_no_bnodes(subj, pred, obj, graph_name) params: dict[str, str] = {} build_context_param(params, graph_name) build_spo_param(params, subj, pred, obj) @@ -808,9 +829,7 @@ def ping(self): f"Transaction ping failed: {response.status_code} - {response.text}" ) - def size( - self, graph_name: IdentifiedNode | Iterable[IdentifiedNode] | str | None = None - ): + def size(self, graph_name: URIRef | Iterable[URIRef] | str | None = None): """The number of statements in the repository or in the specified graph name. Parameters: @@ -828,6 +847,7 @@ def size( RepositoryFormatError: Fails to parse the repository size. 
""" self._raise_for_closed() + validate_graph_name(graph_name) params = {"action": "SIZE"} build_context_param(params, graph_name) response = self.repo.http_client.put(self.url, params=params) @@ -913,12 +933,16 @@ def get( subj: SubjectType = None, pred: PredicateType = None, obj: ObjectType = None, - graph_name: IdentifiedNode | Iterable[IdentifiedNode] | str | None = None, + graph_name: URIRef | Iterable[URIRef] | str | None = None, infer: bool = True, content_type: str | None = None, ) -> Graph | Dataset: """Get RDF statements from the repository matching the filtering parameters. + !!! Note + The terms for `subj`, `pred`, `obj` or `graph_name` cannot be + [`BNodes`][rdflib.term.BNode]. + Parameters: subj: Subject of the statement to filter by, or `None` to match all. pred: Predicate of the statement to filter by, or `None` to match all. @@ -940,6 +964,7 @@ def get( A [`Graph`][rdflib.graph.Graph] or [`Dataset`][rdflib.graph.Dataset] object with the repository namespace prefixes bound to it. 
""" + validate_no_bnodes(subj, pred, obj, graph_name) if content_type is None: content_type = "application/n-quads" headers = {"Accept": content_type} diff --git a/rdflib/contrib/rdf4j/util.py b/rdflib/contrib/rdf4j/util.py index ea370ad6d..9d99a8007 100644 --- a/rdflib/contrib/rdf4j/util.py +++ b/rdflib/contrib/rdf4j/util.py @@ -7,7 +7,7 @@ from rdflib.graph import DATASET_DEFAULT_GRAPH_ID, Dataset, Graph from rdflib.plugins.sparql.processor import prepareQuery -from rdflib.term import IdentifiedNode, URIRef +from rdflib.term import BNode, IdentifiedNode, URIRef if t.TYPE_CHECKING: from rdflib.contrib.rdf4j.client import ObjectType, PredicateType, SubjectType @@ -151,3 +151,32 @@ def build_sparql_query_accept_header(query: str, headers: dict[str, str]): headers["Accept"] = "application/n-triples" else: raise ValueError(f"Unsupported query type: {prepared_query.algebra.name}") + + +def validate_graph_name(graph_name: URIRef | t.Iterable[URIRef] | str | None): + if ( + isinstance(graph_name, BNode) + or isinstance(graph_name, t.Iterable) + and any(isinstance(x, BNode) for x in graph_name) + ): + raise ValueError("Graph name must not be a BNode.") + + +def validate_no_bnodes( + subj: SubjectType, + pred: PredicateType, + obj: ObjectType, + graph_name: URIRef | t.Iterable[URIRef] | str | None, +) -> None: + """Validate that the subject, predicate, and object are not BNodes.""" + if ( + isinstance(subj, BNode) + or isinstance(pred, BNode) + or isinstance(obj, BNode) + or isinstance(graph_name, BNode) + ): + raise ValueError( + "Subject, predicate, and object must not be a BNode: " + f"{subj}, {pred}, {obj}" + ) + validate_graph_name(graph_name) diff --git a/rdflib/plugins/stores/rdf4j.py b/rdflib/plugins/stores/rdf4j.py new file mode 100644 index 000000000..7f1bb5cad --- /dev/null +++ b/rdflib/plugins/stores/rdf4j.py @@ -0,0 +1,223 @@ +from __future__ import annotations + +from textwrap import dedent +from typing import Any, Generator, Iterable, Iterator, Mapping, 
Optional, Tuple + +from rdflib import Graph +from rdflib.contrib.rdf4j import has_httpx +from rdflib.contrib.rdf4j.exceptions import RepositoryNotFoundError +from rdflib.graph import ( + DATASET_DEFAULT_GRAPH_ID, + Dataset, + _ContextType, + _QuadType, + _TriplePatternType, + _TripleType, +) +from rdflib.store import VALID_STORE, Store +from rdflib.term import BNode, Node, URIRef, Variable + +if has_httpx: + from rdflib.contrib.rdf4j import RDF4JClient + + +def _inject_prefixes(query: str, extra_bindings: Mapping[str, Any]) -> str: + bindings = set(list(extra_bindings.items())) + if not bindings: + return query + return "\n".join( + [ + "\n".join(["PREFIX %s: <%s>" % (k, v) for k, v in bindings]), + "", # separate ns_bindings from query with an empty line + query, + ] + ) + + +def _node_to_sparql(node: Node) -> str: + if isinstance(node, BNode): + raise Exception( + "SPARQL-based stores do not support BNodes" + "See http://www.w3.org/TR/sparql11-query/#BGPsparqlBNodes" + ) + return node.n3() + + +def _default_repo_config(repository_id: str) -> str: + return dedent( + f""" + PREFIX config: + + [] a config:Repository ; + config:rep.id "{repository_id}" ; + config:rep.impl + [ + config:rep.type "openrdf:SailRepository" ; + config:sail.impl + [ + config:native.tripleIndexers "spoc,posc" ; + config:sail.defaultQueryEvaluationMode "STANDARD" ; + config:sail.iterationCacheSyncThreshold "10000" ; + config:sail.type "openrdf:NativeStore" ; + ] ; + ] ; + . 
+ """ + ) + + +class RDF4JStore(Store): + """An RDF4J store.""" + + context_aware = True + formula_aware = False + transaction_aware = False + graph_aware = True + + def __init__( + self, + base_url: str, + repository_id: str, + configuration: str | None = None, + auth: tuple[str, str] | None = None, + timeout: float = 30.0, + create: bool = False, + **kwargs, + ): + if configuration is None: + configuration = _default_repo_config(repository_id) + self._client = RDF4JClient(base_url, auth, timeout, **kwargs) + self._repository_id = repository_id + self._repo = None + self.open(configuration, create) + super().__init__() + + @property + def client(self): + return self._client + + @property + def repo(self): + if self._repo is None: + self._repo = self.client.repositories.get(self._repository_id) + return self._repo + + def open( + self, configuration: str | tuple[str, str] | None, create: bool = False + ) -> int | None: + try: + # Try connecting to the repository. + self.repo.health() + except RepositoryNotFoundError: + if create: + self.client.repositories.create(self._repository_id, configuration) + self.repo.health() + else: + raise Exception(f"Repository {self._repository_id} not found.") + + return VALID_STORE + + def close(self, commit_pending_transaction: bool = False) -> None: + self.client.close() + + def add( + self, + triple: _TripleType, + context: _ContextType | None = None, + quoted: bool = False, + ) -> None: + s, p, o = triple + graph_name = ( + "" + if context is None or context.identifier == DATASET_DEFAULT_GRAPH_ID + else context.identifier.n3() + ) + statement = f"{s.n3()} {p.n3()} {o.n3()} {graph_name} ." 
+ self.repo.upload(statement) + + def addN(self, quads: Iterable[_QuadType]) -> None: # noqa: N802 + statements = "" + for s, p, o, c in quads: + graph_name = ( + "" + if c is None or c.identifier == DATASET_DEFAULT_GRAPH_ID + else c.identifier.n3() + ) + statement = f"{s.n3()} {p.n3()} {o.n3()} {graph_name} .\n" + statements += statement + self.repo.upload(statements) + + def remove( + self, + triple: _TriplePatternType, + context: Optional[_ContextType] = None, + ) -> None: + s, p, o = triple + g = context.identifier if context is not None else None + self.repo.delete(s, p, o, g) + + def triples( + self, + triple_pattern: _TriplePatternType, + context: Optional[_ContextType] = None, + ) -> Iterator[Tuple[_TripleType, Iterator[Optional[_ContextType]]]]: + s, p, o = triple_pattern + graph_name = context.identifier if context is not None else None + result_graph = self.repo.get(s, p, o, graph_name) + if isinstance(result_graph, Dataset): + for s, p, o, g in result_graph: + yield (s, p, o), iter([Graph(self, identifier=g)]) + else: + # It's a Graph object. + for triple in result_graph: + # Returning None for _ContextType as it's not used by the caller. + yield triple, iter([None]) + + def contexts( + self, triple: Optional[_TripleType] = None + ) -> Generator[_ContextType, None, None]: + if triple is None: + for graph_name in self.repo.graph_names(): + yield Graph(self, identifier=graph_name) + else: + s, p, o = triple + params = ( + _node_to_sparql(s if s else Variable("s")), + _node_to_sparql(p if p else Variable("p")), + _node_to_sparql(o if o else Variable("o")), + ) + query = ( + "SELECT DISTINCT ?graph WHERE { GRAPH ?graph { %s %s %s } }" % params + ) + result = self.repo.query(query) + for row in result: + yield Graph(self, identifier=row["graph"]) + + def bind(self, prefix: str, namespace: URIRef, override: bool = True) -> None: + # Note: RDF4J namespaces always override. 
+ self.repo.namespaces.set(prefix, namespace) + + def prefix(self, namespace: URIRef) -> Optional[str]: + namespace_prefixes = dict( + [(x.namespace, x.prefix) for x in self.repo.namespaces.list()] + ) + return namespace_prefixes.get(str(namespace)) + + def namespace(self, prefix: str) -> Optional[URIRef]: + result = self.repo.namespaces.get(prefix) + return URIRef(result) if result is not None else None + + def namespaces(self) -> Iterator[Tuple[str, URIRef]]: + for result in self.repo.namespaces.list(): + yield result.prefix, URIRef(result.namespace) + + def add_graph(self, graph: Graph) -> None: + if graph.identifier != DATASET_DEFAULT_GRAPH_ID: + # Note: this is a no-op since RDF4J doesn't support empty named graphs. + self.repo.update(f"CREATE SILENT GRAPH {graph.identifier.n3()}") + + def remove_graph(self, graph: Graph) -> None: + self.repo.graphs.clear(graph.identifier) + + def __len__(self, context: _ContextType | None = None) -> int: + return self.repo.size(context if context is None else context.identifier) diff --git a/test/test_rdf4j/test_e2e/conftest.py b/test/test_rdf4j/test_e2e/conftest.py index 90de67eec..9a087f205 100644 --- a/test/test_rdf4j/test_e2e/conftest.py +++ b/test/test_rdf4j/test_e2e/conftest.py @@ -1,22 +1,31 @@ import pathlib +from importlib.util import find_spec import pytest -from testcontainers.core.container import DockerContainer -from testcontainers.core.image import DockerImage -from testcontainers.core.waiting_utils import wait_for_logs +from rdflib import Dataset from rdflib.contrib.rdf4j import has_httpx +from rdflib.contrib.rdf4j.exceptions import RepositoryNotFoundError +from rdflib.namespace import NamespaceManager +from rdflib.plugins.stores.rdf4j import RDF4JStore + +has_testcontainers = find_spec("testcontainers") is not None pytestmark = pytest.mark.skipif( - not has_httpx, reason="skipping rdf4j tests, httpx not available" + not (has_httpx and has_testcontainers), + reason="skipping rdf4j tests, httpx or 
testcontainers not available", ) -if has_httpx: +if has_httpx and has_testcontainers: + from testcontainers.core.container import DockerContainer + from testcontainers.core.image import DockerImage + from testcontainers.core.waiting_utils import wait_for_logs + from rdflib.contrib.rdf4j import RDF4JClient GRAPHDB_PORT = 7200 - @pytest.fixture(scope="function") + @pytest.fixture(scope="package") def graphdb_container(): with DockerImage(str(pathlib.Path(__file__).parent / "docker")) as image: container = DockerContainer(str(image)) @@ -33,6 +42,10 @@ def client(graphdb_container: DockerContainer): f"http://localhost:{port}/", auth=("admin", "admin") ) as client: yield client + try: + client.repositories.delete("test-repo") + except (RepositoryNotFoundError, RuntimeError): + pass @pytest.fixture(scope="function") def repo(client: RDF4JClient): @@ -45,3 +58,19 @@ def repo(client: RDF4JClient): repo = client.repositories.create("test-repo", config) assert repo.identifier == "test-repo" yield repo + client.repositories.delete("test-repo") + + @pytest.fixture(scope="function") + def ds(graphdb_container: DockerContainer): + port = graphdb_container.get_exposed_port(7200) + store = RDF4JStore( + f"http://localhost:{port}/", + "test-repo", + auth=("admin", "admin"), + create=True, + ) + ds = Dataset(store) + ds.namespace_manager = NamespaceManager(ds, "none") + yield ds + ds.store.client.repositories.delete("test-repo") # type: ignore[attr-defined] + ds.close() diff --git a/test/test_rdf4j/test_e2e/test_e2e_rdf4j_store.py b/test/test_rdf4j/test_e2e/test_e2e_rdf4j_store.py new file mode 100644 index 000000000..9f3c47a8b --- /dev/null +++ b/test/test_rdf4j/test_e2e/test_e2e_rdf4j_store.py @@ -0,0 +1,528 @@ +from __future__ import annotations + +import typing as t + +import pytest + +from rdflib import RDF, SKOS, BNode, Dataset, Graph, Literal, URIRef, Variable +from rdflib.contrib.rdf4j import has_httpx +from rdflib.graph import DATASET_DEFAULT_GRAPH_ID, _TripleChoiceType + 
+pytestmark = pytest.mark.skipif( + not has_httpx, reason="skipping rdf4j tests, httpx not available" +) + +if has_httpx and t.TYPE_CHECKING: + from rdflib.contrib.rdf4j.client import Repository + + +@pytest.mark.testcontainer +def test_rdf4j_store_add(ds: Dataset): + assert len(ds) == 0 + ds.add((URIRef("http://example.com/s"), RDF.type, SKOS.Concept)) + assert len(ds) == 1 + + +@pytest.mark.testcontainer +def test_rdf4j_store_addn(ds: Dataset): + assert len(ds) == 0 + ds.addN( + [ + ( + URIRef("http://example.com/s"), + RDF.type, + SKOS.Concept, + URIRef("urn:graph:a"), # type: ignore[list-item] + ), + ( + URIRef("http://example.com/s"), + SKOS.prefLabel, + Literal("Label"), + DATASET_DEFAULT_GRAPH_ID, # type: ignore[list-item] + ), + ( + URIRef("http://example.com/s"), + SKOS.definition, + Literal("Definition"), + URIRef("urn:graph:b"), # type: ignore[list-item] + ), + ] + ) + assert len(ds) == 3 + + +@pytest.mark.testcontainer +def test_graphs_method_default_graph(ds: Dataset): + repo: Repository = ds.store.repo # type: ignore[attr-defined] + data = f""" + <{RDF.type}> <{SKOS.Concept}> . + """ + # This returns 1 graph, the default graph, even when there are no triples. + graphs = list(ds.graphs()) + assert len(graphs) == 1 + assert graphs[0].identifier == DATASET_DEFAULT_GRAPH_ID + repo.upload(data) + graphs = list(ds.graphs()) + assert len(graphs) == 1 + graph = graphs[0] + assert graph.identifier == DATASET_DEFAULT_GRAPH_ID + assert len(graph) == 1 + + +@pytest.mark.testcontainer +def test_graphs_method_default_and_named_graphs(ds: Dataset): + repo: Repository = ds.store.repo # type: ignore[attr-defined] + data = f""" + <{RDF.type}> <{SKOS.Concept}> . + <{SKOS.prefLabel}> "Label" . + <{SKOS.prefLabel}> "Label" . + <{SKOS.definition}> "Definition" . + """ + # This returns 1 graph, the default graph, even when there are no triples. 
+ graphs = list(ds.graphs()) + assert len(graphs) == 1 + assert graphs[0].identifier == DATASET_DEFAULT_GRAPH_ID + repo.upload(data) + + # Retrieve graphs with no triple pattern. + graphs = list(ds.graphs()) + assert len(graphs) == 3 + + graph_a = graphs[0] + assert graph_a.identifier == URIRef("urn:graph:b") + assert len(graph_a) == 1 + + graph_b = graphs[1] + assert graph_b.identifier == URIRef("urn:graph:a") + assert len(graph_b) == 2 + + default_graph = graphs[2] + assert default_graph.identifier == DATASET_DEFAULT_GRAPH_ID + assert len(default_graph) == 1 + + # Retrieve graphs with a triple pattern. + graphs = list( + ds.graphs(triple=(URIRef("http://example.com/s"), RDF.type, SKOS.Concept)) + ) + # Note: it's returning 2 graphs instead of 1 because the Dataset class always + # includes the default graph. + # I don't think this is the correct behaviour. TODO: raise a ticket for this. + # What should happen is, ds.graphs() includes the default graph if the triple + # pattern is None. Otherwise, it should only include graphs that contain the triple. + assert len(graphs) == 2 + graph_a = graphs[0] + assert graph_a.identifier == URIRef("urn:graph:a") + assert len(graph_a) == 2 + + +@pytest.mark.testcontainer +def test_add_graph(ds: Dataset): + assert len(ds) == 0 + graphs = list(ds.graphs()) + assert len(graphs) == 1 + assert graphs[0].identifier == DATASET_DEFAULT_GRAPH_ID + + graph_name = URIRef("urn:graph:a") + + # Add a graph to the dataset using a URIRef. + # Note, this is a no-op since RDF4J doesn't support named graphs with no statements, + # which is why the length of the graphs is 1 (the default graph). + ds.add_graph(graph_name) + graphs = list(ds.graphs()) + assert len(graphs) == 1 + assert graphs[0].identifier == DATASET_DEFAULT_GRAPH_ID + + # Add a graph object to the dataset. + # This will create a new graph in RDF4J, along with the statements. 
+ graph = Graph(identifier=graph_name) + graph.add((URIRef("http://example.com/s"), RDF.type, SKOS.Concept)) + graph.add((URIRef("http://example.com/s"), SKOS.prefLabel, Literal("Label"))) + ds.add_graph(graph) + # Verify that the graph was added. + graphs = list(ds.graphs()) + assert len(graphs) == 2 + graph_a = graphs[0] + assert graphs[1].identifier == DATASET_DEFAULT_GRAPH_ID + assert graph_a.identifier == graph_name + assert len(graph_a) == 2 + + +@pytest.mark.testcontainer +def test_remove_graph(ds: Dataset): + repo: Repository = ds.store.repo # type: ignore[attr-defined] + data = f""" + <{RDF.type}> <{SKOS.Concept}> . + <{SKOS.prefLabel}> "Label" . + <{SKOS.prefLabel}> "Label" . + <{SKOS.definition}> "Definition" . + """ + # This returns 1 graph, the default graph, even when there are no triples. + graphs = list(ds.graphs()) + assert len(graphs) == 1 + assert graphs[0].identifier == DATASET_DEFAULT_GRAPH_ID + repo.upload(data) + assert len(ds) == 4 + + ds.remove_graph(URIRef("urn:graph:a")) + assert len(ds) == 2 + graphs = list(ds.graphs()) + assert len(graphs) == 2 + assert graphs[0].identifier == URIRef("urn:graph:b") + assert graphs[1].identifier == DATASET_DEFAULT_GRAPH_ID + + +@pytest.mark.testcontainer +def test_namespaces(ds: Dataset): + assert list(ds.namespaces()) == [] + + skos_namespace = URIRef(str(SKOS)) + ds.bind("skos", skos_namespace) + assert list(ds.namespaces()) == [("skos", skos_namespace)] + assert ds.store.namespace("skos") == skos_namespace + assert ds.store.namespace("foo") is None + assert ds.store.prefix(skos_namespace) == "skos" + assert ds.store.prefix(URIRef("http://example.com/")) is None + + +@pytest.mark.testcontainer +def test_triples(ds: Dataset): + repo: Repository = ds.store.repo # type: ignore[attr-defined] + data = f""" + <{RDF.type}> <{SKOS.Concept}> . + <{SKOS.prefLabel}> "Label" . + <{SKOS.prefLabel}> "Label" . + <{SKOS.definition}> "Definition" . 
+ """ + repo.upload(data) + assert len(ds) == 4 + + # We don't have default_union enabled, returns the single statement from the + # default graph. + triples = set(ds.triples((None, None, None))) + assert triples == { + (URIRef("http://example.com/s"), SKOS.definition, Literal("Definition")), + } + + # Enable default_union, returns all distinct statements. + ds.default_union = True + triples = set(ds.triples((None, None, None))) + assert triples == { + (URIRef("http://example.com/s"), RDF.type, SKOS.Concept), + (URIRef("http://example.com/s"), SKOS.prefLabel, Literal("Label")), + (URIRef("http://example.com/s"), SKOS.definition, Literal("Definition")), + } + + # Triple pattern, return only the matching statements. + triples = set(ds.triples((None, SKOS.prefLabel, None))) + assert triples == { + (URIRef("http://example.com/s"), SKOS.prefLabel, Literal("Label")), + } + + # Disable default_union, returns no statements. + ds.default_union = False + triples = set(ds.triples((None, SKOS.prefLabel, None))) + assert triples == set() + + # Triple pattern, return matching statements in the default graph. + triples = set(ds.triples((None, SKOS.definition, None))) + assert triples == { + (URIRef("http://example.com/s"), SKOS.definition, Literal("Definition")), + } + + +@pytest.mark.testcontainer +def test_quads(ds: Dataset): + repo: Repository = ds.store.repo # type: ignore[attr-defined] + data = f""" + <{RDF.type}> <{SKOS.Concept}> . + <{SKOS.prefLabel}> "Label" . + <{SKOS.prefLabel}> "Label" . + <{SKOS.definition}> "Definition" . 
+ """ + repo.upload(data) + assert len(ds) == 4 + + quads = set(ds.quads((None, None, None, DATASET_DEFAULT_GRAPH_ID))) # type: ignore[arg-type] + assert quads == { + ( + URIRef("http://example.com/s"), + SKOS.definition, + Literal("Definition"), + DATASET_DEFAULT_GRAPH_ID, + ), + } + + quads = set(ds.quads((None, None, None, URIRef("urn:graph:a")))) # type: ignore[arg-type] + assert quads == { + ( + URIRef("http://example.com/s"), + RDF.type, + SKOS.Concept, + URIRef("urn:graph:a"), + ), + ( + URIRef("http://example.com/s"), + SKOS.prefLabel, + Literal("Label"), + URIRef("urn:graph:a"), + ), + } + + quads = set(ds.quads((None, None, None, URIRef("urn:graph:b")))) # type: ignore[arg-type] + assert quads == { + ( + URIRef("http://example.com/s"), + SKOS.prefLabel, + Literal("Label"), + URIRef("urn:graph:b"), + ) + } + + +@pytest.mark.testcontainer +@pytest.mark.parametrize( + "s, p, o, g, expected_size", + [ + [None, None, None, None, 0], + [URIRef("http://example.com/s"), None, None, None, 0], + [None, RDF.type, None, None, 3], + [None, SKOS.prefLabel, None, None, 2], + [None, SKOS.prefLabel, None, URIRef("urn:graph:a"), 3], + [None, None, None, DATASET_DEFAULT_GRAPH_ID, 3], + ], +) +def test_remove(ds: Dataset, s, p, o, g, expected_size): + repo: Repository = ds.store.repo # type: ignore[attr-defined] + data = f""" + <{RDF.type}> <{SKOS.Concept}> . + <{SKOS.prefLabel}> "Label" . + <{SKOS.prefLabel}> "Label" . + <{SKOS.definition}> "Definition" . 
+ """ + repo.upload(data) + assert len(ds) == 4 + repo.delete(s, p, o, g) + assert len(ds) == expected_size + + +@pytest.mark.testcontainer +@pytest.mark.parametrize( + "default_union, triples_choices, expected_triples", + [ + [ + False, + (None, [SKOS.prefLabel, SKOS.definition], None), + { + ( + URIRef("http://example.com/s"), + SKOS.definition, + Literal("Definition"), + ), + }, + ], + [ + True, + (None, [SKOS.prefLabel, SKOS.definition], None), + { + (URIRef("http://example.com/s"), SKOS.prefLabel, Literal("Label")), + ( + URIRef("http://example.com/s"), + SKOS.definition, + Literal("Definition"), + ), + }, + ], + [ + True, + (None, [RDF.type, SKOS.prefLabel], None), + { + (URIRef("http://example.com/s"), RDF.type, SKOS.Concept), + (URIRef("http://example.com/s"), SKOS.prefLabel, Literal("Label")), + }, + ], + [ + True, + (None, [RDF.type, SKOS.definition], None), + { + (URIRef("http://example.com/s"), RDF.type, SKOS.Concept), + ( + URIRef("http://example.com/s"), + SKOS.definition, + Literal("Definition"), + ), + }, + ], + ], +) +def test_triples_choices_default_union_on( + ds: Dataset, + default_union: bool, + triples_choices: _TripleChoiceType, + expected_triples, +): + repo: Repository = ds.store.repo # type: ignore[attr-defined] + data = f""" + <{RDF.type}> <{SKOS.Concept}> . + <{SKOS.prefLabel}> "Label" . + <{SKOS.prefLabel}> "Label" . + <{SKOS.definition}> "Definition" . 
+ """ + repo.upload(data) + assert len(ds) == 4 + ds.default_union = default_union + triples = set(ds.triples_choices(triples_choices)) + assert triples == expected_triples + + +@pytest.mark.testcontainer +@pytest.mark.parametrize( + "default_union, query, expected_result_bindings", + [ + [ + False, + "select * where { ?s ?p ?o }", + {(URIRef("http://example.com/s"), SKOS.definition, Literal("Definition"))}, + ], + [ + True, + "select * where { ?s ?p ?o }", + { + (URIRef("http://example.com/s"), RDF.type, SKOS.Concept), + (URIRef("http://example.com/s"), SKOS.prefLabel, Literal("Label")), + ( + URIRef("http://example.com/s"), + SKOS.definition, + Literal("Definition"), + ), + }, + ], + ], +) +def test_query_default_graph_behaviour( + ds: Dataset, + default_union: bool, + query: str, + expected_result_bindings: list[dict[Variable, URIRef | BNode | Literal]], +): + repo: Repository = ds.store.repo # type: ignore[attr-defined] + data = f""" + <{RDF.type}> <{SKOS.Concept}> . + <{SKOS.prefLabel}> "Label" . + <{SKOS.prefLabel}> "Label" . + <{SKOS.definition}> "Definition" . + """ + repo.upload(data) + assert len(ds) == 4 + ds.default_union = default_union + result = ds.query(query) + assert set(tuple(x.values()) for x in result.bindings) == expected_result_bindings + + +@pytest.mark.testcontainer +def test_query_init_ns(ds: Dataset): + repo: Repository = ds.store.repo # type: ignore[attr-defined] + data = f""" + <{RDF.type}> <{SKOS.Concept}> . + <{RDF.type}> . + <{SKOS.prefLabel}> "Label" . + <{SKOS.prefLabel}> "Label" . + <{SKOS.definition}> "Definition" . + """ + repo.upload(data) + assert len(ds) == 5 + query = """ + select distinct ?s + where { + graph ?g { + ?s a ex:Term . 
+ } + } + """ + result = ds.query(query, initNs={"ex": "http://example.com/type/"}) + assert len(result) == 1 + assert set(tuple(x.values()) for x in result.bindings) == { + (URIRef("http://example.com/s"),) + } + + +@pytest.mark.testcontainer +def test_query_init_bindings(ds: Dataset): + repo: Repository = ds.store.repo # type: ignore[attr-defined] + data = f""" + <{RDF.type}> <{SKOS.Concept}> . + <{RDF.type}> . + <{SKOS.prefLabel}> "Label" . + <{SKOS.prefLabel}> "Label" . + <{SKOS.definition}> "Definition" . + """ + repo.upload(data) + assert len(ds) == 5 + + query = """ + SELECT ?o + WHERE { + GRAPH ?g { + ?s ?p ?o . + } + } + """ + result = ds.query(query, initBindings={"p": RDF.type}) + assert len(result) == 2 + assert set(tuple(x.values()) for x in result.bindings) == { + (SKOS.Concept,), + (URIRef("http://example.com/type/Term"),), + } + + +@pytest.mark.testcontainer +def test_query_update_delete_default_graph_triples(ds: Dataset): + repo: Repository = ds.store.repo # type: ignore[attr-defined] + data = f""" + <{RDF.type}> <{SKOS.Concept}> . + <{RDF.type}> . + <{SKOS.prefLabel}> "Label" . + <{SKOS.prefLabel}> "Label" . + <{SKOS.definition}> "Definition" . + """ + repo.upload(data) + assert len(ds) == 5 + + query = """ + DELETE { + ?s ?p ?o + } + WHERE { + ?s ?p ?o + } + """ + ds.update(query) + assert len(ds) == 4 + + +@pytest.mark.testcontainer +def test_query_update(ds: Dataset): + repo: Repository = ds.store.repo # type: ignore[attr-defined] + data = f""" + <{RDF.type}> <{SKOS.Concept}> . + <{RDF.type}> . + <{SKOS.prefLabel}> "Label" . + <{SKOS.prefLabel}> "Label" . + <{SKOS.definition}> "Definition" . 
+ """ + repo.upload(data) + assert len(ds) == 5 + + query = """ + DELETE { + GRAPH ?g { + ?s ?p ?o + } + } + WHERE { + GRAPH ?g { + ?s ?p ?o + } + } + """ + ds.update(query) + assert len(ds) == 1 diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_management.py b/test/test_rdf4j/test_e2e/test_e2e_repo_management.py index ed25eb25a..dba939156 100644 --- a/test/test_rdf4j/test_e2e/test_e2e_repo_management.py +++ b/test/test_rdf4j/test_e2e/test_e2e_repo_management.py @@ -176,3 +176,41 @@ def raise_for_status(self): monkeypatch.setattr(httpx.Client, "post", lambda *args, **kwargs: MockResponse()) with pytest.raises(RepositoryNotHealthyError): repo.health() + + +@pytest.mark.xfail( + reason="RDF4J REST API does not support referencing blank nodes directly." +) +def test_repo_delete_statement_with_bnode(client: RDF4JClient): + config_path = pathlib.Path(__file__).parent / "repo-configs/test-repo-config.ttl" + with open(config_path) as file: + config = file.read() + + repo = client.repositories.create("test-repo", config) + assert repo.identifier == "test-repo" + assert repo.health() + with open(pathlib.Path(__file__).parent.parent / "data/quads-2.nq", "rb") as file: + repo.overwrite(file) + + assert repo.size() == 1 + repo.delete(subj=BNode("b-test")) # type: ignore[arg-type] + assert repo.size() == 0 + + +@pytest.mark.xfail( + reason="RDF4J REST API does not support referencing blank nodes directly." 
+) +def test_repo_delete_statement_with_bnode_graph(client: RDF4JClient): + config_path = pathlib.Path(__file__).parent / "repo-configs/test-repo-config.ttl" + with open(config_path) as file: + config = file.read() + + repo = client.repositories.create("test-repo", config) + assert repo.identifier == "test-repo" + assert repo.health() + with open(pathlib.Path(__file__).parent.parent / "data/quads-3.nq", "rb") as file: + repo.overwrite(file) + + assert repo.size() == 1 + repo.delete(subj=BNode("graph")) # type: ignore[arg-type] + assert repo.size() == 0 diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_query.py b/test/test_rdf4j/test_e2e/test_e2e_repo_query.py index 6d28e3aa4..1b58165e7 100644 --- a/test/test_rdf4j/test_e2e/test_e2e_repo_query.py +++ b/test/test_rdf4j/test_e2e/test_e2e_repo_query.py @@ -15,6 +15,7 @@ from rdflib.contrib.rdf4j.client import Repository +@pytest.mark.testcontainer def test_e2e_repo_query(repo: Repository): path = str(Path(__file__).parent.parent / "data/quads-1.nq") repo.overwrite(path) diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py b/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py index edece3d16..a4d9bb0ac 100644 --- a/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py +++ b/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py @@ -16,6 +16,7 @@ from rdflib.contrib.rdf4j.client import Repository, Transaction +@pytest.mark.testcontainer def test_e2e_repo_transaction(repo: Repository): path = str(Path(__file__).parent.parent / "data/quads-1.nq") repo.overwrite(path) @@ -56,6 +57,7 @@ def test_e2e_repo_transaction(repo: Repository): assert repo.size() == 3 +@pytest.mark.testcontainer def test_e2e_repo_transaction_delete(repo: Repository): path = str(Path(__file__).parent.parent / "data/quads-1.nq") repo.overwrite(path) @@ -70,6 +72,7 @@ def test_e2e_repo_transaction_delete(repo: Repository): assert txn.size("urn:graph:a2") == 0 +@pytest.mark.testcontainer def test_e2e_repo_transaction_update(repo: 
Repository): path = str(Path(__file__).parent.parent / "data/quads-1.nq") repo.overwrite(path) @@ -82,6 +85,7 @@ def test_e2e_repo_transaction_update(repo: Repository): assert txn.size("urn:graph:a2") == 1 +@pytest.mark.testcontainer def test_e2e_repo_transaction_get(repo: Repository): path = str(Path(__file__).parent.parent / "data/quads-1.nq") repo.overwrite(path) diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_update.py b/test/test_rdf4j/test_e2e/test_e2e_repo_update.py index 78f132c0a..12313b901 100644 --- a/test/test_rdf4j/test_e2e/test_e2e_repo_update.py +++ b/test/test_rdf4j/test_e2e/test_e2e_repo_update.py @@ -14,6 +14,7 @@ from rdflib.contrib.rdf4j.client import Repository +@pytest.mark.testcontainer def test_e2e_repo_query(repo: Repository): path = str(Path(__file__).parent.parent / "data/quads-1.nq") repo.overwrite(path) diff --git a/test/test_rdf4j/test_e2e/test_graphdb/conftest.py b/test/test_rdf4j/test_e2e/test_graphdb/conftest.py index 97e741ef1..492d40bf9 100644 --- a/test/test_rdf4j/test_e2e/test_graphdb/conftest.py +++ b/test/test_rdf4j/test_e2e/test_graphdb/conftest.py @@ -1,13 +1,19 @@ +from importlib.util import find_spec + import pytest -from testcontainers.core.container import DockerContainer from rdflib.contrib.rdf4j import has_httpx +has_testcontainers = find_spec("testcontainers") is not None + pytestmark = pytest.mark.skipif( - not has_httpx, reason="skipping rdf4j tests, httpx not available" + not (has_httpx and has_testcontainers), + reason="skipping rdf4j tests, httpx or testcontainers not available", ) -if has_httpx: +if has_httpx and has_testcontainers: + from testcontainers.core.container import DockerContainer + from rdflib.contrib.graphdb import GraphDBClient @pytest.fixture(scope="function") diff --git a/test/test_rdf4j/test_unit/repository/test_repo_delete.py b/test/test_rdf4j/test_unit/repository/test_repo_delete.py index 390c0aa95..287cdb224 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_delete.py +++ 
b/test/test_rdf4j/test_unit/repository/test_repo_delete.py @@ -7,7 +7,7 @@ from rdflib.contrib.rdf4j import has_httpx from rdflib.graph import DATASET_DEFAULT_GRAPH_ID -from rdflib.term import BNode, IdentifiedNode, URIRef +from rdflib.term import URIRef pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" @@ -38,16 +38,6 @@ }, ], [None, None, None, {}], - [ - BNode("some-bnode"), - URIRef("http://example.com/p"), - BNode("some-bnode-2"), - { - "subj": "_:some-bnode", - "pred": "", - "obj": "_:some-bnode-2", - }, - ], ], ) def test_repo_delete_spo( @@ -76,17 +66,12 @@ def test_repo_delete_spo( [DATASET_DEFAULT_GRAPH_ID, "null"], ["http://example.com/graph", ""], [URIRef("http://example.com/graph"), ""], - [BNode("some-bnode"), "_:some-bnode"], - [ - [URIRef("http://example.com/graph"), BNode("some-bnode")], - ",_:some-bnode", - ], [None, None], ], ) def test_repo_delete_graph_name( repo: Repository, - graph_name: IdentifiedNode | t.Iterable[IdentifiedNode] | str | None, + graph_name: URIRef | t.Iterable[URIRef] | str | None, expected_graph_name_param: str, monkeypatch: pytest.MonkeyPatch, ): diff --git a/test/test_rdf4j/test_unit/repository/test_repo_get.py b/test/test_rdf4j/test_unit/repository/test_repo_get.py index b5a46c1e3..b1b8fe066 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_get.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_get.py @@ -8,7 +8,7 @@ from rdflib import Dataset, Graph from rdflib.contrib.rdf4j import has_httpx from rdflib.graph import DATASET_DEFAULT_GRAPH_ID -from rdflib.term import BNode, IdentifiedNode, URIRef +from rdflib.term import URIRef pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" @@ -98,18 +98,13 @@ def test_repo_content_type( [DATASET_DEFAULT_GRAPH_ID, "null"], ["http://example.com/graph", ""], [URIRef("http://example.com/graph"), ""], - [BNode("some-bnode"), "_:some-bnode"], - [ - 
[URIRef("http://example.com/graph"), BNode("some-bnode")], - ",_:some-bnode", - ], [None, None], ], ) def test_repo_get_graph_name( repo: Repository, monkeypatch: pytest.MonkeyPatch, - graph_name: IdentifiedNode | t.Iterable[IdentifiedNode] | str | None, + graph_name: URIRef | t.Iterable[URIRef] | str | None, expected_graph_name_param: str, ): """ @@ -177,16 +172,6 @@ def test_repo_get_infer( }, ], [None, None, None, {}], - [ - BNode("some-bnode"), - URIRef("http://example.com/p"), - BNode("some-bnode-2"), - { - "subj": "_:some-bnode", - "pred": "", - "obj": "_:some-bnode-2", - }, - ], ], ) def test_repo_get_spo( diff --git a/test/test_rdf4j/test_unit/repository/test_repo_overwrite.py b/test/test_rdf4j/test_unit/repository/test_repo_overwrite.py index e378c9d1d..f45cfa33c 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_overwrite.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_overwrite.py @@ -7,7 +7,7 @@ import pytest -from rdflib import BNode, Dataset, Graph, IdentifiedNode, URIRef +from rdflib import Dataset, Graph, URIRef from rdflib.contrib.rdf4j import has_httpx from rdflib.graph import DATASET_DEFAULT_GRAPH_ID @@ -131,17 +131,12 @@ def test_repo_overwrite_data( [DATASET_DEFAULT_GRAPH_ID, "null"], ["http://example.com/graph", ""], [URIRef("http://example.com/graph"), ""], - [BNode("some-bnode"), "_:some-bnode"], - [ - [URIRef("http://example.com/graph"), BNode("some-bnode")], - ",_:some-bnode", - ], [None, None], ], ) def test_repo_overwrite_graph_name( repo: Repository, - graph_name: IdentifiedNode | Iterable[IdentifiedNode] | str | None, + graph_name: URIRef | Iterable[URIRef] | str | None, expected_graph_name_param: str, monkeypatch: pytest.MonkeyPatch, ): diff --git a/test/test_rdf4j/test_unit/repository/test_repo_size.py b/test/test_rdf4j/test_unit/repository/test_repo_size.py index 3370ca9e3..5a7c4fb85 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_size.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_size.py @@ 
-8,7 +8,7 @@ from rdflib.contrib.rdf4j import has_httpx from rdflib.contrib.rdf4j.exceptions import RepositoryFormatError from rdflib.graph import DATASET_DEFAULT_GRAPH_ID -from rdflib.term import BNode, IdentifiedNode, URIRef +from rdflib.term import URIRef pytestmark = pytest.mark.skipif( not has_httpx, reason="skipping rdf4j tests, httpx not available" @@ -28,18 +28,13 @@ [DATASET_DEFAULT_GRAPH_ID, "null"], ["http://example.com/graph", ""], [URIRef("http://example.com/graph"), ""], - [BNode("some-bnode"), "_:some-bnode"], - [ - [URIRef("http://example.com/graph"), BNode("some-bnode")], - ",_:some-bnode", - ], [None, None], ], ) def test_repo_size_graph_name( repo: Repository, monkeypatch: pytest.MonkeyPatch, - graph_name: IdentifiedNode | t.Iterable[IdentifiedNode] | str | None, + graph_name: URIRef | t.Iterable[URIRef] | str | None, expected_graph_name_param: str, ): """ diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_get.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_get.py index 28e3c3f1d..9cce73d89 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_get.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_get.py @@ -5,7 +5,7 @@ import pytest -from rdflib import BNode, Dataset, Graph, IdentifiedNode, URIRef +from rdflib import Dataset, Graph, URIRef from rdflib.contrib.rdf4j import has_httpx from rdflib.graph import DATASET_DEFAULT_GRAPH_ID @@ -113,18 +113,13 @@ def test_repo_transaction_get_content_type( [DATASET_DEFAULT_GRAPH_ID, "null"], ["http://example.com/graph", ""], [URIRef("http://example.com/graph"), ""], - [BNode("some-bnode"), "_:some-bnode"], - [ - [URIRef("http://example.com/graph"), BNode("some-bnode")], - ",_:some-bnode", - ], [None, None], ], ) def test_repo_transaction_get_graph_name( txn: Transaction, monkeypatch: pytest.MonkeyPatch, - graph_name: IdentifiedNode | t.Iterable[IdentifiedNode] | str | None, + graph_name: URIRef | t.Iterable[URIRef] | str | None, 
expected_graph_name_param: str, ): """ @@ -201,17 +196,6 @@ def test_repo_transaction_get_infer( "action": "GET", }, ], - [ - BNode("some-bnode"), - URIRef("http://example.com/p"), - BNode("some-bnode-2"), - { - "action": "GET", - "subj": "_:some-bnode", - "pred": "", - "obj": "_:some-bnode-2", - }, - ], ], ) def test_repo_transaction_get_spo( diff --git a/test/test_rdf4j/test_unit/util/test_rdf4j_util.py b/test/test_rdf4j/test_unit/util/test_rdf4j_util.py index 7fd742cf5..5ebff39e3 100644 --- a/test/test_rdf4j/test_unit/util/test_rdf4j_util.py +++ b/test/test_rdf4j/test_unit/util/test_rdf4j_util.py @@ -13,9 +13,10 @@ build_context_param, build_sparql_query_accept_header, rdf_payload_to_stream, + validate_no_bnodes, ) from rdflib.graph import DATASET_DEFAULT_GRAPH_ID, Dataset, Graph -from rdflib.term import BNode, URIRef +from rdflib.term import BNode, Literal, URIRef @pytest.mark.parametrize( @@ -73,3 +74,23 @@ def test_build_sparql_query_accept_header(monkeypatch: pytest.MonkeyPatch): with pytest.raises(ValueError, match="Unsupported query type: InvalidQueryType"): build_sparql_query_accept_header("blah", {}) + + +@pytest.mark.parametrize( + "subj, pred, obj, graph_name", + [ + [BNode(), None, None, None], + [None, BNode(), None, None], + [None, None, BNode(), None], + [None, None, None, BNode()], + [None, None, None, [BNode(), URIRef("http://example.com/graph")]], + ], +) +def test_validate_no_bnodes( + subj: URIRef | None, + pred: URIRef | None, + obj: URIRef | Literal | None, + graph_name, +): + with pytest.raises(ValueError, match="must not be a BNode"): + validate_no_bnodes(subj, pred, obj, graph_name) diff --git a/tox.ini b/tox.ini index 87f322453..e5ebef9de 100644 --- a/tox.ini +++ b/tox.ini @@ -22,6 +22,7 @@ commands_pre = py3{8,9,10,11,12,13,14}: python -c 'import os; print("\n".join(f"{key}={value}" for key, value in os.environ.items()))' py3{8,9,10,11,12,13,14}: poetry check --lock py3{8,9,10,11,12,13,14}: poetry install --no-root --only=main 
--only=dev --only=lint --only=tests {env:POETRY_ARGS_docs:} {env:POETRY_ARGS_extensive:} {env:POETRY_ARGS_lxml:} {env:POETRY_ARGS:} --sync + py3{9,10,11,12,13,14}: poetry install --extras rdf4j --sync commands = min: python -c 'import sys; print("min qualifier not supported on this environment"); sys.exit(1);' poetry config --list From 174e9f91455d39bc94cfe654d76eaa037d4db853 Mon Sep 17 00:00:00 2001 From: Edmond Chuc Date: Fri, 14 Nov 2025 17:56:50 +1000 Subject: [PATCH 54/54] chore: rename RDF4J's NamespaceManager class --- rdflib/contrib/rdf4j/client.py | 8 ++++---- .../test_rdf4j/test_unit/repository/test_repo_get.py | 10 +++++----- .../test_unit/repository/test_repo_namespace_get.py | 4 ++-- .../repository/test_repo_transaction_get.py | 12 ++++++------ 4 files changed, 17 insertions(+), 17 deletions(-) diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py index 7608e4299..41b97b4e9 100644 --- a/rdflib/contrib/rdf4j/client.py +++ b/rdflib/contrib/rdf4j/client.py @@ -50,7 +50,7 @@ class NamespaceListingResult: namespace: str -class NamespaceManager: +class RDF4JNamespaceManager: """A namespace manager for RDF4J repositories. 
Parameters: @@ -365,7 +365,7 @@ class Repository: def __init__(self, identifier: str, http_client: httpx.Client): self._identifier = identifier self._http_client = http_client - self._namespace_manager: NamespaceManager | None = None + self._namespace_manager: RDF4JNamespaceManager | None = None self._graph_store_manager: GraphStoreManager | None = None @property @@ -378,10 +378,10 @@ def identifier(self): return self._identifier @property - def namespaces(self) -> NamespaceManager: + def namespaces(self) -> RDF4JNamespaceManager: """Namespace manager for the repository.""" if self._namespace_manager is None: - self._namespace_manager = NamespaceManager( + self._namespace_manager = RDF4JNamespaceManager( self.identifier, self.http_client ) return self._namespace_manager diff --git a/test/test_rdf4j/test_unit/repository/test_repo_get.py b/test/test_rdf4j/test_unit/repository/test_repo_get.py index b1b8fe066..47f9e6d01 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_get.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_get.py @@ -18,9 +18,9 @@ import httpx from rdflib.contrib.rdf4j.client import ( - NamespaceManager, ObjectType, PredicateType, + RDF4JNamespaceManager, Repository, SubjectType, ) @@ -79,7 +79,7 @@ def test_repo_content_type( mock_response = Mock(spec=httpx.Response, text=data) mock_httpx_get = Mock(return_value=mock_response) monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) - monkeypatch.setattr(NamespaceManager, "list", lambda _: []) + monkeypatch.setattr(RDF4JNamespaceManager, "list", lambda _: []) result = repo.get(content_type=content_type) headers = {"Accept": content_type or "application/n-quads"} @@ -114,7 +114,7 @@ def test_repo_get_graph_name( mock_response = Mock(spec=httpx.Response, text="") mock_httpx_get = Mock(return_value=mock_response) monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) - monkeypatch.setattr(NamespaceManager, "list", lambda _: []) + monkeypatch.setattr(RDF4JNamespaceManager, "list", lambda 
_: []) headers = { "Accept": "application/n-quads", } @@ -141,7 +141,7 @@ def test_repo_get_infer( mock_response = Mock(spec=httpx.Response, text="") mock_httpx_get = Mock(return_value=mock_response) monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) - monkeypatch.setattr(NamespaceManager, "list", lambda _: []) + monkeypatch.setattr(RDF4JNamespaceManager, "list", lambda _: []) headers = { "Accept": "application/n-quads", } @@ -186,7 +186,7 @@ def test_repo_get_spo( mock_response = Mock(spec=httpx.Response, text="") mock_httpx_get = Mock(return_value=mock_response) monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) - monkeypatch.setattr(NamespaceManager, "list", lambda _: []) + monkeypatch.setattr(RDF4JNamespaceManager, "list", lambda _: []) headers = { "Accept": "application/n-quads", } diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py index f2a586dab..5d1d6a19e 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py @@ -16,7 +16,7 @@ from rdflib.contrib.rdf4j.client import ( NamespaceListingResult, - NamespaceManager, + RDF4JNamespaceManager, Repository, ) @@ -69,7 +69,7 @@ def test_repo_get_with_namespace_binding( mock_httpx_get = Mock(return_value=mock_response) monkeypatch.setattr(httpx.Client, "get", mock_httpx_get) monkeypatch.setattr( - NamespaceManager, + RDF4JNamespaceManager, "list", lambda _: [ NamespaceListingResult(prefix="test", namespace="http://example.org/test/") diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_get.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_get.py index 9cce73d89..a853977bb 100644 --- a/test/test_rdf4j/test_unit/repository/test_repo_transaction_get.py +++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_get.py @@ -17,9 +17,9 @@ import httpx from rdflib.contrib.rdf4j.client import ( - 
NamespaceManager, ObjectType, PredicateType, + RDF4JNamespaceManager, SubjectType, Transaction, ) @@ -32,7 +32,7 @@ def test_repo_transaction_get(txn: Transaction, monkeypatch: pytest.MonkeyPatch) ) mock_httpx_put = Mock(return_value=mock_response) monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) - monkeypatch.setattr(NamespaceManager, "list", lambda _: []) + monkeypatch.setattr(RDF4JNamespaceManager, "list", lambda _: []) txn.get(pred=URIRef("http://example.org/p")) mock_httpx_put.assert_called_once_with( txn.url, @@ -94,7 +94,7 @@ def test_repo_transaction_get_content_type( mock_response = Mock(spec=httpx.Response, text=data) mock_httpx_put = Mock(return_value=mock_response) monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) - monkeypatch.setattr(NamespaceManager, "list", lambda _: []) + monkeypatch.setattr(RDF4JNamespaceManager, "list", lambda _: []) result = txn.get(content_type=content_type) headers = {"Accept": content_type or "application/n-quads"} @@ -129,7 +129,7 @@ def test_repo_transaction_get_graph_name( mock_response = Mock(spec=httpx.Response, text="") mock_httpx_put = Mock(return_value=mock_response) monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) - monkeypatch.setattr(NamespaceManager, "list", lambda _: []) + monkeypatch.setattr(RDF4JNamespaceManager, "list", lambda _: []) headers = { "Accept": "application/n-quads", } @@ -157,7 +157,7 @@ def test_repo_transaction_get_infer( mock_response = Mock(spec=httpx.Response, text="") mock_httpx_put = Mock(return_value=mock_response) monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) - monkeypatch.setattr(NamespaceManager, "list", lambda _: []) + monkeypatch.setattr(RDF4JNamespaceManager, "list", lambda _: []) headers = { "Accept": "application/n-quads", } @@ -210,7 +210,7 @@ def test_repo_transaction_get_spo( mock_response = Mock(spec=httpx.Response, text="") mock_httpx_put = Mock(return_value=mock_response) monkeypatch.setattr(httpx.Client, "put", mock_httpx_put) - 
monkeypatch.setattr(NamespaceManager, "list", lambda _: []) + monkeypatch.setattr(RDF4JNamespaceManager, "list", lambda _: []) headers = { "Accept": "application/n-quads", }