PYTHON-4928 Convert CSFLE spec tests to unified test format #2520
@@ -255,6 +255,10 @@ def _handle_placeholders(self, spec: dict, current: dict, path: str) -> Any:
                 raise ValueError(f"Could not find a placeholder value for {path}")
             return PLACEHOLDER_MAP[path]

+        # Distinguish between temp and non-temp aws credentials.
+        if path.endswith("/kmsProviders/aws") and "sessionToken" in current:
+            path = path.replace("aws", "aws_temp")
+
         for key in list(current):
             value = current[key]
             if isinstance(value, dict):
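The path rewrite above matters because the runner keys temporary and long-lived AWS credentials under separate placeholder paths. A minimal sketch of how such a placeholder map could be populated, assuming illustrative environment variable names (not the driver's actual ones):

import os

# Illustrative placeholder values keyed by the pointer-style paths the runner
# builds while walking the entity spec; "aws_temp" differs from "aws" only by
# carrying a session token.
EXAMPLE_PLACEHOLDER_MAP = {
    "/autoEncryptOpts/kmsProviders/aws": {
        "accessKeyId": os.environ.get("EXAMPLE_AWS_KEY", ""),
        "secretAccessKey": os.environ.get("EXAMPLE_AWS_SECRET", ""),
    },
    "/autoEncryptOpts/kmsProviders/aws_temp": {
        "accessKeyId": os.environ.get("EXAMPLE_AWS_TEMP_KEY", ""),
        "secretAccessKey": os.environ.get("EXAMPLE_AWS_TEMP_SECRET", ""),
        "sessionToken": os.environ.get("EXAMPLE_AWS_TEMP_SESSION_TOKEN", ""),
    },
}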
@@ -275,10 +279,8 @@ async def _create_entity(self, entity_spec, uri=None):
             if "autoEncryptOpts" in spec:
                 auto_encrypt_opts = spec["autoEncryptOpts"].copy()
                 auto_encrypt_kwargs: dict = dict(kms_tls_options=DEFAULT_KMS_TLS)
-                kms_providers = ALL_KMS_PROVIDERS.copy()
+                kms_providers = auto_encrypt_opts.pop("kmsProviders", ALL_KMS_PROVIDERS.copy())
                 key_vault_namespace = auto_encrypt_opts.pop("keyVaultNamespace")
-                for provider_name, provider_value in auto_encrypt_opts.pop("kmsProviders").items():
-                    kms_providers[provider_name].update(provider_value)
                 extra_opts = auto_encrypt_opts.pop("extraOptions", {})
                 for key, value in extra_opts.items():
                     auto_encrypt_kwargs[camel_to_snake(key)] = value
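This simplification also changes the semantics slightly: a kmsProviders map supplied by the spec now replaces the default map outright rather than being merged into it provider by provider, and the defaults are only used when the spec omits kmsProviders entirely. A small self-contained illustration of the two behaviours (provider contents are made up):

ALL_KMS_PROVIDERS = {
    "aws": {"accessKeyId": "id", "secretAccessKey": "secret"},
    "local": {"key": b"\x00" * 96},
}
spec_opts = {"kmsProviders": {"local": {"key": b"\x01" * 96}}}

# Old behaviour: start from the defaults and update each named provider (merge).
merged = {name: dict(value) for name, value in ALL_KMS_PROVIDERS.items()}
for name, value in spec_opts["kmsProviders"].items():
    merged[name].update(value)
assert "aws" in merged  # providers the spec never mentions survive

# New behaviour: take the spec's map as-is, falling back to the defaults only
# when "kmsProviders" is absent.
replaced = spec_opts.get("kmsProviders", ALL_KMS_PROVIDERS)
assert "aws" not in replaced  # only providers named by the spec remain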
@@ -552,12 +554,16 @@ async def asyncSetUp(self):

     def maybe_skip_test(self, spec):
         # add any special-casing for skipping tests here
-        if "Client side error in command starting transaction" in spec["description"]:
+        class_name = self.__class__.__name__.lower()
+        description = spec["description"].lower()
+        if "Client side error in command starting transaction" in description:
             self.skipTest("Implement PYTHON-1894")
-        if "timeoutMS applied to entire download" in spec["description"]:
+        if "type=symbol" in description:
+            self.skipTest("PyMongo does not support the symbol type")
+        if "timeoutMS applied to entire download" in description:
             self.skipTest("PyMongo's open_download_stream does not cap the stream's lifetime")
         if any(
-            x in spec["description"]
+            x in description
             for x in [
                 "First insertOne is never committed",
                 "Second updateOne is never committed",
@@ -566,8 +572,6 @@ def maybe_skip_test(self, spec):
         ):
             self.skipTest("Implement PYTHON-4597")

-        class_name = self.__class__.__name__.lower()
-        description = spec["description"].lower()
         if "csot" in class_name:
             # Skip tests that are too slow to run on a given platform.
             slow_macos = [
@@ -785,6 +789,42 @@ async def _databaseOperation_createCommandCursor(self, target, **kwargs):

         return cursor

+    async def _collectionOperation_assertIndexExists(self, target, **kwargs):
+        collection = self.client[kwargs["database_name"]][kwargs["collection_name"]]
+        index_names = [idx["name"] async for idx in await collection.list_indexes()]
+        self.assertIn(kwargs["index_name"], index_names)
+
+    async def _collectionOperation_assertIndexNotExists(self, target, **kwargs):
+        collection = self.client[kwargs["database_name"]][kwargs["collection_name"]]
+        async for index in await collection.list_indexes():
+            self.assertNotEqual(kwargs["indexName"], index["name"])
+
+    async def _collectionOperation_assertCollectionExists(self, target, **kwargs):
+        database_name = kwargs["database_name"]
+        collection_name = kwargs["collection_name"]
+        collection_name_list = list(
+            await self.client.get_database(database_name).list_collection_names()
+        )
+        self.assertIn(collection_name, collection_name_list)
+
+    async def _databaseOperation_assertIndexExists(self, target, **kwargs):
+        collection = self.client[kwargs["database_name"]][kwargs["collection_name"]]
+        index_names = [idx["name"] async for idx in await collection.list_indexes()]
+        self.assertIn(kwargs["index_name"], index_names)
+
+    async def _databaseOperation_assertIndexNotExists(self, target, **kwargs):
+        collection = self.client[kwargs["database_name"]][kwargs["collection_name"]]
+        async for index in await collection.list_indexes():
+            self.assertNotEqual(kwargs["indexName"], index["name"])
+
+    async def _databaseOperation_assertCollectionExists(self, target, **kwargs):
+        database_name = kwargs["database_name"]
+        collection_name = kwargs["collection_name"]
+        collection_name_list = list(
+            await self.client.get_database(database_name).list_collection_names()
+        )
+        self.assertIn(collection_name, collection_name_list)
+
     async def kill_all_sessions(self):
         if getattr(self, "client", None) is None:
             return
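These assertion helpers appear to follow the runner's dispatch convention, where an operation is first looked up as a special-case _<entityType>Operation_<operationName> method before falling back to calling the snake_cased method on the target entity. A rough, self-contained sketch of that convention (the function and helper names here are illustrative, not the runner's exact code):

import re

def camel_to_snake(camel: str) -> str:
    # Minimal stand-in for the module's helper of the same name.
    return re.sub(r"([A-Z])", r"_\1", camel).lower()

async def dispatch_operation(runner, entity_type: str, target, op_name: str, **kwargs):
    # Prefer a special-case handler such as _collectionOperation_assertIndexExists.
    special = getattr(runner, f"_{entity_type}Operation_{op_name}", None)
    if special is not None:
        return await special(target, **kwargs)
    # Otherwise call the corresponding driver method on the target entity.
    return await getattr(target, camel_to_snake(op_name))(**kwargs)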
@@ -0,0 +1,38 @@
{
  "fields": [
    {
      "keyId": {
        "$binary": {
          "base64": "EjRWeBI0mHYSNBI0VniQEg==",
          "subType": "04"
        }
      },
      "path": "encryptedText",
      "bsonType": "string",
      "queries": [
        {
          "queryType": "prefixPreview",
          "strMinQueryLength": {
            "$numberInt": "2"
          },
          "strMaxQueryLength": {
            "$numberInt": "10"
          },
          "caseSensitive": true,
          "diacriticSensitive": true
        },
        {
          "queryType": "suffixPreview",
          "strMinQueryLength": {
            "$numberInt": "2"
          },
          "strMaxQueryLength": {
            "$numberInt": "10"
          },
          "caseSensitive": true,
          "diacriticSensitive": true
        }
      ]
    }
  ]
}
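Documents like this are what a test (or an application) supplies as the encryptedFields for a Queryable Encryption collection. A minimal sketch of wiring such a document into auto encryption, assuming a hypothetical file name and placeholder key material and namespaces:

from bson import json_util
from pymongo import AsyncMongoClient
from pymongo.encryption_options import AutoEncryptionOpts

# Hypothetical file name; json_util.loads converts the "$binary" keyId into a
# real BSON Binary. Real use needs a data key whose id matches that keyId.
with open("encryptedFields-prefix-suffix.json") as f:
    encrypted_fields = json_util.loads(f.read())

opts = AutoEncryptionOpts(
    kms_providers={"local": {"key": b"\x00" * 96}},  # placeholder 96-byte local key
    key_vault_namespace="keyvault.datakeys",
    encrypted_fields_map={"db.coll": encrypted_fields},
)
client = AsyncMongoClient(auto_encryption_opts=opts)

Actually running this also requires the libmongocrypt/crypt_shared dependencies that automatic encryption needs; the sketch only shows where an encryptedFields document plugs in.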
@@ -0,0 +1,30 @@
{
  "fields": [
    {
      "keyId": {
        "$binary": {
          "base64": "EjRWeBI0mHYSNBI0VniQEg==",
          "subType": "04"
        }
      },
      "path": "encryptedText",
      "bsonType": "string",
      "queries": [
        {
          "queryType": "substringPreview",
          "strMaxLength": {
            "$numberInt": "10"
          },
          "strMinQueryLength": {
            "$numberInt": "2"
          },
          "strMaxQueryLength": {
            "$numberInt": "10"
          },
          "caseSensitive": true,
          "diacriticSensitive": true
        }
      ]
    }
  ]
}
@@ -0,0 +1,14 @@
{
  "fields": [
    {
      "keyId": {
        "$binary": {
          "base64": "LOCALAAAAAAAAAAAAAAAAA==",
          "subType": "04"
        }
      },
      "path": "foo",
      "bsonType": "string"
    }
  ]
}
@@ -0,0 +1,3 @@
{
  "foo": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
}