diff --git a/README.md b/README.md index 6db1b7f6..5c1fecfe 100644 --- a/README.md +++ b/README.md @@ -53,7 +53,7 @@ db = client.db("test", username="root", password="passwd") students = db.create_collection("students") # Add a persistent index to the collection. -students.add_persistent_index(fields=["name"], unique=True) +students.add_index({'type': 'persistent', 'fields': ['name'], 'unique': True}) # Insert new documents into the collection. students.insert({"name": "jane", "age": 39}) diff --git a/arango/client.py b/arango/client.py index 0ccaa19f..12f2bf11 100644 --- a/arango/client.py +++ b/arango/client.py @@ -199,7 +199,6 @@ def db( auth_method: str = "basic", user_token: Optional[str] = None, superuser_token: Optional[str] = None, - verify_certificate: bool = True, ) -> StandardDatabase: """Connect to an ArangoDB database and return the database API wrapper. @@ -228,8 +227,6 @@ def db( are ignored. This token is not refreshed automatically. Token expiry will not be checked. :type superuser_token: str - :param verify_certificate: Verify TLS certificates. - :type verify_certificate: bool :return: Standard database API wrapper. 
:rtype: arango.database.StandardDatabase :raise arango.exceptions.ServerConnectionError: If **verify** was set diff --git a/arango/collection.py b/arango/collection.py index 820c5200..054b0a1e 100644 --- a/arango/collection.py +++ b/arango/collection.py @@ -2,6 +2,7 @@ from numbers import Number from typing import List, Optional, Sequence, Tuple, Union +from warnings import warn from arango.api import ApiGroup from arango.connection import Connection @@ -1074,7 +1075,7 @@ def build_coord_str_from_index(index: Json) -> str: FILTER GEO_CONTAINS(rect, {coord_str}) LIMIT {skip_val}, {limit_val} RETURN doc - """ + """ # noqa: E201 E202 bind_vars = {"@collection": self.name} @@ -1259,11 +1260,27 @@ def response_handler(resp: Response) -> Json: return self._execute(request, response_handler) - def _add_index(self, data: Json) -> Result[Json]: - """Helper method for creating a new index. + def add_index(self, data: Json, formatter: bool = False) -> Result[Json]: + """Create an index. + + .. note:: - :param data: Index data. + As the `add_index` method was made available starting with driver + version 8, we have decided to deprecate the other `add_*_index` + methods, making this the official way to create indexes. While + the other methods still work, we recommend using this one instead. + Note that the other methods would use a formatter by default, + processing the index attributes returned by the server (for the + most part, it does a snake case conversion). This method skips that, + returning the raw index, except for the `id` attribute. However, + if you want the formatter to be applied for backwards compatibility, + you can set the `formatter` parameter to `True`. + + :param data: Index data. Must contain a "type" and "fields" attribute. :type data: dict + :param formatter: If set to True, apply formatting to the returned result. + Should only be used for backwards compatibility. + :type formatter: bool :return: New index details. 
:rtype: dict :raise arango.exceptions.IndexCreateError: If create fails. @@ -1278,7 +1295,7 @@ def _add_index(self, data: Json) -> Result[Json]: def response_handler(resp: Response) -> Json: if not resp.is_success: raise IndexCreateError(resp, request) - return format_index(resp.body) + return format_index(resp.body, formatter) return self._execute(request, response_handler) @@ -1297,8 +1314,7 @@ def add_hash_index( The index types `hash` and `skiplist` are aliases for the persistent index type and should no longer be used to create new indexes. The - aliases will be removed in a future version. Use - :func:`arango.collection.Collection.add_persistent_index` instead. + aliases will be removed in a future version. :param fields: Document fields to index. :type fields: [str] @@ -1318,6 +1334,9 @@ def add_hash_index( :rtype: dict :raise arango.exceptions.IndexCreateError: If create fails. """ + m = "add_hash_index is deprecated. Using add_index with {'type': 'hash'} instead." # noqa: E501 + warn(m, DeprecationWarning, stacklevel=2) + data: Json = {"type": "hash", "fields": fields} if unique is not None: @@ -1331,7 +1350,7 @@ def add_hash_index( if in_background is not None: data["inBackground"] = in_background - return self._add_index(data) + return self.add_index(data, formatter=True) def add_skiplist_index( self, @@ -1348,8 +1367,7 @@ def add_skiplist_index( The index types `hash` and `skiplist` are aliases for the persistent index type and should no longer be used to create new indexes. The - aliases will be removed in a future version. Use - :func:`arango.collection.Collection.add_persistent_index` instead. + aliases will be removed in a future version. :param fields: Document fields to index. :type fields: [str] @@ -1369,6 +1387,9 @@ def add_skiplist_index( :rtype: dict :raise arango.exceptions.IndexCreateError: If create fails. """ + m = "add_skiplist_index is deprecated. Using add_index with {'type': 'skiplist'} instead." 
# noqa: E501 + warn(m, DeprecationWarning, stacklevel=2) + data: Json = {"type": "skiplist", "fields": fields} if unique is not None: @@ -1382,7 +1403,7 @@ def add_skiplist_index( if in_background is not None: data["inBackground"] = in_background - return self._add_index(data) + return self.add_index(data, formatter=True) def add_geo_index( self, @@ -1414,6 +1435,9 @@ def add_geo_index( :rtype: dict :raise arango.exceptions.IndexCreateError: If create fails. """ + m = "add_geo_index is deprecated. Using add_index with {'type': 'geo'} instead." # noqa: E501 + warn(m, DeprecationWarning, stacklevel=2) + data: Json = {"type": "geo", "fields": fields} if geo_json is not None: @@ -1425,7 +1449,7 @@ def add_geo_index( if legacyPolygons is not None: data["legacyPolygons"] = legacyPolygons - return self._add_index(data) + return self.add_index(data, formatter=True) def add_fulltext_index( self, @@ -1434,9 +1458,11 @@ def add_fulltext_index( name: Optional[str] = None, in_background: Optional[bool] = None, ) -> Result[Json]: - """Create a new fulltext index. This method is deprecated - in ArangoDB 3.10 and will be removed in a future version - of the driver. + """Create a new fulltext index. + + .. warning:: + This method is deprecated since ArangoDB 3.10 and will be removed + in a future version of the driver. :param fields: Document fields to index. :type fields: [str] @@ -1450,6 +1476,9 @@ def add_fulltext_index( :rtype: dict :raise arango.exceptions.IndexCreateError: If create fails. """ + m = "add_fulltext_index is deprecated. Using add_index with {'type': 'fulltext'} instead." 
# noqa: E501 + warn(m, DeprecationWarning, stacklevel=2) + data: Json = {"type": "fulltext", "fields": fields} if min_length is not None: @@ -1459,7 +1488,7 @@ def add_fulltext_index( if in_background is not None: data["inBackground"] = in_background - return self._add_index(data) + return self.add_index(data, formatter=True) def add_persistent_index( self, @@ -1502,6 +1531,9 @@ def add_persistent_index( :rtype: dict :raise arango.exceptions.IndexCreateError: If create fails. """ + m = "add_persistent_index is deprecated. Using add_index with {'type': 'persistent'} instead." # noqa: E501 + warn(m, DeprecationWarning, stacklevel=2) + data: Json = {"type": "persistent", "fields": fields} if unique is not None: @@ -1517,7 +1549,7 @@ def add_persistent_index( if cacheEnabled is not None: data["cacheEnabled"] = cacheEnabled - return self._add_index(data) + return self.add_index(data, formatter=True) def add_ttl_index( self, @@ -1540,6 +1572,9 @@ def add_ttl_index( :rtype: dict :raise arango.exceptions.IndexCreateError: If create fails. """ + m = "add_ttl_index is deprecated. Using add_index with {'type': 'ttl'} instead." # noqa: E501 + warn(m, DeprecationWarning, stacklevel=2) + data: Json = {"type": "ttl", "fields": fields, "expireAfter": expiry_time} if name is not None: @@ -1547,7 +1582,7 @@ def add_ttl_index( if in_background is not None: data["inBackground"] = in_background - return self._add_index(data) + return self.add_index(data, formatter=True) def add_inverted_index( self, @@ -1602,6 +1637,9 @@ def add_inverted_index( :rtype: dict :raise arango.exceptions.IndexCreateError: If create fails. """ + m = "add_inverted_index is deprecated. Using add_index with {'type': 'inverted'} instead." 
# noqa: E501 + warn(m, DeprecationWarning, stacklevel=2) + data: Json = {"type": "inverted", "fields": fields} if name is not None: @@ -1631,90 +1669,7 @@ def add_inverted_index( if cache is not None: data["cache"] = cache - return self._add_index(data) - - def add_zkd_index( - self, - fields: Sequence[str], - field_value_types: str = "double", - name: Optional[str] = None, - unique: Optional[bool] = None, - in_background: Optional[bool] = None, - ) -> Result[Json]: - """Create a new ZKD Index. - - :param fields: Document fields to index. Unlike for other indexes the - order of the fields does not matter. - :type fields: Sequence[str] - :param field_value_types: The type of the field values. The only allowed - value is "double" at the moment. Defaults to "double". - :type field_value_types: str - :param name: Optional name for the index. - :type name: str | None - :param unique: Whether the index is unique. - :type unique: bool | None - :param in_background: Do not hold the collection lock. - :type in_background: bool | None - :return: New index details. - :rtype: dict - :raise arango.exceptions.IndexCreateError: If create fails. - """ - data: Json = { - "type": "zkd", - "fields": fields, - "fieldValueTypes": field_value_types, - } - - if unique is not None: - data["unique"] = unique - if name is not None: - data["name"] = name - if in_background is not None: - data["inBackground"] = in_background - - return self._add_index(data) - - def add_mdi_index( - self, - fields: Sequence[str], - field_value_types: str = "double", - name: Optional[str] = None, - unique: Optional[bool] = None, - in_background: Optional[bool] = None, - ) -> Result[Json]: - """Create a new MDI index, previously known as ZKD index. This method - is only usable with ArangoDB 3.12 and later. - - :param fields: Document fields to index. Unlike for other indexes the - order of the fields does not matter. - :type fields: Sequence[str] - :param field_value_types: The type of the field values. 
The only allowed - value is "double" at the moment. Defaults to "double". - :type field_value_types: str - :param name: Optional name for the index. - :type name: str | None - :param unique: Whether the index is unique. - :type unique: bool | None - :param in_background: Do not hold the collection lock. - :type in_background: bool | None - :return: New index details. - :rtype: dict - :raise arango.exceptions.IndexCreateError: If create fails. - """ - data: Json = { - "type": "mdi", - "fields": fields, - "fieldValueTypes": field_value_types, - } - - if unique is not None: - data["unique"] = unique - if name is not None: - data["name"] = name - if in_background is not None: - data["inBackground"] = in_background - - return self._add_index(data) + return self.add_index(data, formatter=True) def delete_index(self, index_id: str, ignore_missing: bool = False) -> Result[bool]: """Delete an index. @@ -2076,7 +2031,7 @@ def update_match( {f"LIMIT {limit}" if limit is not None else ""} UPDATE doc WITH @body IN @@collection OPTIONS {{ keepNull: @keep_none, mergeObjects: @merge {sync_val} }} - """ + """ # noqa: E201 E202 bind_vars = { "@collection": self.name, @@ -2261,7 +2216,7 @@ def replace_match( {f"LIMIT {limit}" if limit is not None else ""} REPLACE doc WITH @body IN @@collection {f"OPTIONS {{ {sync_val} }}" if sync_val else ""} - """ + """ # noqa: E201 E202 bind_vars = {"@collection": self.name, "body": body} @@ -2426,7 +2381,7 @@ def delete_match( {f"LIMIT {limit}" if limit is not None else ""} REMOVE doc IN @@collection {f"OPTIONS {{ {sync_val} }}" if sync_val else ""} - """ + """ # noqa: E201 E202 bind_vars = {"@collection": self.name} diff --git a/arango/formatter.py b/arango/formatter.py index c948f279..5df9b553 100644 --- a/arango/formatter.py +++ b/arango/formatter.py @@ -20,14 +20,22 @@ def format_body(body: Json) -> Json: return body -def format_index(body: Json) -> Json: +def format_index(body: Json, formatter: bool = True) -> Json: """Format index data. 
:param body: Input body. :type body: dict + :param formatter: Convert (most) keys to snake_case. + :type formatter: bool :return: Formatted body. :rtype: dict """ + if not formatter: + body.pop("code") + body.pop("error") + body["id"] = body["id"].split("/", 1)[-1] + return body + result = {"id": body["id"].split("/", 1)[-1], "fields": body["fields"]} if "type" in body: result["type"] = body["type"] diff --git a/docs/indexes.rst b/docs/indexes.rst index d752f732..8df3048f 100644 --- a/docs/indexes.rst +++ b/docs/indexes.rst @@ -28,26 +28,36 @@ on fields ``_from`` and ``_to``. For more information on indexes, refer to cities.indexes() # Add a new persistent index on document fields "continent" and "country". - index = cities.add_persistent_index(fields=['continent', 'country'], unique=True) + persistent_index = {'type': 'persistent', 'fields': ['continent', 'country'], 'unique': True} + index = cities.add_index(persistent_index) # Add new fulltext indexes on fields "continent" and "country". - index = cities.add_fulltext_index(fields=['continent']) - index = cities.add_fulltext_index(fields=['country']) + index = cities.add_index({'type': 'fulltext', 'fields': ['continent']}) + index = cities.add_index({'type': 'fulltext', 'fields': ['country']}) # Add a new persistent index on field 'population'. - index = cities.add_persistent_index(fields=['population'], sparse=False) + persistent_index = {'type': 'persistent', 'fields': ['population'], 'sparse': False} + index = cities.add_index(persistent_index) # Add a new geo-spatial index on field 'coordinates'. - index = cities.add_geo_index(fields=['coordinates']) + geo_index = {'type': 'geo', 'fields': ['coordinates']} + index = cities.add_index(geo_index) # Add a new persistent index on field 'currency'. 
- index = cities.add_persistent_index(fields=['currency'], sparse=True) + persistent_index = {'type': 'persistent', 'fields': ['currency'], 'sparse': True} + index = cities.add_index(persistent_index) # Add a new TTL (time-to-live) index on field 'currency'. - index = cities.add_ttl_index(fields=['currency'], expiry_time=200) + ttl_index = {'type': 'ttl', 'fields': ['currency'], 'expireAfter': 200} + index = cities.add_index(ttl_index) + + # Add MDI (multi-dimensional) index on field 'x' and 'y'. + mdi_index = {'type': 'mdi', 'fields': ['x', 'y'], 'fieldValueTypes': 'double'} + index = cities.add_index(mdi_index) # Indexes may be added with a name that can be referred to in AQL queries. - index = cities.add_persistent_index(fields=['country'], name='my_persistent_index') + persistent_index = {'type': 'persistent', 'fields': ['country'], 'unique': True, 'name': 'my_persistent_index'} + index = cities.add_index(persistent_index) # Delete the last index from the collection. cities.delete_index(index['id']) diff --git a/docs/overview.rst b/docs/overview.rst index 76ff4155..053658df 100644 --- a/docs/overview.rst +++ b/docs/overview.rst @@ -29,8 +29,8 @@ Here is an example showing how **python-arango** client can be used: else: students = db.create_collection('students') - # Add a hash index to the collection. - students.add_hash_index(fields=['name'], unique=False) + # Add a persistent index to the collection. + students.add_index({'type': 'persistent', 'fields': ['name'], 'unique': False}) # Truncate the collection. 
students.truncate() diff --git a/pyproject.toml b/pyproject.toml index dee5fe8b..9f792f0e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,7 +11,7 @@ description = "Python Driver for ArangoDB" authors = [ {name= "Joohwan Oh", email = "joohwan.oh@outlook.com" }] maintainers = [ {name = "Joohwan Oh", email = "joohwan.oh@outlook.com"}, - {name = "Alexandru Petenchea", email = "alexandru.petenchea@arangodb.com"}, + {name = "Alexandru Petenchea", email = "alex.petenchea@gmail.com"}, {name = "Anthony Mahanna", email = "anthony.mahanna@arangodb.com"} ] keywords = ["arangodb", "python", "driver"] diff --git a/tests/conftest.py b/tests/conftest.py index 184269d7..ee5a0cd3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -106,9 +106,9 @@ def pytest_configure(config): col_name = generate_col_name() tst_col = tst_db.create_collection(col_name, edge=False) - tst_col.add_skiplist_index(["val"]) - tst_col.add_fulltext_index(["text"]) - geo_index = tst_col.add_geo_index(["loc"]) + tst_col.add_index({"type": "skiplist", "fields": ["val"]}) + tst_col.add_index({"type": "fulltext", "fields": ["text"]}) + geo_index = tst_col.add_index({"type": "geo", "fields": ["loc"]}) # Create a legacy edge collection for testing. 
icol_name = generate_col_name() diff --git a/tests/test_collection.py b/tests/test_collection.py index 6b10ba00..7ab72800 100644 --- a/tests/test_collection.py +++ b/tests/test_collection.py @@ -369,7 +369,8 @@ def create_and_delete_collection(db, name): assert col.name == name assert db.has_collection(name) is True - index_id = col.add_hash_index(fields=["foo"])["name"] + persistent_index = {"type": "persistent", "fields": ["foo"]} + index_id = col.add_index(persistent_index)["name"] assert index_id == col.indexes()[-1]["name"] assert col.delete_index(index_id) is True diff --git a/tests/test_document.py b/tests/test_document.py index bd471e42..8e53ba26 100644 --- a/tests/test_document.py +++ b/tests/test_document.py @@ -1464,7 +1464,8 @@ def test_document_find_in_box(db, col, bad_col, geo, cluster): ) # Test find_in_box with non-geo index - non_geo = col.add_hash_index(fields=["loc"]) + persistent_index = {"type": "persistent", "fields": ["loc"]} + non_geo = col.add_index(persistent_index) with assert_raises(ValueError) as err: col.find_in_box( latitude1=0, diff --git a/tests/test_index.py b/tests/test_index.py index 41ffb301..a5d0f5eb 100644 --- a/tests/test_index.py +++ b/tests/test_index.py @@ -50,19 +50,22 @@ def test_add_hash_index(icol): icol = icol fields = ["attr1", "attr2"] - result = icol.add_hash_index( - fields=fields, - unique=True, - sparse=True, - deduplicate=True, - name="hash_index", - in_background=False, + result = icol.add_index( + { + "type": "hash", + "fields": fields, + "unique": True, + "sparse": True, + "deduplicate": True, + "name": "hash_index", + "inBackground": False, + } ) expected_index = { "sparse": True, "type": "hash", - "fields": ["attr1", "attr2"], + "fields": fields, "unique": True, "deduplicate": True, "name": "hash_index", @@ -78,13 +81,16 @@ def test_add_hash_index(icol): def test_add_skiplist_index(icol): fields = ["attr1", "attr2"] - result = icol.add_skiplist_index( - fields=fields, - unique=True, - sparse=True, - 
deduplicate=True, - name="skiplist_index", - in_background=False, + result = icol.add_index( + { + "type": "skiplist", + "fields": fields, + "unique": True, + "sparse": True, + "deduplicate": True, + "name": "skiplist_index", + "inBackground": False, + } ) expected_index = { @@ -106,8 +112,14 @@ def test_add_skiplist_index(icol): def test_add_geo_index(icol): # Test add geo index with one attribute - result = icol.add_geo_index( - fields=["attr1"], geo_json=True, name="geo_index", in_background=True + result = icol.add_index( + { + "type": "geo", + "fields": ["attr1"], + "geoJson": True, + "name": "geo_index", + "inBackground": True, + } ) expected_index = { @@ -115,18 +127,21 @@ def test_add_geo_index(icol): "type": "geo", "fields": ["attr1"], "unique": False, - "geo_json": True, + "geoJson": True, "name": "geo_index", } for key, value in expected_index.items(): - assert result[key] == value + assert result[key] == value, (key, value, result[key]) assert result["id"] in extract("id", icol.indexes()) # Test add geo index with two attributes - result = icol.add_geo_index( - fields=["attr1", "attr2"], - geo_json=False, + result = icol.add_index( + { + "type": "geo", + "fields": ["attr1", "attr2"], + "geoJson": False, + } ) expected_index = { "sparse": True, @@ -141,7 +156,7 @@ def test_add_geo_index(icol): # Test add geo index with more than two attributes (should fail) with assert_raises(IndexCreateError) as err: - icol.add_geo_index(fields=["attr1", "attr2", "attr3"]) + icol.add_index({"type": "geo", "fields": ["attr1", "attr2", "attr3"]}) assert err.value.error_code == 10 # Clean up the index @@ -150,14 +165,20 @@ def test_add_geo_index(icol): def test_add_fulltext_index(icol): # Test add fulltext index with one attributes - result = icol.add_fulltext_index( - fields=["attr1"], min_length=10, name="fulltext_index", in_background=True + result = icol.add_index( + { + "type": "fulltext", + "fields": ["attr1"], + "minLength": 10, + "name": "fulltext_index", + 
"inBackground": True, + } ) expected_index = { "sparse": True, "type": "fulltext", "fields": ["attr1"], - "min_length": 10, + "minLength": 10, "unique": False, "name": "fulltext_index", } @@ -168,7 +189,7 @@ def test_add_fulltext_index(icol): # Test add fulltext index with two attributes (should fail) with assert_raises(IndexCreateError) as err: - icol.add_fulltext_index(fields=["attr1", "attr2"]) + icol.add_index({"type": "fulltext", "fields": ["attr1", "attr2"]}) assert err.value.error_code == 10 # Clean up the index @@ -177,12 +198,15 @@ def test_add_fulltext_index(icol): def test_add_persistent_index(icol): # Test add persistent index with two attributes - result = icol.add_persistent_index( - fields=["attr1", "attr2"], - unique=True, - sparse=True, - name="persistent_index", - in_background=True, + result = icol.add_index( + { + "type": "persistent", + "fields": ["attr1", "attr2"], + "unique": True, + "sparse": True, + "name": "persistent_index", + "inBackground": True, + } ) expected_index = { "sparse": True, @@ -202,13 +226,19 @@ def test_add_persistent_index(icol): def test_add_ttl_index(icol): # Test add persistent index with two attributes - result = icol.add_ttl_index( - fields=["attr1"], expiry_time=1000, name="ttl_index", in_background=True + result = icol.add_index( + { + "type": "ttl", + "fields": ["attr1"], + "expireAfter": 1000, + "name": "ttl_index", + "inBackground": True, + } ) expected_index = { "type": "ttl", "fields": ["attr1"], - "expiry_time": 1000, + "expireAfter": 1000, "name": "ttl_index", } for key, value in expected_index.items(): @@ -229,14 +259,14 @@ def test_add_inverted_index(icol, enterprise): analyzer="identity", primarySort={"cache": True, "fields": [{"field": "a", "direction": "asc"}]}, ) - expected_keys = ["primary_sort", "analyzer", "include_all_fields", "search_field"] + expected_keys = ["primarySort", "analyzer", "includeAllFields", "searchField"] if enterprise: parameters["cache"] = True parameters["primaryKeyCache"] = 
True expected_keys.extend(["cache", "primaryKeyCache"]) - result = icol.add_inverted_index(**parameters) + result = icol.add_index({"type": "inverted", **parameters}) assert result["id"] in extract("id", icol.indexes()) for key in expected_keys: @@ -246,19 +276,22 @@ def test_add_inverted_index(icol, enterprise): def test_add_zkd_index(icol, db_version): - result = icol.add_zkd_index( - name="zkd_index", - fields=["x", "y", "z"], - field_value_types="double", - in_background=False, - unique=False, + result = icol.add_index( + { + "type": "zkd", + "fields": ["x", "y", "z"], + "fieldValueTypes": "double", + "name": "zkd_index", + "inBackground": False, + "unique": False, + } ) expected_index = { "name": "zkd_index", "type": "zkd", "fields": ["x", "y", "z"], - "new": True, + "isNewlyCreated": True, "unique": False, } @@ -268,7 +301,9 @@ def test_add_zkd_index(icol, db_version): assert result["id"] in extract("id", icol.indexes()) with assert_raises(IndexCreateError) as err: - icol.add_zkd_index(field_value_types="integer", fields=["x", "y", "z"]) + icol.add_index( + {"type": "zkd", "fieldValueTypes": "integer", "fields": ["x", "y", "z"]} + ) assert err.value.error_code == 10 icol.delete_index(result["id"]) @@ -278,19 +313,22 @@ def test_add_mdi_index(icol, db_version): if db_version < version.parse("3.12.0"): pytest.skip("MDI indexes are usable with 3.12+ only") - result = icol.add_mdi_index( - name="mdi_index", - fields=["x", "y", "z"], - field_value_types="double", - in_background=False, - unique=True, + result = icol.add_index( + { + "type": "mdi", + "fields": ["x", "y", "z"], + "fieldValueTypes": "double", + "name": "mdi_index", + "inBackground": False, + "unique": True, + } ) expected_index = { "name": "mdi_index", "type": "mdi", "fields": ["x", "y", "z"], - "new": True, + "isNewlyCreated": True, "unique": True, } @@ -300,7 +338,13 @@ def test_add_mdi_index(icol, db_version): assert result["id"] in extract("id", icol.indexes()) with 
assert_raises(IndexCreateError) as err: - icol.add_mdi_index(field_value_types="integer", fields=["x", "y", "z"]) + icol.add_index( + { + "type": "mdi", + "fieldValueTypes": "integer", + "fields": ["x", "y", "z"], + } + ) assert err.value.error_code == 10 icol.delete_index(result["id"]) @@ -308,9 +352,12 @@ def test_delete_index(icol, bad_col): old_indexes = set(extract("id", icol.indexes())) - icol.add_hash_index(["attr3", "attr4"], unique=True) - icol.add_skiplist_index(["attr3", "attr4"], unique=True) - icol.add_fulltext_index(fields=["attr3"], min_length=10) + hash_index = {"type": "hash", "fields": ["attr1", "attr2"], "unique": True} + icol.add_index(hash_index) + skiplist_index = {"type": "skiplist", "fields": ["attr3", "attr4"], "unique": True} + icol.add_index(skiplist_index) + fulltext_index = {"type": "fulltext", "fields": ["attr5"], "minLength": 10} + icol.add_index(fulltext_index) new_indexes = set(extract("id", icol.indexes())) assert new_indexes.issuperset(old_indexes)