test: add restful v2 api testcases (#39558)

Adds RESTful v2 test coverage for collection flush/compact, index properties, and database properties; moves the alter/drop index-property helpers from CollectionClient to IndexClient; and attaches a Request-Timeout header in every client's update_headers.

/kind improvement

Signed-off-by: zhuwenxing <wenxing.zhu@zilliz.com>

parent ac7550da82, commit 8fd39779f7
@@ -10,8 +10,7 @@ from tenacity import retry, retry_if_exception_type, stop_after_attempt
 from requests.exceptions import ConnectionError
 import urllib.parse
 
-REQUEST_TIMEOUT = 120
+REQUEST_TIMEOUT = "120"
 
 ENABLE_LOG_SAVE = False
 
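Note on the hunk above: REQUEST_TIMEOUT becomes the string "120" because it is now sent as an HTTP header value (see the update_headers hunks below), and requests accepts only str/bytes header values. A minimal sketch, with a placeholder endpoint:

import requests

try:
    # an int header value fails request preparation, before anything is sent
    requests.get("http://localhost:19530", headers={"Request-Timeout": 120})
except requests.exceptions.InvalidHeader as exc:
    print(exc)

# the string form used by this patch passes header validation
requests.get("http://localhost:19530", headers={"Request-Timeout": "120"})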
@@ -116,7 +115,8 @@ class Requests():
         headers = {
             'Content-Type': 'application/json',
             'Authorization': f'Bearer {cls.api_key}',
-            'RequestId': cls.uuid
+            'RequestId': cls.uuid,
+            "Request-Timeout": REQUEST_TIMEOUT
         }
         return headers
 
@@ -189,7 +189,8 @@ class VectorClient(Requests):
             'Content-Type': 'application/json',
             'Authorization': f'Bearer {cls.api_key}',
             'Accept-Type-Allow-Int64': "true",
-            'RequestId': cls.uuid
+            'RequestId': cls.uuid,
+            "Request-Timeout": REQUEST_TIMEOUT
         }
         return headers
 
@@ -351,7 +352,6 @@ class CollectionClient(Requests):
             else:
                 time.sleep(1)
 
-
     @classmethod
     def update_headers(cls, headers=None):
         if headers is not None:
@@ -359,7 +359,8 @@ class CollectionClient(Requests):
         headers = {
             'Content-Type': 'application/json',
             'Authorization': f'Bearer {cls.api_key}',
-            'RequestId': cls.uuid
+            'RequestId': cls.uuid,
+            "Request-Timeout": REQUEST_TIMEOUT
         }
         return headers
 
@@ -549,13 +550,11 @@ class CollectionClient(Requests):
         response = self.post(url, headers=self.update_headers(), data=payload)
         return response.json()
 
-    def alter_index_properties(self, collection_name, index_name, properties, db_name="default"):
-        """Alter index properties"""
-        url = f"{self.endpoint}/v2/vectordb/indexes/alter_properties"
+    def flush(self, collection_name, db_name="default"):
+        """Flush collection"""
+        url = f"{self.endpoint}/v2/vectordb/collections/flush"
         payload = {
-            "collectionName": collection_name,
-            "indexName": index_name,
-            "properties": properties
+            "collectionName": collection_name
         }
         if self.db_name is not None:
             payload["dbName"] = self.db_name
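A minimal usage sketch of the new flush helper; the CollectionClient constructor arguments shown here are assumptions, since client setup is not part of this diff:

client = CollectionClient("http://localhost:19530", token="root:Milvus")  # hypothetical setup
rsp = client.flush("my_collection")
assert rsp["code"] == 0  # the maintenance tests below expect code 0 on success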
@@ -564,13 +563,24 @@ class CollectionClient(Requests):
         response = self.post(url, headers=self.update_headers(), data=payload)
         return response.json()
 
-    def drop_index_properties(self, collection_name, index_name, delete_keys, db_name="default"):
-        """Drop index properties"""
-        url = f"{self.endpoint}/v2/vectordb/indexes/drop_properties"
+    def compact(self, collection_name, db_name="default"):
+        """Compact collection"""
+        url = f"{self.endpoint}/v2/vectordb/collections/compact"
         payload = {
-            "collectionName": collection_name,
-            "indexName": index_name,
-            "propertyKeys": delete_keys
+            "collectionName": collection_name
         }
         if self.db_name is not None:
             payload["dbName"] = self.db_name
         if db_name != "default":
             payload["dbName"] = db_name
         response = self.post(url, headers=self.update_headers(), data=payload)
         return response.json()
 
+    def get_compaction_state(self, collection_name, db_name="default"):
+        """Get compaction state"""
+        url = f"{self.endpoint}/v2/vectordb/collections/get_compaction_state"
+        payload = {
+            "collectionName": collection_name
+        }
+        if self.db_name is not None:
+            payload["dbName"] = self.db_name
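The same pattern works for compaction. A hedged sketch, reusing the client from the sketch above, that triggers a compaction and polls its state; the exact state strings are not shown in this diff, so only the response keys asserted by the tests are inspected:

import time

rsp = client.compact("my_collection")
assert rsp["code"] == 0
for _ in range(30):
    state = client.get_compaction_state("my_collection")
    if state["code"] == 0 and "state" in state["data"]:
        print(state["data"]["state"], state["data"]["compactionID"])
        break
    time.sleep(1)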
@@ -594,7 +604,8 @@ class PartitionClient(Requests):
         headers = {
             'Content-Type': 'application/json',
             'Authorization': f'Bearer {cls.api_key}',
-            'RequestId': cls.uuid
+            'RequestId': cls.uuid,
+            "Request-Timeout": REQUEST_TIMEOUT
         }
         return headers
 
@@ -835,7 +846,8 @@ class IndexClient(Requests):
         headers = {
             'Content-Type': 'application/json',
             'Authorization': f'Bearer {cls.api_key}',
-            'RequestId': cls.uuid
+            'RequestId': cls.uuid,
+            "Request-Timeout": REQUEST_TIMEOUT
         }
         return headers
 
@@ -848,7 +860,7 @@ class IndexClient(Requests):
         res = response.json()
         return res
 
-    def index_describe(self, collection_name=None, index_name=None, db_name="default",):
+    def index_describe(self, collection_name=None, index_name=None, db_name="default", ):
         url = f'{self.endpoint}/v2/vectordb/indexes/describe'
         if self.db_name is not None:
             db_name = self.db_name
@@ -882,6 +894,36 @@ class IndexClient(Requests):
         res = response.json()
         return res
 
+    def alter_index_properties(self, collection_name, index_name, properties, db_name="default"):
+        """Alter index properties"""
+        url = f"{self.endpoint}/v2/vectordb/indexes/alter_properties"
+        payload = {
+            "collectionName": collection_name,
+            "indexName": index_name,
+            "properties": properties
+        }
+        if self.db_name is not None:
+            db_name = self.db_name
+        if db_name != "default":
+            payload["dbName"] = db_name
+        response = self.post(url, headers=self.update_headers(), data=payload)
+        return response.json()
+
+    def drop_index_properties(self, collection_name, index_name, delete_keys, db_name="default"):
+        """Drop index properties"""
+        url = f"{self.endpoint}/v2/vectordb/indexes/drop_properties"
+        payload = {
+            "collectionName": collection_name,
+            "indexName": index_name,
+            "propertyKeys": delete_keys
+        }
+        if self.db_name is not None:
+            db_name = self.db_name
+        if db_name != "default":
+            payload["dbName"] = db_name
+        response = self.post(url, headers=self.update_headers(), data=payload)
+        return response.json()
+
 
 class AliasClient(Requests):
 
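A hedged usage sketch of the relocated helpers; the "mmap.enabled" key comes from the tests further down, and the IndexClient construction is an assumption:

index_client = IndexClient("http://localhost:19530", token="root:Milvus")  # hypothetical setup
rsp = index_client.alter_index_properties("my_collection", "my_vector", {"mmap.enabled": True})
assert rsp["code"] == 0
rsp = index_client.drop_index_properties("my_collection", "my_vector", ["mmap.enabled"])
assert rsp["code"] == 0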
@@ -949,7 +991,8 @@ class ImportJobClient(Requests):
         headers = {
             'Content-Type': 'application/json',
             'Authorization': f'Bearer {cls.api_key}',
-            'RequestId': cls.uuid
+            'RequestId': cls.uuid,
+            "Request-Timeout": REQUEST_TIMEOUT
         }
         return headers
 
@@ -1047,10 +1090,28 @@ class DatabaseClient(Requests):
     def database_drop(self, payload):
         """Drop a database"""
         url = f"{self.endpoint}/v2/vectordb/databases/drop"
-        rsp = self.post(url, data=payload).json()
-        if rsp['code'] == 0 and payload['dbName'] in self.db_names:
-            self.db_names.remove(payload['dbName'])
-        return rsp
+        response = self.post(url, headers=self.update_headers(), data=payload)
+        return response.json()
+
+    def alter_database_properties(self, db_name, properties):
+        """Alter database properties"""
+        url = f"{self.endpoint}/v2/vectordb/databases/alter"
+        payload = {
+            "dbName": db_name,
+            "properties": properties
+        }
+        response = self.post(url, headers=self.update_headers(), data=payload)
+        return response.json()
+
+    def drop_database_properties(self, db_name, property_keys):
+        """Drop database properties"""
+        url = f"{self.endpoint}/v2/vectordb/databases/drop_properties"
+        payload = {
+            "dbName": db_name,
+            "propertyKeys": property_keys
+        }
+        response = self.post(url, headers=self.update_headers(), data=payload)
+        return response.json()
 
 
 class StorageClient():
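A hedged round-trip sketch of the new database-property helpers; database_create and database_describe already exist on DatabaseClient (they are used by the tests below), while the construction arguments are assumptions:

db_client = DatabaseClient("http://localhost:19530", token="root:Milvus")  # hypothetical setup
db_client.database_create({"dbName": "demo_db"})
db_client.alter_database_properties("demo_db", {"mmap.enabled": True})
rsp = db_client.database_describe({"dbName": "demo_db"})
# properties come back as key/value string pairs, e.g. {"key": "mmap.enabled", "value": "true"}
db_client.drop_database_properties("demo_db", ["mmap.enabled"])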
@@ -2,7 +2,7 @@ import datetime
 import logging
 import time
 from utils.util_log import test_log as logger
-from utils.utils import gen_collection_name
+from utils.utils import gen_collection_name, gen_vector
 import pytest
 from api.milvus import CollectionClient
 from base.testbase import TestBase
@@ -537,7 +537,6 @@ class TestCreateCollection(TestBase):
     @pytest.mark.parametrize("enable_partition_key", [True])
     @pytest.mark.parametrize("dim", [128])
     @pytest.mark.parametrize("metric_type", ["JACCARD", "HAMMING"])
-    @pytest.mark.skip(reason="https://github.com/milvus-io/milvus/issues/31494")
     def test_create_collections_binary_vector_datatype(self, dim, auto_id, enable_dynamic_field, enable_partition_key,
                                                        metric_type):
         """
@@ -956,7 +955,6 @@ class TestGetCollectionStats(TestBase):
             "metricType": "L2",
             "dimension": dim,
         }
-        time.sleep(1)
         rsp = client.collection_create(payload)
         assert rsp['code'] == 0
         # describe collection
@@ -1409,7 +1407,6 @@ class TestCollectionWithAuth(TestBase):
 
 
 @pytest.mark.L0
-@pytest.mark.skip("skip temporarily, need fix")
 class TestCollectionProperties(TestBase):
     """Test collection property operations"""
 
@@ -1559,3 +1556,108 @@ class TestCollectionProperties(TestBase):
                 assert p['value'] == "100"
+
+
+@pytest.mark.L0
+class TestCollectionMaintenance(TestBase):
+    """Test collection maintenance operations"""
+
+    @pytest.mark.xfail(reason="issue: https://github.com/milvus-io/milvus/issues/39546")
+    def test_collection_flush(self):
+        """
+        target: test collection flush
+        method: create collection, insert data multiple times and flush
+        expected: flush successfully
+        """
+        # Create collection
+        name = gen_collection_name()
+        client = self.collection_client
+        vector_client = self.vector_client
+        payload = {
+            "collectionName": name,
+            "schema": {
+                "fields": [
+                    {"fieldName": "book_id", "dataType": "Int64", "isPrimary": True, "elementTypeParams": {}},
+                    {"fieldName": "my_vector", "dataType": "FloatVector", "elementTypeParams": {"dim": 128}}
+                ]
+            }
+        }
+        client.collection_create(payload)
+
+        # Insert small batches of data multiple times
+        for i in range(3):
+            vectors = [gen_vector(dim=128) for _ in range(10)]
+            insert_data = {
+                "collectionName": name,
+                "data": [
+                    {
+                        "book_id": i * 10 + j,
+                        "my_vector": vector
+                    }
+                    for i, vector in enumerate(vectors)
+                    for j in range(10)
+                ]
+            }
+            response = vector_client.vector_insert(insert_data)
+            assert response["code"] == 0
+        c = Collection(name)
+        num_entities_before_flush = c.num_entities
+        # Flush collection
+        response = client.flush(name)
+        assert response["code"] == 0
+        # check segments
+        num_entities_after_flush = c.num_entities
+        logger.info(f"num_entities_before_flush: {num_entities_before_flush}, num_entities_after_flush: {num_entities_after_flush}")
+        assert num_entities_after_flush > num_entities_before_flush
+
+    def test_collection_compact(self):
+        """
+        target: test collection compact
+        method: create collection, insert data, flush multiple times, then compact
+        expected: compact successfully
+        """
+        # Create collection
+        name = gen_collection_name()
+        client = self.collection_client
+        vector_client = self.vector_client
+        payload = {
+            "collectionName": name,
+            "schema": {
+                "fields": [
+                    {"fieldName": "book_id", "dataType": "Int64", "isPrimary": True, "elementTypeParams": {}},
+                    {"fieldName": "my_vector", "dataType": "FloatVector", "elementTypeParams": {"dim": 128}}
+                ]
+            }
+        }
+        client.collection_create(payload)
+
+        # Insert and flush multiple times
+        for i in range(3):
+            # Insert data
+            vectors = [gen_vector(dim=128) for _ in range(10)]
+            insert_data = {
+                "collectionName": name,
+                "data": [
+                    {
+                        "book_id": i * 10 + j,
+                        "my_vector": vector
+                    }
+                    for i, vector in enumerate(vectors)
+                    for j in range(10)
+                ]
+            }
+            response = vector_client.vector_insert(insert_data)
+            assert response["code"] == 0
+
+            # Flush after each insert
+            c = Collection(name)
+            c.flush()
+        # Compact collection
+        response = client.compact(name)
+        assert response["code"] == 0
+
+        # Get compaction state
+        response = client.get_compaction_state(name)
+        assert response["code"] == 0
+        assert "state" in response["data"]
+        assert "compactionID" in response["data"]
+        # TODO need verification by pymilvus
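Context for the flush assertion above, hedged: pymilvus' Collection.num_entities reflects rows in sealed (flushed) segments, so it lags fresh inserts until a flush runs, which is why the count is expected to grow across the flush call. A placeholder sketch:

from pymilvus import Collection, connections

connections.connect(host="localhost", port="19530")  # placeholder connection
c = Collection("my_collection")
print(c.num_entities)  # excludes rows still buffered in growing segments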
@@ -162,3 +162,54 @@ class TestDatabaseOperationNegative(TestBase):
         """
         rsp = self.database_client.database_drop({"dbName": "default"})
         assert rsp["code"] != 0
+
+
+@pytest.mark.L0
+class TestDatabaseProperties(TestBase):
+    """Test database properties operations"""
+
+    @pytest.mark.xfail(reason="issue: https://github.com/milvus-io/milvus/issues/39953")
+    def test_alter_database_properties(self):
+        """
+        target: test alter database properties
+        method: create database, alter database properties
+        expected: alter database properties successfully
+        """
+        # Create database
+        client = self.database_client
+        db_name = "test_alter_props"
+        payload = {
+            "dbName": db_name
+        }
+        response = client.database_create(payload)
+        assert response["code"] == 0
+        orders = [[True, False], [False, True]]
+        values_after_drop = []
+        for order in orders:
+            for value in order:
+                # Alter database properties
+                properties = {"mmap.enabled": value}
+                response = client.alter_database_properties(db_name, properties)
+                assert response["code"] == 0
+
+                # describe database properties
+                response = client.database_describe({"dbName": db_name})
+                assert response["code"] == 0
+                for prop in response["data"]["properties"]:
+                    if prop["key"] == "mmap.enabled":
+                        assert prop["value"] == str(value).lower()
+            # Drop database properties
+            property_keys = ["mmap.enabled"]
+            response = client.drop_database_properties(db_name, property_keys)
+            assert response["code"] == 0
+            # describe database properties
+            response = client.database_describe({"dbName": db_name})
+            assert response["code"] == 0
+            value = None
+            for prop in response["data"]["properties"]:
+                if prop["key"] == "mmap.enabled":
+                    value = prop["value"]
+            values_after_drop.append(value)
+        # assert all values after drop are same
+        for value in values_after_drop:
+            assert value == values_after_drop[0]
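The parsing in this test implies the describe response shape; a condensed sketch of the invariant it checks (property values are returned as lowercase strings):

rsp = client.database_describe({"dbName": "test_alter_props"})
props = {p["key"]: p["value"] for p in rsp["data"]["properties"]}
# after an alter, props["mmap.enabled"] is "true" or "false"; after a drop the
# remaining value must be identical regardless of which alter sequence ran first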
@@ -35,7 +35,6 @@ class TestCreateIndex(TestBase):
     @pytest.mark.parametrize("metric_type", ["L2", "COSINE", "IP"])
     @pytest.mark.parametrize("index_type", ["AUTOINDEX", "IVF_SQ8", "HNSW"])
     @pytest.mark.parametrize("dim", [128])
-    @pytest.mark.xfail(reason="issue: https://github.com/milvus-io/milvus/issues/36365")
     def test_index_default(self, dim, metric_type, index_type):
         """
         target: test create collection
@@ -259,7 +258,6 @@ class TestCreateIndex(TestBase):
     @pytest.mark.parametrize("index_type", ['SPARSE_INVERTED_INDEX', 'SPARSE_WAND'])
     @pytest.mark.parametrize("bm25_k1", [1.2, 1.5])
     @pytest.mark.parametrize("bm25_b", [0.7, 0.5])
-    @pytest.mark.xfail(reason="issue: https://github.com/milvus-io/milvus/issues/36365")
     def test_create_index_for_full_text_search(self, nb, dim, insert_round, auto_id, is_partition_key,
                                                enable_dynamic_schema, tokenizer, index_type, bm25_k1, bm25_b):
         """
@@ -363,6 +361,156 @@ class TestCreateIndex(TestBase):
         assert info['index_param']['index_type'] == index_type
 
 
+@pytest.mark.L0
+class TestIndexProperties(TestBase):
+    """Test index properties operations"""
+
+    def test_alter_index_properties(self):
+        """
+        target: test alter index properties
+        method: create collection with index, alter index properties
+        expected: alter index properties successfully
+        """
+        # Create collection
+        name = gen_collection_name()
+        collection_client = self.collection_client
+        payload = {
+            "collectionName": name,
+            "schema": {
+                "fields": [
+                    {"fieldName": "book_id", "dataType": "Int64", "isPrimary": True, "elementTypeParams": {}},
+                    {"fieldName": "my_vector", "dataType": "FloatVector", "elementTypeParams": {"dim": 128}}
+                ]
+            }
+        }
+        collection_client.collection_create(payload)
+
+        # Create index
+        index_client = self.index_client
+        index_payload = {
+            "collectionName": name,
+            "indexParams": [
+                {
+                    "fieldName": "my_vector",
+                    "indexName": "my_vector",
+                    "indexType": "IVF_SQ8",
+                    "metricType": "L2",
+                    "params": {"nlist": 128}
+                }
+            ],
+        }
+        index_client.index_create(index_payload)
+        # list index
+        rsp = index_client.index_list(name)
+        assert rsp['code'] == 0
+
+        # Alter index properties
+        properties = {"mmap.enabled": True}
+        response = index_client.alter_index_properties(name, "my_vector", properties)
+        assert response["code"] == 0
+
+        # describe index
+        rsp = index_client.index_describe(name, "my_vector")
+        assert rsp['code'] == 0
+
+        # Drop index properties
+        delete_keys = ["mmap.enabled"]
+        response = index_client.drop_index_properties(name, "my_vector", delete_keys)
+        assert response["code"] == 0
+
+        # describe index
+        rsp = index_client.index_describe(name, "my_vector")
+        assert rsp['code'] == 0
+
+    @pytest.mark.parametrize("invalid_property", [
+        {"invalid_key": True},
+        {"mmap.enabled": "invalid_value"}
+    ])
+    def test_alter_index_properties_with_invalid_properties(self, invalid_property):
+        """
+        target: test alter index properties with invalid properties
+        method: create collection with index, alter index properties with invalid properties
+        expected: alter index properties failed with error
+        """
+        # Create collection
+        name = gen_collection_name()
+        collection_client = self.collection_client
+        payload = {
+            "collectionName": name,
+            "schema": {
+                "fields": [
+                    {"fieldName": "book_id", "dataType": "Int64", "isPrimary": True, "elementTypeParams": {}},
+                    {"fieldName": "my_vector", "dataType": "FloatVector", "elementTypeParams": {"dim": 128}}
+                ]
+            }
+        }
+        collection_client.collection_create(payload)
+
+        # Create index
+        index_client = self.index_client
+        index_payload = {
+            "collectionName": name,
+            "indexParams": [
+                {
+                    "fieldName": "my_vector",
+                    "indexName": "my_vector",
+                    "indexType": "IVF_SQ8",
+                    "metricType": "L2",
+                    "params": {"nlist": 128}
+                }
+            ],
+        }
+        index_client.index_create(index_payload)
+
+        # Alter index properties with invalid property
+        rsp = index_client.alter_index_properties(name, "my_vector", invalid_property)
+        assert rsp['code'] == 1100
+
+    def test_drop_index_properties_with_nonexistent_key(self):
+        """
+        target: test drop index properties with nonexistent key
+        method: create collection with index, drop index properties with nonexistent key
+        expected: drop index properties failed with error
+        """
+        # Create collection
+        name = gen_collection_name()
+        collection_client = self.collection_client
+        payload = {
+            "collectionName": name,
+            "schema": {
+                "fields": [
+                    {"fieldName": "book_id", "dataType": "Int64", "isPrimary": True, "elementTypeParams": {}},
+                    {"fieldName": "my_vector", "dataType": "FloatVector", "elementTypeParams": {"dim": 128}}
+                ]
+            }
+        }
+        collection_client.collection_create(payload)
+
+        # Create index
+        index_client = self.index_client
+        index_payload = {
+            "collectionName": name,
+            "indexParams": [
+                {
+                    "fieldName": "my_vector",
+                    "indexName": "my_vector",
+                    "indexType": "IVF_SQ8",
+                    "metricType": "L2",
+                    "params": {"nlist": 128}
+                }
+            ],
+        }
+        index_client.index_create(index_payload)
+
+        # Drop index properties with nonexistent key
+        delete_keys = ["nonexistent.key"]
+        rsp = index_client.drop_index_properties(name, "my_vector", delete_keys)
+        assert rsp['code'] == 1100
+
+
 @pytest.mark.L1
 class TestCreateIndexNegative(TestBase):
 
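Both negative cases above assert code 1100, which, hedged, is Milvus' generic invalid-parameter error code, so an unknown property key and a malformed value are rejected the same way. An illustrative response shape:

rsp = index_client.alter_index_properties(name, "my_vector", {"invalid_key": True})
# roughly: {"code": 1100, "message": "invalid parameter ..."} (message text assumed)
assert rsp["code"] == 1100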
@@ -4168,4 +4168,4 @@ class TestVectorWithAuth(TestBase):
         client = self.vector_client
         client.api_key = "invalid_api_key"
         rsp = client.vector_delete(payload)
-        assert rsp['code'] == 1800
\ No newline at end of file
+        assert rsp['code'] == 1800
@@ -302,7 +302,7 @@ def gen_bf16_vectors(num, dim):
     return raw_vectors, bf16_vectors
 
 
-def gen_vector(datatype="float_vector", dim=128, binary_data=False, sparse_format='dok'):
+def gen_vector(datatype="FloatVector", dim=128, binary_data=False, sparse_format='dok'):
     value = None
     if datatype == "FloatVector":
         return preprocessing.normalize([np.array([random.random() for i in range(dim)])])[0].tolist()
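With the default datatype renamed to match the schema's dataType spelling, the new tests can call the generator without naming it; a quick sketch:

vec = gen_vector(dim=128)  # now equivalent to gen_vector(datatype="FloatVector", dim=128)
assert len(vec) == 128     # normalized float vector, per the FloatVector branch above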