test: enable all timestamptz cases (#45128)

Issue: #44518

---------

Signed-off-by: Eric Hou <eric.hou@zilliz.com>
Co-authored-by: Eric Hou <eric.hou@zilliz.com>
This commit is contained in:
Feilong Hou 2025-11-21 11:03:06 +08:00 committed by GitHub
parent 275a5b9afc
commit 0231a3edf8
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 382 additions and 133 deletions

View File

@ -2473,7 +2473,10 @@ def gen_timestamptz_str():
return base.strftime("%Y-%m-%dT%H:%M:%S") + "Z"
# otherwise use explicit offset
offset_hours = random.randint(-12, 14)
offset_minutes = random.choice([0, 30])
if offset_hours == -12 or offset_hours == 14:
offset_minutes = 0
else:
offset_minutes = random.choice([0, 30])
tz = timezone(timedelta(hours=offset_hours, minutes=offset_minutes))
local_dt = base.astimezone(tz)
tz_str = local_dt.strftime("%z") # "+0800"
@ -4192,6 +4195,12 @@ def convert_timestamptz(rows, timestamptz_field_name, timezone="UTC"):
Returns:
list of rows data with timestamptz string converted to desired timezone string
Note:
Naive timestamps (e.g. ``YYYY-MM-DD HH:MM:SS`` with no offset information)
are treated as already expressed in the desired timezone. In those cases we
simply append the correct offset for the provided timezone instead of
converting from UTC first.
"""
iso_offset_re = re.compile(r"([+-])(\d{2}):(\d{2})$")
@ -4308,37 +4317,40 @@ def convert_timestamptz(rows, timestamptz_field_name, timezone="UTC"):
dt_target = dt.astimezone(ZoneInfo(timezone))
return _format_with_offset_str(dt_target)
else:
# naive; interpret as time in target timezone and output there
y, mo, d, hh, mi, ss, _, _ = _parse_basic(raw)
if y == 0:
# Cannot use datetime; treat as local-like in target zone with fixed offset fallback
# Asia/Shanghai common case: +08:00
fixed_minutes = 480 if timezone == 'Asia/Shanghai' else 0
return _format_fixed(y, mo, d, hh, mi, ss, fixed_minutes)
local = datetime(y, mo, d, hh, mi, ss, tzinfo=ZoneInfo(timezone))
return _format_with_offset_str(local)
if not (1 <= y <= 9999):
raise ValueError("year out of range for datetime")
tzinfo = ZoneInfo(timezone)
dt_local = datetime(y, mo, d, hh, mi, ss, tzinfo=tzinfo)
return _format_with_offset_str(dt_local)
except Exception:
# manual fallback (handles year 0 and overflow beyond 9999)
y, mo, d, hh, mi, ss, offset, has_z = _parse_basic(raw)
if offset is None and not has_z:
# naive input outside datetime supported range; attach offset only
target_minutes = 0
try:
tzinfo = ZoneInfo(timezone)
ref_year = 2004 # leap year to keep Feb 29 valid
ref_dt = datetime(ref_year, mo, d, hh, mi, ss, tzinfo=tzinfo)
off_td = ref_dt.utcoffset()
if off_td is not None:
target_minutes = int(off_td.total_seconds() // 60)
except Exception:
if timezone == 'Asia/Shanghai':
target_minutes = 480
return _format_fixed(y, mo, d, hh, mi, ss, target_minutes)
# compute UTC components first
if offset is None and has_z:
uy, um, ud, uh, umi, uss = y, mo, d, hh, mi, ss
elif offset is None:
# treat naive as in target timezone; need target offset if possible
try:
if 1 <= y <= 9999:
local = datetime(y, mo, d, hh, mi, ss, tzinfo=ZoneInfo(timezone))
off_td = local.utcoffset() or tzmod.utc.utcoffset(local)
total = int(off_td.total_seconds() // 60)
# convert to UTC
sign = '+' if total >= 0 else '-'
total = abs(total)
offset = (sign, total // 60, total % 60)
else:
offset = ('+', 8, 0) if timezone == 'Asia/Shanghai' else ('+', 0, 0)
except Exception:
offset = ('+', 8, 0) if timezone == 'Asia/Shanghai' else ('+', 0, 0)
uy, um, ud, uh, umi, uss = _apply_offset_to_utc(y, mo, d, hh, mi, ss, offset)
# already handled above, but keep safety fallback to just append offset
if 1 <= y <= 9999:
tzinfo = ZoneInfo(timezone)
dt_local = datetime(y, mo, d, hh, mi, ss, tzinfo=tzinfo)
return _format_with_offset_str(dt_local)
target_minutes = 480 if timezone == 'Asia/Shanghai' else 0
return _format_fixed(y, mo, d, hh, mi, ss, target_minutes)
else:
uy, um, ud, uh, umi, uss = _apply_offset_to_utc(y, mo, d, hh, mi, ss, offset)
@ -4350,9 +4362,23 @@ def convert_timestamptz(rows, timestamptz_field_name, timezone="UTC"):
return _format_with_offset_str(dt_target)
except Exception:
pass
# fallback to fixed offset formatting (Asia/Shanghai -> +08:00 else Z)
# fallback: manually apply timezone offset when datetime conversion fails
# Get target timezone offset
target_minutes = 480 if timezone == 'Asia/Shanghai' else 0
return _format_fixed(uy, um, ud, uh, umi, uss, target_minutes)
try:
# Try to get actual offset from timezone if possible
if 1 <= uy <= 9999:
test_dt = datetime(uy, um, ud, uh, umi, uss, tzinfo=tzmod.utc)
test_target = test_dt.astimezone(ZoneInfo(timezone))
off_td = test_target.utcoffset() or tzmod.utc.utcoffset(test_target)
target_minutes = int(off_td.total_seconds() // 60)
except Exception:
pass
# Convert UTC to local time: UTC + offset = local
# Reverse the offset sign to convert UTC->local (opposite of local->UTC)
reverse_sign = '-' if target_minutes >= 0 else '+'
ty, tm, td, th, tmi, ts = _apply_offset_to_utc(uy, um, ud, uh, umi, uss, (reverse_sign, abs(target_minutes) // 60, abs(target_minutes) % 60))
return _format_fixed(ty, tm, td, th, tmi, ts, target_minutes)
new_rows = []
for row in rows:

View File

@ -43,7 +43,6 @@ class TestMilvusClientTimestamptzValid(TestMilvusClientV2Base):
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L0)
@pytest.mark.skip(reason="timesptamptz is not ready")
def test_milvus_client_timestamptz_UTC(self):
"""
target: Test timestamptz can be successfully inserted and queried
@ -81,18 +80,66 @@ class TestMilvusClientTimestamptzValid(TestMilvusClientV2Base):
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L0)
@pytest.mark.skip(reason="timesptamptz is not ready")
def test_milvus_client_timestamptz_Asia_Shanghai(self):
# BUG: https://github.com/milvus-io/milvus/issues/44595
def test_milvus_client_timestamptz_alter_database_property(self):
    """
    target: Test timestamptz values are returned in the timezone configured
            as a database property.
    method:
        1. Create a database, set its "timezone" property, and create a
           collection in it (the collection inherits the database timezone)
        2. Generate rows with timestamptz values and insert them
        3. Query the rows and compare against the expected strings converted
           to the database timezone
    expected: Steps 1-3 should succeed; queried timestamps match the
              database-level timezone.
    """
    # step 1: create collection
    IANA_timezone = "America/New_York"
    client = self._client()
    db_name = cf.gen_unique_str("db")
    collection_name = cf.gen_collection_name_by_testcase_name()
    schema = self.create_schema(client, enable_dynamic_field=False)[0]
    schema.add_field(default_primary_key_field_name, DataType.INT64, is_primary=True, auto_id=False)
    schema.add_field(default_vector_field_name, DataType.FLOAT_VECTOR, dim=default_dim)
    # timestamptz field is nullable so rows without a value are accepted
    schema.add_field(default_timestamp_field_name, DataType.TIMESTAMPTZ, nullable=True)
    index_params = self.prepare_index_params(client)[0]
    index_params.add_index(default_primary_key_field_name, index_type="AUTOINDEX")
    index_params.add_index(default_vector_field_name, index_type="AUTOINDEX")
    index_params.add_index(default_timestamp_field_name, index_type="AUTOINDEX")
    self.create_database(client, db_name)
    self.use_database(client, db_name)
    # set the timezone at database scope BEFORE creating the collection
    self.alter_database_properties(client, db_name, properties={"timezone": IANA_timezone})
    prop = self.describe_database(client, db_name)
    assert prop[0]["timezone"] == IANA_timezone
    self.create_collection(client, collection_name, default_dim, schema=schema,
                           consistency_level="Strong", index_params=index_params)
    # the new collection should inherit the database-level timezone property
    prop = self.describe_collection(client, collection_name)[0].get("properties")
    assert prop["timezone"] == IANA_timezone
    # step 2: generate rows and insert the rows
    rows = cf.gen_row_data_by_schema(nb=default_nb, schema=schema)
    self.insert(client, collection_name, rows)
    # step 3: query the rows
    # expected values: inserted timestamps converted to the database timezone
    new_rows = cf.convert_timestamptz(rows, default_timestamp_field_name, IANA_timezone)
    self.query(client, collection_name, filter=f"{default_primary_key_field_name} >= 0",
               check_task=CheckTasks.check_query_results,
               check_items={exp_res: new_rows,
                            "pk_name": default_primary_key_field_name})
    self.drop_collection(client, collection_name)
    self.drop_database(client, db_name)
@pytest.mark.tags(CaseLabel.L0)
def test_milvus_client_timestamptz_alter_collection_property(self):
"""
target: Test timestamptz can be successfully inserted and queried
method:
1. Create a collection and alter collection properties
2. Generate rows with timestamptz and insert the rows
3. Insert the rows
expected: Step 3 should result success
"""
# step 1: create collection
IANA_timezone = "America/New_York"
client = self._client()
collection_name = cf.gen_collection_name_by_testcase_name()
schema = self.create_schema(client, enable_dynamic_field=False)[0]
@ -106,24 +153,251 @@ class TestMilvusClientTimestamptzValid(TestMilvusClientV2Base):
self.create_collection(client, collection_name, default_dim, schema=schema,
consistency_level="Strong", index_params=index_params)
db_name = self.list_databases(client)[0]
self.alter_database_properties(client, db_name, properties={"database.timezone": "Asia/Shanghai"})
# step 2: generate rows and insert the rows
# step 2: alter collection properties
self.alter_collection_properties(client, collection_name, properties={"timezone": IANA_timezone})
prop = self.describe_collection(client, collection_name)[0].get("properties")
assert prop["timezone"] == IANA_timezone
# step 3: query the rows
rows = cf.gen_row_data_by_schema(nb=default_nb, schema=schema)
self.insert(client, collection_name, rows)
# step 3: query the rows
rows = cf.convert_timestamptz(rows, default_timestamp_field_name, "Asia/Shanghai")
# step 4: query the rows
new_rows = cf.convert_timestamptz(rows, default_timestamp_field_name, IANA_timezone)
self.query(client, collection_name, filter=f"{default_primary_key_field_name} >= 0",
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"pk_name": default_primary_key_field_name})
check_task=CheckTasks.check_query_results,
check_items={exp_res: new_rows,
"pk_name": default_primary_key_field_name})
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="timesptamptz is not ready")
def test_milvus_client_timestamptz_alter_collection_property_after_insert(self):
    """
    target: Test timestamptz query results follow a collection "timezone"
            property that is altered AFTER data was inserted.
    method:
        1. Create a collection (no timezone property) and insert rows
        2. Verify queried timestamps are returned in UTC (the default)
        3. Alter the collection "timezone" property
        4. Query again and verify results in the new timezone
    expected: Steps 1-4 should succeed; the property change applies to
              already-inserted data.
    """
    # step 1: create collection
    IANA_timezone = "America/New_York"
    client = self._client()
    collection_name = cf.gen_collection_name_by_testcase_name()
    schema = self.create_schema(client, enable_dynamic_field=False)[0]
    schema.add_field(default_primary_key_field_name, DataType.INT64, is_primary=True, auto_id=False)
    schema.add_field(default_vector_field_name, DataType.FLOAT_VECTOR, dim=default_dim)
    schema.add_field(default_timestamp_field_name, DataType.TIMESTAMPTZ, nullable=True)
    index_params = self.prepare_index_params(client)[0]
    index_params.add_index(default_primary_key_field_name, index_type="AUTOINDEX")
    index_params.add_index(default_vector_field_name, index_type="AUTOINDEX")
    index_params.add_index(default_timestamp_field_name, index_type="AUTOINDEX")
    self.create_collection(client, collection_name, default_dim, schema=schema,
                           consistency_level="Strong", index_params=index_params)
    # step 2: insert the rows
    rows = cf.gen_row_data_by_schema(nb=default_nb, schema=schema)
    self.insert(client, collection_name, rows)
    # verify the rows are in UTC time (no timezone property set yet)
    rows = cf.convert_timestamptz(rows, default_timestamp_field_name, "UTC")
    self.query(client, collection_name, filter=f"{default_primary_key_field_name} >= 0",
               check_task=CheckTasks.check_query_results,
               check_items={exp_res: rows,
                            "pk_name": default_primary_key_field_name})
    # step 3: alter collection properties
    self.alter_collection_properties(client, collection_name, properties={"timezone": IANA_timezone})
    prop = self.describe_collection(client, collection_name)[0].get("properties")
    assert prop["timezone"] == IANA_timezone
    # step 4: query the rows
    # same data must now come back converted to the new collection timezone
    new_rows = cf.convert_timestamptz(rows, default_timestamp_field_name, IANA_timezone)
    self.query(client, collection_name, filter=f"{default_primary_key_field_name} >= 0",
               check_task=CheckTasks.check_query_results,
               check_items={exp_res: new_rows,
                            "pk_name": default_primary_key_field_name})
    self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_timestamptz_alter_two_collections_property_after_alter_database_property(self):
    """
    target: Test precedence of collection-level vs database-level "timezone"
            properties across two collections in one database.
    method:
        1. Alter the database timezone, then create 2 collections (both
           inherit the database timezone)
        2. Override the timezone on collection 2 only, then change the
           database timezone again (should not affect either collection)
        3. Insert the same rows into both collections
        4. Query each collection and verify results in its own timezone
    expected: Step 4 should succeed; each collection keeps its effective
              timezone from creation/override time.
    """
    # step 1: alter database property and then create 2 collections
    IANA_timezone_1 = "America/New_York"
    IANA_timezone_2 = "Asia/Shanghai"
    client = self._client()
    db_name = cf.gen_unique_str("db")
    self.create_database(client, db_name)
    self.use_database(client, db_name)
    collection_name1 = cf.gen_collection_name_by_testcase_name() + "_1"
    collection_name2 = cf.gen_collection_name_by_testcase_name() + "_2"
    schema = self.create_schema(client, enable_dynamic_field=False)[0]
    schema.add_field(default_primary_key_field_name, DataType.INT64, is_primary=True, auto_id=False)
    schema.add_field(default_vector_field_name, DataType.FLOAT_VECTOR, dim=default_dim)
    schema.add_field(default_timestamp_field_name, DataType.TIMESTAMPTZ, nullable=True)
    index_params = self.prepare_index_params(client)[0]
    index_params.add_index(default_primary_key_field_name, index_type="AUTOINDEX")
    index_params.add_index(default_vector_field_name, index_type="AUTOINDEX")
    index_params.add_index(default_timestamp_field_name, index_type="AUTOINDEX")
    self.alter_database_properties(client, db_name, properties={"timezone": IANA_timezone_1})
    self.create_collection(client, collection_name1, default_dim, schema=schema,
                           consistency_level="Strong", index_params=index_params, database_name=db_name)
    self.create_collection(client, collection_name2, default_dim, schema=schema,
                           consistency_level="Strong", index_params=index_params, database_name=db_name)
    # step 2: override the timezone on collection 2 only
    # collection 1 inherited the database timezone at creation time
    prop = self.describe_collection(client, collection_name1)[0].get("properties")
    assert prop["timezone"] == IANA_timezone_1
    self.alter_collection_properties(client, collection_name2, properties={"timezone": IANA_timezone_2})
    prop = self.describe_collection(client, collection_name2)[0].get("properties")
    assert prop["timezone"] == IANA_timezone_2
    # change the database timezone again; existing collections keep their own
    self.alter_database_properties(client, db_name, properties={"timezone": "America/Los_Angeles"})
    prop = self.describe_database(client, db_name)[0]
    assert prop["timezone"] == "America/Los_Angeles"
    # step 3: insert the rows into the 2 collections
    rows = cf.gen_row_data_by_schema(nb=default_nb, schema=schema)
    self.insert(client, collection_name1, rows)
    self.insert(client, collection_name2, rows)
    # step 4: query the rows from the 2 collections
    new_rows1 = cf.convert_timestamptz(rows, default_timestamp_field_name, IANA_timezone_1)
    new_rows2 = cf.convert_timestamptz(rows, default_timestamp_field_name, IANA_timezone_2)
    self.query(client, collection_name1, filter=f"{default_primary_key_field_name} >= 0",
               check_task=CheckTasks.check_query_results,
               check_items={exp_res: new_rows1,
                            "pk_name": default_primary_key_field_name})
    self.query(client, collection_name2, filter=f"{default_primary_key_field_name} >= 0",
               check_task=CheckTasks.check_query_results,
               check_items={exp_res: new_rows2,
                            "pk_name": default_primary_key_field_name})
    self.drop_collection(client, collection_name1)
    self.drop_collection(client, collection_name2)
    self.drop_database(client, db_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_timestamptz_alter_database_property_after_alter_collection_property(self):
    """
    target: Test that a collection-level "timezone" property takes precedence
            over a database-level timezone set afterwards.
    method:
        1. Create a database and a collection in it
        2. Alter the collection "timezone" property
        3. Insert rows and verify query results in the collection timezone
        4. Alter the database "timezone" property
        5. Query again; results should still use the collection's timezone
    expected: Steps 2-5 should succeed.
    """
    # step 1: alter collection properties and then alter database property
    IANA_timezone = "America/New_York"
    client = self._client()
    db_name = cf.gen_unique_str("db")
    self.create_database(client, db_name)
    self.use_database(client, db_name)
    collection_name = cf.gen_collection_name_by_testcase_name()
    schema = self.create_schema(client, enable_dynamic_field=False)[0]
    schema.add_field(default_primary_key_field_name, DataType.INT64, is_primary=True, auto_id=False)
    schema.add_field(default_vector_field_name, DataType.FLOAT_VECTOR, dim=default_dim)
    schema.add_field(default_timestamp_field_name, DataType.TIMESTAMPTZ, nullable=True)
    index_params = self.prepare_index_params(client)[0]
    index_params.add_index(default_primary_key_field_name, index_type="AUTOINDEX")
    index_params.add_index(default_vector_field_name, index_type="AUTOINDEX")
    index_params.add_index(default_timestamp_field_name, index_type="AUTOINDEX")
    self.create_collection(client, collection_name, default_dim, schema=schema,
                           consistency_level="Strong", index_params=index_params)
    # step 2: alter collection properties
    self.alter_collection_properties(client, collection_name, properties={"timezone": IANA_timezone})
    prop = self.describe_collection(client, collection_name)[0].get("properties")
    assert prop["timezone"] == IANA_timezone
    # step 3: insert the rows
    rows = cf.gen_row_data_by_schema(nb=default_nb, schema=schema)
    self.insert(client, collection_name, rows)
    rows = cf.convert_timestamptz(rows, default_timestamp_field_name, IANA_timezone)
    self.query(client, collection_name, filter=f"{default_primary_key_field_name} >= 0",
               check_task=CheckTasks.check_query_results,
               check_items={exp_res: rows,
                            "pk_name": default_primary_key_field_name})
    # step 4: alter database property
    new_timezone = "Asia/Shanghai"
    self.alter_database_properties(client, db_name, properties={"timezone": new_timezone})
    prop = self.describe_database(client, db_name)[0]
    assert prop["timezone"] == new_timezone
    # step 5: query the rows
    # expected values unchanged: the collection-level timezone still applies
    self.query(client, collection_name, filter=f"{default_primary_key_field_name} >= 0",
               check_task=CheckTasks.check_query_results,
               check_items={exp_res: rows,
                            "pk_name": default_primary_key_field_name})
    self.drop_collection(client, collection_name)
    self.drop_database(client, db_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_timestamptz_alter_collection_property_and_query_from_different_timezone(self):
    """
    target: Test that a per-query timezone override supersedes the
            collection's "timezone" property.
    method:
        1. Create a collection
        2. Alter its timezone property to America/New_York
        3. Insert rows and verify results in the collection timezone
        4. Query again passing timezone=Asia/Shanghai on the query itself
    expected: Step 4 should succeed and return Asia/Shanghai timestamps.
    """
    # step 1: create collection
    IANA_timezone_1 = "America/New_York"
    IANA_timezone_2 = "Asia/Shanghai"
    client = self._client()
    collection_name = cf.gen_collection_name_by_testcase_name()
    schema = self.create_schema(client, enable_dynamic_field=False)[0]
    schema.add_field(default_primary_key_field_name, DataType.INT64, is_primary=True, auto_id=False)
    schema.add_field(default_vector_field_name, DataType.FLOAT_VECTOR, dim=default_dim)
    schema.add_field(default_timestamp_field_name, DataType.TIMESTAMPTZ, nullable=True)
    index_params = self.prepare_index_params(client)[0]
    index_params.add_index(default_primary_key_field_name, index_type="AUTOINDEX")
    index_params.add_index(default_vector_field_name, index_type="AUTOINDEX")
    index_params.add_index(default_timestamp_field_name, index_type="AUTOINDEX")
    self.create_collection(client, collection_name, default_dim, schema=schema,
                           consistency_level="Strong", index_params=index_params)
    # step 2: Alter collection properties
    self.alter_collection_properties(client, collection_name, properties={"timezone": IANA_timezone_1})
    prop = self.describe_collection(client, collection_name)[0].get("properties")
    assert prop["timezone"] == IANA_timezone_1
    # step 3: insert the rows
    rows = cf.gen_row_data_by_schema(nb=default_nb, schema=schema)
    self.insert(client, collection_name, rows)
    rows = cf.convert_timestamptz(rows, default_timestamp_field_name, IANA_timezone_1)
    self.query(client, collection_name, filter=f"{default_primary_key_field_name} >= 0",
               check_task=CheckTasks.check_query_results,
               check_items={exp_res: rows,
                            "pk_name": default_primary_key_field_name})
    # step 4: query the rows
    # re-convert the (already offset-bearing) expected rows to Shanghai time
    # and pass the timezone override on the query call itself
    rows = cf.convert_timestamptz(rows, default_timestamp_field_name, IANA_timezone_2)
    self.query(client, collection_name, filter=f"{default_primary_key_field_name} >= 0",
               check_task=CheckTasks.check_query_results,
               timezone=IANA_timezone_2,
               check_items={exp_res: rows,
                            "pk_name": default_primary_key_field_name})
    self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_timestamptz_edge_case(self):
"""
target: Test timestamptz can be successfully inserted and queried
@ -166,7 +440,6 @@ class TestMilvusClientTimestamptzValid(TestMilvusClientV2Base):
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="timesptamptz is not ready")
def test_milvus_client_timestamptz_Feb_29(self):
"""
target: Milvus raise error when input data with Feb 29
@ -205,9 +478,7 @@ class TestMilvusClientTimestamptzValid(TestMilvusClientV2Base):
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="timesptamptz is not ready")
def test_milvus_client_timestamptz_partial_update(self):
# BUG: https://github.com/milvus-io/milvus/issues/44527
"""
target: Test timestamptz can be successfully inserted and queried
method:
@ -250,9 +521,7 @@ class TestMilvusClientTimestamptzValid(TestMilvusClientV2Base):
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="timesptamptz is not ready")
def test_milvus_client_timestamptz_default_value(self):
# BUG: https://github.com/milvus-io/milvus/issues/44585
"""
target: Test timestamptz can be successfully inserted and queried with default value
method:
@ -281,7 +550,7 @@ class TestMilvusClientTimestamptzValid(TestMilvusClientV2Base):
# step 3: query the rows
for row in rows:
row[default_timestamp_field_name] = "2025-01-01T00:00:00+08:00"
row[default_timestamp_field_name] = "2025-01-01T00:00:00"
rows = cf.convert_timestamptz(rows, default_timestamp_field_name, "UTC")
self.query(client, collection_name, filter=f"{default_primary_key_field_name} >= 0",
check_task=CheckTasks.check_query_results,
@ -291,9 +560,7 @@ class TestMilvusClientTimestamptzValid(TestMilvusClientV2Base):
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="timesptamptz is not ready")
def test_milvus_client_timestamptz_search(self):
# BUG: https://github.com/milvus-io/milvus/issues/44594
"""
target: Milvus can search with timestamptz expr
method:
@ -336,9 +603,7 @@ class TestMilvusClientTimestamptzValid(TestMilvusClientV2Base):
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="timesptamptz is not ready")
def test_milvus_client_timestamptz_query(self):
# BUG: https://github.com/milvus-io/milvus/issues/44598
"""
target: Milvus can query with timestamptz expr
method:
@ -352,17 +617,17 @@ class TestMilvusClientTimestamptzValid(TestMilvusClientV2Base):
collection_name = cf.gen_collection_name_by_testcase_name()
schema = self.create_schema(client, enable_dynamic_field=False)[0]
schema.add_field(default_primary_key_field_name, DataType.INT64, is_primary=True, auto_id=False)
schema.add_field(default_vector_field_name, DataType.FLOAT_VECTOR, dim=default_dim)
schema.add_field(default_vector_field_name, DataType.FLOAT_VECTOR, dim=3)
schema.add_field(default_timestamp_field_name, DataType.TIMESTAMPTZ, nullable=True)
index_params = self.prepare_index_params(client)[0]
index_params.add_index(default_primary_key_field_name, index_type="AUTOINDEX")
index_params.add_index(default_vector_field_name, index_type="AUTOINDEX")
index_params.add_index(default_timestamp_field_name, index_type="AUTOINDEX")
self.create_collection(client, collection_name, default_dim, schema=schema,
self.create_collection(client, collection_name, 3, schema=schema,
consistency_level="Strong", index_params=index_params)
# step 2: generate rows with timestamptz and insert the rows
rows = [{default_primary_key_field_name: 0, default_vector_field_name: [1,2,3], default_timestamp_field_name: "0000-01-01 00:00:00"},
rows = [{default_primary_key_field_name: 0, default_vector_field_name: [1,2,3], default_timestamp_field_name: "1970-01-01 00:00:00"},
{default_primary_key_field_name: 1, default_vector_field_name: [4,5,6], default_timestamp_field_name: "2021-02-28T00:00:00Z"},
{default_primary_key_field_name: 2, default_vector_field_name: [7,8,9], default_timestamp_field_name: "2025-05-25T23:46:05"},
{default_primary_key_field_name: 3, default_vector_field_name: [10,11,12], default_timestamp_field_name:"2025-05-30T23:46:05+05:30"},
@ -371,98 +636,63 @@ class TestMilvusClientTimestamptzValid(TestMilvusClientV2Base):
self.insert(client, collection_name, rows)
# step 3: query with timestamptz expr
shanghai_time_row = cf.convert_timestamptz(rows, default_timestamp_field_name, "Asia/Shanghai")
UTC_time_row = cf.convert_timestamptz(rows, default_timestamp_field_name, "UTC")
shanghai_time_row = cf.convert_timestamptz(UTC_time_row, default_timestamp_field_name, "Asia/Shanghai")
self.query(client, collection_name, filter=default_search_exp,
timezone="Asia/Shanghai",
time_fields="year, month, day, hour, minute, second, microsecond",
check_task=CheckTasks.check_query_results,
check_items={exp_res: shanghai_time_row,
"pk_name": default_primary_key_field_name})
# >=
expr = f"{default_timestamp_field_name} >= ISO '2025-05-30T23:46:05+05:30'"
self.query(client, collection_name, filter=expr,
timezone="Asia/Shanghai",
check_task=CheckTasks.check_query_results,
check_items={exp_res: shanghai_time_row,
check_items={exp_res: shanghai_time_row[3:],
"pk_name": default_primary_key_field_name})
# ==
expr = f"{default_timestamp_field_name} == ISO '9999-12-31T23:46:05Z'"
self.query(client, collection_name, filter=expr,
timezone="Asia/Shanghai",
check_task=CheckTasks.check_query_results,
check_items={exp_res: shanghai_time_row,
check_items={exp_res: [shanghai_time_row[-1]],
"pk_name": default_primary_key_field_name})
# <=
expr = f"{default_timestamp_field_name} <= ISO '2025-01-01T00:00:00+08:00'"
self.query(client, collection_name, filter=expr,
timezone="Asia/Shanghai",
check_task=CheckTasks.check_query_results,
check_items={exp_res: shanghai_time_row,
check_items={exp_res: shanghai_time_row[:2],
"pk_name": default_primary_key_field_name})
# !=
expr = f"{default_timestamp_field_name} != ISO '9999-12-31T23:46:05'"
expr = f"{default_timestamp_field_name} != ISO '9999-12-31T23:46:05Z'"
self.query(client, collection_name, filter=expr,
timezone="Asia/Shanghai",
check_task=CheckTasks.check_query_results,
check_items={exp_res: shanghai_time_row,
check_items={exp_res: shanghai_time_row[:-1],
"pk_name": default_primary_key_field_name})
# INTERVAL
expr = f"{default_timestamp_field_name} + INTERVAL 'P3D' != ISO '0000-01-02T00:00:00Z'"
expr = f"{default_timestamp_field_name} - INTERVAL 'P3D' >= ISO '1970-01-01T00:00:00Z'"
self.query(client, collection_name, filter=expr,
timezone="Asia/Shanghai",
check_task=CheckTasks.check_query_results,
check_items={exp_res: shanghai_time_row,
check_items={exp_res: shanghai_time_row[1:],
"pk_name": default_primary_key_field_name})
# lower < tz < upper
# BUG: https://github.com/milvus-io/milvus/issues/44600
expr = f"ISO '2025-01-01T00:00:00+08:00' < {default_timestamp_field_name} < ISO '2026-10-05T12:56:34+08:00'"
self.query(client, collection_name, filter=expr,
check_task=CheckTasks.check_query_results,
check_items={exp_res: shanghai_time_row,
"pk_name": default_primary_key_field_name})
# expr = f"ISO '2025-01-01T00:00:00+08:00' < {default_timestamp_field_name} < ISO '2026-10-05T12:56:34+08:00'"
# self.query(client, collection_name, filter=expr,
# check_task=CheckTasks.check_query_results,
# check_items={exp_res: shanghai_time_row,
# "pk_name": default_primary_key_field_name})
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="timesptamptz is not ready")
def test_milvus_client_timestamptz_alter_collection(self):
"""
target: Milvus raise error when alter collection properties
method:
1. Create a collection
2. Alter collection properties
3. Query the rows
expected: Step 3 should result success
"""
# step 1: create collection
client = self._client()
collection_name = cf.gen_collection_name_by_testcase_name()
schema = self.create_schema(client, enable_dynamic_field=False)[0]
schema.add_field(default_primary_key_field_name, DataType.INT64, is_primary=True, auto_id=False)
schema.add_field(default_vector_field_name, DataType.FLOAT_VECTOR, dim=default_dim)
schema.add_field(default_timestamp_field_name, DataType.TIMESTAMPTZ, nullable=True)
index_params = self.prepare_index_params(client)[0]
index_params.add_index(default_primary_key_field_name, index_type="AUTOINDEX")
index_params.add_index(default_vector_field_name, index_type="AUTOINDEX")
index_params.add_index(default_timestamp_field_name, index_type="AUTOINDEX")
self.create_collection(client, collection_name, default_dim, schema=schema,
consistency_level="Strong", index_params=index_params)
# step 2: alter collection properties
self.alter_collection_properties(client, collection_name, properties={"timezone": "Asia/Shanghai"})
rows = cf.gen_row_data_by_schema(nb=default_nb, schema=schema)
self.insert(client, collection_name, rows)
# step 3: query the rows
rows = cf.convert_timestamptz(rows, default_timestamp_field_name, "Asia/Shanghai")
self.query(client, collection_name, filter=f"{default_primary_key_field_name} >= 0",
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"pk_name": default_primary_key_field_name})
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="timesptamptz is not ready")
def test_milvus_client_timestamptz_add_collection_field(self):
# BUG: https://github.com/milvus-io/milvus/issues/44527
"""
target: Milvus raise error when add collection field with timestamptz
method:
@ -516,7 +746,7 @@ class TestMilvusClientTimestamptzValid(TestMilvusClientV2Base):
desired_field_names=[default_primary_key_field_name, default_timestamp_field_name])
self.upsert(client, collection_name, pu_rows, partial_update=True)
pu_rows = cf.convert_timestamptz(pu_rows, default_timestamp_field_name, "UTC")
self.query(client, collection_name, filter=f"0 <= {default_primary_key_field_name} <= {default_nb}",
self.query(client, collection_name, filter=f"0 <= {default_primary_key_field_name} < {default_nb}",
check_task=CheckTasks.check_query_results,
output_fields=[default_timestamp_field_name],
check_items={exp_res: pu_rows,
@ -525,7 +755,6 @@ class TestMilvusClientTimestamptzValid(TestMilvusClientV2Base):
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="timesptamptz is not ready")
def test_milvus_client_timestamptz_add_field_compaction(self):
"""
target: test compaction with added timestamptz field
@ -576,9 +805,7 @@ class TestMilvusClientTimestamptzValid(TestMilvusClientV2Base):
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="timesptamptz is not ready")
def test_milvus_client_timestamptz_add_field_search(self):
# BUG: https://github.com/milvus-io/milvus/issues/44622
"""
target: test add field with timestamptz and search
method:
@ -608,7 +835,7 @@ class TestMilvusClientTimestamptzValid(TestMilvusClientV2Base):
self.add_collection_field(client, collection_name, field_name=default_timestamp_field_name, data_type=DataType.TIMESTAMPTZ,
nullable=True)
schema.add_field(default_timestamp_field_name, DataType.TIMESTAMPTZ, nullable=True)
index_params.add_index(default_timestamp_field_name, index_type="STL_SORT")
index_params.add_index(default_timestamp_field_name, index_type="AUTOINDEX")
self.create_index(client, collection_name, index_params=index_params)
# step 4: search the rows
@ -623,7 +850,9 @@ class TestMilvusClientTimestamptzValid(TestMilvusClientV2Base):
filter=f"{default_timestamp_field_name} is null",
check_task=CheckTasks.check_search_results,
check_items=check_items)
new_rows = cf.gen_row_data_by_schema(nb=default_nb, schema=schema)
self.insert(client, collection_name, new_rows)
self.search(client, collection_name, vectors_to_search,
filter=f"{default_timestamp_field_name} is not null",
check_task=CheckTasks.check_search_results,
@ -632,7 +861,6 @@ class TestMilvusClientTimestamptzValid(TestMilvusClientV2Base):
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="timesptamptz is not ready")
def test_milvus_client_timestamptz_add_field_with_default_value(self):
"""
target: Milvus raise error when add field with timestamptz and default value
@ -655,7 +883,7 @@ class TestMilvusClientTimestamptzValid(TestMilvusClientV2Base):
consistency_level="Strong", index_params=index_params)
# step 2: add field with timestamptz and default value
default_timestamp_value = "2025-01-01T00:00:00"
default_timestamp_value = "2025-01-01T00:00:00Z"
rows = cf.gen_row_data_by_schema(nb=default_nb, schema=schema)
self.insert(client, collection_name, rows)
self.add_collection_field(client, collection_name, field_name=default_timestamp_field_name, data_type=DataType.TIMESTAMPTZ,
@ -674,7 +902,6 @@ class TestMilvusClientTimestamptzValid(TestMilvusClientV2Base):
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="timesptamptz is not ready")
def test_milvus_client_timestamptz_add_another_timestamptz_field(self):
"""
target: Milvus raise error when add another timestamptz field
@ -726,7 +953,6 @@ class TestMilvusClientTimestamptzValid(TestMilvusClientV2Base):
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="timesptamptz is not ready")
def test_milvus_client_timestamptz_insert_delete_upsert_with_flush(self):
"""
target: test insert, delete, upsert with flush on timestamptz
@ -783,9 +1009,7 @@ class TestMilvusClientTimestamptzValid(TestMilvusClientV2Base):
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="timesptamptz is not ready")
def test_milvus_client_timestamptz_insert_upsert_flush_delete_upsert_flush(self):
# BUG: blocked by partial update
"""
target: test insert, upsert, flush, delete, upsert with flush on timestamptz
method:
@ -840,7 +1064,6 @@ class TestMilvusClientTimestamptzValid(TestMilvusClientV2Base):
"pk_name": default_primary_key_field_name})
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="timesptamptz is not ready")
def test_milvus_client_timestamptz_read_from_different_client(self):
"""
target: test read from different client in different timezone
@ -870,8 +1093,9 @@ class TestMilvusClientTimestamptzValid(TestMilvusClientV2Base):
# step 3: query the rows from different client in different timezone
client2 = self._client()
shanghai_rows = cf.convert_timestamptz(rows, default_timestamp_field_name, "Asia/Shanghai")
LA_rows = cf.convert_timestamptz(rows, default_timestamp_field_name, "America/Los_Angeles")
UTC_time_row = cf.convert_timestamptz(rows, default_timestamp_field_name, "UTC")
shanghai_rows = cf.convert_timestamptz(UTC_time_row, default_timestamp_field_name, "Asia/Shanghai")
LA_rows = cf.convert_timestamptz(UTC_time_row, default_timestamp_field_name, "America/Los_Angeles")
result_1 = self.query(client, collection_name, filter=f"{default_primary_key_field_name} >= 0",
check_task=CheckTasks.check_query_results,
timezone="Asia/Shanghai",
@ -896,9 +1120,7 @@ class TestMilvusClientTimestamptzInvalid(TestMilvusClientV2Base):
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="timesptamptz is not ready")
def test_milvus_client_timestamptz_input_data_invalid_time_format(self):
# BUG: https://github.com/milvus-io/milvus/issues/44537
"""
target: Milvus raise error when input data with invalid time format
method:
@ -933,7 +1155,8 @@ class TestMilvusClientTimestamptzInvalid(TestMilvusClientV2Base):
# step 3: query the rows
for row in rows:
error = {ct.err_code: 1, ct.err_msg: f"got invalid timestamptz string: {row[default_timestamp_field_name]}"}
print(row[default_timestamp_field_name])
error = {ct.err_code: 1100, ct.err_msg: f"got invalid timestamptz string '{row[default_timestamp_field_name]}': invalid timezone name; must be a valid IANA Time Zone ID (e.g., 'Asia/Shanghai' or 'UTC'): invalid parameter"}
self.insert(client, collection_name, row,
check_task=CheckTasks.err_res,
check_items=error)
@ -941,7 +1164,6 @@ class TestMilvusClientTimestamptzInvalid(TestMilvusClientV2Base):
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="timesptamptz is not ready")
def test_milvus_client_timestamptz_wrong_index_type(self):
"""
target: Milvus raise error when input data with wrong index type
@ -960,11 +1182,13 @@ class TestMilvusClientTimestamptzInvalid(TestMilvusClientV2Base):
index_params.add_index(default_primary_key_field_name, index_type="AUTOINDEX")
index_params.add_index(default_vector_field_name, index_type="AUTOINDEX")
index_params.add_index(default_timestamp_field_name, index_type="INVERTED")
error = {ct.err_code: 1100, ct.err_msg: "INVERTED are not supported on Timestamptz field: invalid parameter[expected=valid index params][actual=invalid index params]"}
self.create_collection(client, collection_name, default_dim, schema=schema,
consistency_level="Strong", index_params=index_params)
consistency_level="Strong", index_params=index_params,
check_task=CheckTasks.err_res,
check_items=error)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="timesptamptz is not ready")
def test_milvus_client_timestamptz_wrong_default_value(self):
"""
target: Milvus raise error when input data with wrong default value
@ -981,7 +1205,7 @@ class TestMilvusClientTimestamptzInvalid(TestMilvusClientV2Base):
schema.add_field(default_vector_field_name, DataType.FLOAT_VECTOR, dim=default_dim)
schema.add_field(default_timestamp_field_name, DataType.TIMESTAMPTZ, nullable=True, default_value="timestamp")
error = {ct.err_code: 1100, ct.err_msg: "type (Timestamptz) of field (timestamp) is not equal to the type(DataType_VarChar) of default_value: invalid parameter"}
error = {ct.err_code: 65536, ct.err_msg: "invalid timestamp string: 'timestamp'. Does not match any known format"}
self.create_collection(client, collection_name, default_dim, schema=schema,
consistency_level="Strong",
check_task=CheckTasks.err_res, check_items=error)
@ -992,15 +1216,14 @@ class TestMilvusClientTimestamptzInvalid(TestMilvusClientV2Base):
new_schema.add_field(default_vector_field_name, DataType.FLOAT_VECTOR, dim=default_dim)
new_schema.add_field(default_timestamp_field_name, DataType.TIMESTAMPTZ, nullable=True, default_value=10)
error = {ct.err_code: 1100, ct.err_msg: "type (Timestamptz) of field (timestamp) is not equal to the type(DataType_VarChar) of default_value: invalid parameter"}
self.create_collection(client, collection_name, default_dim, schema=schema,
error = {ct.err_code: 65536, ct.err_msg: "type (Timestamptz) of field (timestamp) is not equal to the type(DataType_Int64) of default_value: invalid parameter"}
self.create_collection(client, collection_name, default_dim, schema=new_schema,
consistency_level="Strong",
check_task=CheckTasks.err_res, check_items=error)
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="timesptamptz is not ready")
def test_milvus_client_timestamptz_add_field_not_nullable(self):
"""
target: Milvus raise error when add non-nullable timestamptz field