fix: Int64 overflow in JSON encoding (#37657)

issue: #36621

- For simple types in a struct, add the "string" option to the JSON tag for
automatic string conversion during JSON encoding.
- For complex types in a struct, change the field type from "int64" to "string".

Signed-off-by: jaime <yun.zhang@zilliz.com>
This commit is contained in:
jaime 2024-11-14 22:52:30 +08:00 committed by GitHub
parent c5485bb1b1
commit 1d06d4324b
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
48 changed files with 2125 additions and 1554 deletions

View File

@ -262,7 +262,7 @@ func combineToSegmentIndexesMeta220(segmentIndexes SegmentIndexesMeta210, indexB
IndexState: buildMeta.GetState(), IndexState: buildMeta.GetState(),
FailReason: buildMeta.GetFailReason(), FailReason: buildMeta.GetFailReason(),
IsDeleted: buildMeta.GetMarkDeleted(), IsDeleted: buildMeta.GetMarkDeleted(),
CreateTime: record.GetCreateTime(), CreatedUTCTime: record.GetCreateTime(),
IndexFileKeys: fileKeys, IndexFileKeys: fileKeys,
IndexSize: buildMeta.GetSerializeSize(), IndexSize: buildMeta.GetSerializeSize(),
WriteHandoff: buildMeta.GetState() == commonpb.IndexState_Finished, WriteHandoff: buildMeta.GetState() == commonpb.IndexState_Finished,

2
go.mod
View File

@ -23,7 +23,7 @@ require (
github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 github.com/grpc-ecosystem/go-grpc-middleware v1.3.0
github.com/klauspost/compress v1.17.9 github.com/klauspost/compress v1.17.9
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d
github.com/milvus-io/milvus-proto/go-api/v2 v2.3.4-0.20241108105827-266fb751b620 github.com/milvus-io/milvus-proto/go-api/v2 v2.3.4-0.20241111062829-6de3d96f664f
github.com/minio/minio-go/v7 v7.0.73 github.com/minio/minio-go/v7 v7.0.73
github.com/pingcap/log v1.1.1-0.20221015072633-39906604fb81 github.com/pingcap/log v1.1.1-0.20221015072633-39906604fb81
github.com/prometheus/client_golang v1.14.0 github.com/prometheus/client_golang v1.14.0

4
go.sum
View File

@ -628,8 +628,8 @@ github.com/milvus-io/cgosymbolizer v0.0.0-20240722103217-b7dee0e50119 h1:9VXijWu
github.com/milvus-io/cgosymbolizer v0.0.0-20240722103217-b7dee0e50119/go.mod h1:DvXTE/K/RtHehxU8/GtDs4vFtfw64jJ3PaCnFri8CRg= github.com/milvus-io/cgosymbolizer v0.0.0-20240722103217-b7dee0e50119/go.mod h1:DvXTE/K/RtHehxU8/GtDs4vFtfw64jJ3PaCnFri8CRg=
github.com/milvus-io/gorocksdb v0.0.0-20220624081344-8c5f4212846b h1:TfeY0NxYxZzUfIfYe5qYDBzt4ZYRqzUjTR6CvUzjat8= github.com/milvus-io/gorocksdb v0.0.0-20220624081344-8c5f4212846b h1:TfeY0NxYxZzUfIfYe5qYDBzt4ZYRqzUjTR6CvUzjat8=
github.com/milvus-io/gorocksdb v0.0.0-20220624081344-8c5f4212846b/go.mod h1:iwW+9cWfIzzDseEBCCeDSN5SD16Tidvy8cwQ7ZY8Qj4= github.com/milvus-io/gorocksdb v0.0.0-20220624081344-8c5f4212846b/go.mod h1:iwW+9cWfIzzDseEBCCeDSN5SD16Tidvy8cwQ7ZY8Qj4=
github.com/milvus-io/milvus-proto/go-api/v2 v2.3.4-0.20241108105827-266fb751b620 h1:0IWUDtDloift7cQHalhdjuVkL/3qSeiXFqR7MofZBkg= github.com/milvus-io/milvus-proto/go-api/v2 v2.3.4-0.20241111062829-6de3d96f664f h1:yLxT8NH0ixUOJMqJuk0xvGf0cKsr+N2xibyTat256PI=
github.com/milvus-io/milvus-proto/go-api/v2 v2.3.4-0.20241108105827-266fb751b620/go.mod h1:/6UT4zZl6awVeXLeE7UGDWZvXj3IWkRsh3mqsn0DiAs= github.com/milvus-io/milvus-proto/go-api/v2 v2.3.4-0.20241111062829-6de3d96f664f/go.mod h1:/6UT4zZl6awVeXLeE7UGDWZvXj3IWkRsh3mqsn0DiAs=
github.com/milvus-io/pulsar-client-go v0.12.1 h1:O2JZp1tsYiO7C0MQ4hrUY/aJXnn2Gry6hpm7UodghmE= github.com/milvus-io/pulsar-client-go v0.12.1 h1:O2JZp1tsYiO7C0MQ4hrUY/aJXnn2Gry6hpm7UodghmE=
github.com/milvus-io/pulsar-client-go v0.12.1/go.mod h1:dkutuH4oS2pXiGm+Ti7fQZ4MRjrMPZ8IJeEGAWMeckk= github.com/milvus-io/pulsar-client-go v0.12.1/go.mod h1:dkutuH4oS2pXiGm+Ti7fQZ4MRjrMPZ8IJeEGAWMeckk=
github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 h1:AMFGa4R4MiIpspGNG7Z948v4n35fFGB3RR3G/ry4FWs= github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 h1:AMFGa4R4MiIpspGNG7Z948v4n35fFGB3RR3G/ry4FWs=

View File

@ -19,10 +19,12 @@ package datacoord
import ( import (
"context" "context"
"encoding/json" "encoding/json"
"strconv"
"sync" "sync"
"time" "time"
"github.com/hashicorp/golang-lru/v2/expirable" "github.com/hashicorp/golang-lru/v2/expirable"
"github.com/samber/lo"
"go.uber.org/zap" "go.uber.org/zap"
"google.golang.org/protobuf/proto" "google.golang.org/protobuf/proto"
@ -31,6 +33,7 @@ import (
"github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/log"
"github.com/milvus-io/milvus/pkg/util/metricsinfo" "github.com/milvus-io/milvus/pkg/util/metricsinfo"
"github.com/milvus-io/milvus/pkg/util/timerecord" "github.com/milvus-io/milvus/pkg/util/timerecord"
"github.com/milvus-io/milvus/pkg/util/typeutil"
) )
func newCompactionTaskStats(task *datapb.CompactionTask) *metricsinfo.CompactionTask { func newCompactionTaskStats(task *datapb.CompactionTask) *metricsinfo.CompactionTask {
@ -40,11 +43,15 @@ func newCompactionTaskStats(task *datapb.CompactionTask) *metricsinfo.Compaction
Type: task.Type.String(), Type: task.Type.String(),
State: task.State.String(), State: task.State.String(),
FailReason: task.FailReason, FailReason: task.FailReason,
StartTime: task.StartTime, StartTime: typeutil.TimestampToString(uint64(task.StartTime)),
EndTime: task.EndTime, EndTime: typeutil.TimestampToString(uint64(task.EndTime)),
TotalRows: task.TotalRows, TotalRows: task.TotalRows,
InputSegments: task.InputSegments, InputSegments: lo.Map(task.InputSegments, func(t int64, i int) string {
ResultSegments: task.ResultSegments, return strconv.FormatInt(t, 10)
}),
ResultSegments: lo.Map(task.ResultSegments, func(t int64, i int) string {
return strconv.FormatInt(t, 10)
}),
} }
} }

View File

@ -319,7 +319,7 @@ func Test_compactionTrigger_force(t *testing.T) {
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 0, CreatedUTCTime: 0,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
WriteHandoff: false, WriteHandoff: false,
@ -338,7 +338,7 @@ func Test_compactionTrigger_force(t *testing.T) {
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 0, CreatedUTCTime: 0,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
WriteHandoff: false, WriteHandoff: false,
@ -357,7 +357,7 @@ func Test_compactionTrigger_force(t *testing.T) {
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 0, CreatedUTCTime: 0,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
WriteHandoff: false, WriteHandoff: false,

View File

@ -498,7 +498,7 @@ func createMetaForRecycleUnusedSegIndexes(catalog metastore.DataCoordCatalog) *m
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 10, CreatedUTCTime: 10,
IndexFileKeys: []string{"file1", "file2"}, IndexFileKeys: []string{"file1", "file2"},
IndexSize: 0, IndexSize: 0,
WriteHandoff: false, WriteHandoff: false,
@ -517,7 +517,7 @@ func createMetaForRecycleUnusedSegIndexes(catalog metastore.DataCoordCatalog) *m
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 10, CreatedUTCTime: 10,
IndexFileKeys: []string{"file1", "file2"}, IndexFileKeys: []string{"file1", "file2"},
IndexSize: 0, IndexSize: 0,
WriteHandoff: false, WriteHandoff: false,
@ -543,7 +543,7 @@ func createMetaForRecycleUnusedSegIndexes(catalog metastore.DataCoordCatalog) *m
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 10, CreatedUTCTime: 10,
IndexFileKeys: []string{"file1", "file2"}, IndexFileKeys: []string{"file1", "file2"},
IndexSize: 0, IndexSize: 0,
}) })
@ -560,7 +560,7 @@ func createMetaForRecycleUnusedSegIndexes(catalog metastore.DataCoordCatalog) *m
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 10, CreatedUTCTime: 10,
IndexFileKeys: []string{"file1", "file2"}, IndexFileKeys: []string{"file1", "file2"},
IndexSize: 0, IndexSize: 0,
}) })
@ -663,7 +663,7 @@ func createMetaTableForRecycleUnusedIndexFiles(catalog *datacoord.Catalog) *meta
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 10, CreatedUTCTime: 10,
IndexFileKeys: []string{"file1", "file2"}, IndexFileKeys: []string{"file1", "file2"},
IndexSize: 0, IndexSize: 0,
WriteHandoff: false, WriteHandoff: false,
@ -682,7 +682,7 @@ func createMetaTableForRecycleUnusedIndexFiles(catalog *datacoord.Catalog) *meta
IndexState: commonpb.IndexState_InProgress, IndexState: commonpb.IndexState_InProgress,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 10, CreatedUTCTime: 10,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
WriteHandoff: false, WriteHandoff: false,
@ -721,7 +721,7 @@ func createMetaTableForRecycleUnusedIndexFiles(catalog *datacoord.Catalog) *meta
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 10, CreatedUTCTime: 10,
IndexFileKeys: []string{"file1", "file2"}, IndexFileKeys: []string{"file1", "file2"},
IndexSize: 0, IndexSize: 0,
WriteHandoff: false, WriteHandoff: false,
@ -738,7 +738,7 @@ func createMetaTableForRecycleUnusedIndexFiles(catalog *datacoord.Catalog) *meta
IndexState: commonpb.IndexState_InProgress, IndexState: commonpb.IndexState_InProgress,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 10, CreatedUTCTime: 10,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
WriteHandoff: false, WriteHandoff: false,
@ -1063,7 +1063,7 @@ func TestGarbageCollector_clearETCD(t *testing.T) {
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 0, CreatedUTCTime: 0,
IndexFileKeys: []string{"file1", "file2"}, IndexFileKeys: []string{"file1", "file2"},
IndexSize: 1024, IndexSize: 1024,
WriteHandoff: false, WriteHandoff: false,
@ -1082,7 +1082,7 @@ func TestGarbageCollector_clearETCD(t *testing.T) {
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 0, CreatedUTCTime: 0,
IndexFileKeys: []string{"file3", "file4"}, IndexFileKeys: []string{"file3", "file4"},
IndexSize: 1024, IndexSize: 1024,
WriteHandoff: false, WriteHandoff: false,
@ -1149,7 +1149,7 @@ func TestGarbageCollector_clearETCD(t *testing.T) {
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 0, CreatedUTCTime: 0,
IndexFileKeys: []string{"file1", "file2"}, IndexFileKeys: []string{"file1", "file2"},
IndexSize: 1024, IndexSize: 1024,
WriteHandoff: false, WriteHandoff: false,
@ -1167,7 +1167,7 @@ func TestGarbageCollector_clearETCD(t *testing.T) {
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 0, CreatedUTCTime: 0,
IndexFileKeys: []string{"file3", "file4"}, IndexFileKeys: []string{"file3", "file4"},
IndexSize: 1024, IndexSize: 1024,
WriteHandoff: false, WriteHandoff: false,

View File

@ -43,6 +43,7 @@ import (
"github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/log"
"github.com/milvus-io/milvus/pkg/metrics" "github.com/milvus-io/milvus/pkg/metrics"
"github.com/milvus-io/milvus/pkg/util/indexparams" "github.com/milvus-io/milvus/pkg/util/indexparams"
"github.com/milvus-io/milvus/pkg/util/metricsinfo"
"github.com/milvus-io/milvus/pkg/util/timerecord" "github.com/milvus-io/milvus/pkg/util/timerecord"
"github.com/milvus-io/milvus/pkg/util/typeutil" "github.com/milvus-io/milvus/pkg/util/typeutil"
) )
@ -63,20 +64,8 @@ type indexMeta struct {
segmentIndexes map[UniqueID]map[UniqueID]*model.SegmentIndex segmentIndexes map[UniqueID]map[UniqueID]*model.SegmentIndex
} }
type indexTaskStats struct { func newIndexTaskStats(s *model.SegmentIndex) *metricsinfo.IndexTaskStats {
IndexID UniqueID `json:"index_id,omitempty"` return &metricsinfo.IndexTaskStats{
CollectionID UniqueID `json:"collection_id,omitempty"`
SegmentID UniqueID `json:"segment_id,omitempty"`
BuildID UniqueID `json:"build_id,omitempty"`
IndexState string `json:"index_state,omitempty"`
FailReason string `json:"fail_reason,omitempty"`
IndexSize uint64 `json:"index_size,omitempty"`
IndexVersion int64 `json:"index_version,omitempty"`
CreateTime uint64 `json:"create_time,omitempty"`
}
func newIndexTaskStats(s *model.SegmentIndex) *indexTaskStats {
return &indexTaskStats{
IndexID: s.IndexID, IndexID: s.IndexID,
CollectionID: s.CollectionID, CollectionID: s.CollectionID,
SegmentID: s.SegmentID, SegmentID: s.SegmentID,
@ -85,7 +74,7 @@ func newIndexTaskStats(s *model.SegmentIndex) *indexTaskStats {
FailReason: s.FailReason, FailReason: s.FailReason,
IndexSize: s.IndexSize, IndexSize: s.IndexSize,
IndexVersion: s.IndexVersion, IndexVersion: s.IndexVersion,
CreateTime: s.CreateTime, CreatedUTCTime: typeutil.TimestampToString(s.CreatedUTCTime),
} }
} }
@ -94,7 +83,7 @@ type segmentBuildInfo struct {
// buildID -> segmentIndex // buildID -> segmentIndex
buildID2SegmentIndex map[UniqueID]*model.SegmentIndex buildID2SegmentIndex map[UniqueID]*model.SegmentIndex
// taskStats records the task stats of the segment // taskStats records the task stats of the segment
taskStats *expirable.LRU[UniqueID, *indexTaskStats] taskStats *expirable.LRU[UniqueID, *metricsinfo.IndexTaskStats]
} }
func newSegmentIndexBuildInfo() *segmentBuildInfo { func newSegmentIndexBuildInfo() *segmentBuildInfo {
@ -102,7 +91,7 @@ func newSegmentIndexBuildInfo() *segmentBuildInfo {
// build ID -> segment index // build ID -> segment index
buildID2SegmentIndex: make(map[UniqueID]*model.SegmentIndex), buildID2SegmentIndex: make(map[UniqueID]*model.SegmentIndex),
// build ID -> task stats // build ID -> task stats
taskStats: expirable.NewLRU[UniqueID, *indexTaskStats](64, nil, time.Minute*30), taskStats: expirable.NewLRU[UniqueID, *metricsinfo.IndexTaskStats](64, nil, time.Minute*30),
} }
} }
@ -124,7 +113,7 @@ func (m *segmentBuildInfo) List() map[UniqueID]*model.SegmentIndex {
return m.buildID2SegmentIndex return m.buildID2SegmentIndex
} }
func (m *segmentBuildInfo) GetTaskStats() []*indexTaskStats { func (m *segmentBuildInfo) GetTaskStats() []*metricsinfo.IndexTaskStats {
return m.taskStats.Values() return m.taskStats.Values()
} }

View File

@ -37,6 +37,7 @@ import (
"github.com/milvus-io/milvus/internal/proto/indexpb" "github.com/milvus-io/milvus/internal/proto/indexpb"
"github.com/milvus-io/milvus/internal/proto/workerpb" "github.com/milvus-io/milvus/internal/proto/workerpb"
"github.com/milvus-io/milvus/pkg/common" "github.com/milvus-io/milvus/pkg/common"
"github.com/milvus-io/milvus/pkg/util/metricsinfo"
) )
func TestReloadFromKV(t *testing.T) { func TestReloadFromKV(t *testing.T) {
@ -527,7 +528,7 @@ func TestMeta_AddSegmentIndex(t *testing.T) {
IndexState: 0, IndexState: 0,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 12, CreatedUTCTime: 12,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
} }
@ -674,7 +675,7 @@ func TestMeta_GetSegmentIndexState(t *testing.T) {
IndexState: commonpb.IndexState_Unissued, IndexState: commonpb.IndexState_Unissued,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 12, CreatedUTCTime: 12,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
}) })
@ -696,7 +697,7 @@ func TestMeta_GetSegmentIndexState(t *testing.T) {
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 12, CreatedUTCTime: 12,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
}) })
@ -744,7 +745,7 @@ func TestMeta_GetIndexedSegment(t *testing.T) {
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 10, CreatedUTCTime: 10,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
}, },
@ -780,7 +781,7 @@ func TestMeta_GetIndexedSegment(t *testing.T) {
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 10, CreatedUTCTime: 10,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
}) })
@ -1102,7 +1103,7 @@ func TestMeta_GetIndexJob(t *testing.T) {
IndexState: commonpb.IndexState_Unissued, IndexState: commonpb.IndexState_Unissued,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 0, CreatedUTCTime: 0,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
}) })
@ -1190,7 +1191,7 @@ func updateSegmentIndexMeta(t *testing.T) *indexMeta {
IndexState: commonpb.IndexState_Unissued, IndexState: commonpb.IndexState_Unissued,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 0, CreatedUTCTime: 0,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
}) })
@ -1211,7 +1212,7 @@ func updateSegmentIndexMeta(t *testing.T) *indexMeta {
IndexState: commonpb.IndexState_Unissued, IndexState: commonpb.IndexState_Unissued,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 0, CreatedUTCTime: 0,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
}, },
@ -1528,7 +1529,7 @@ func TestBuildIndexTaskStatsJSON(t *testing.T) {
FailReason: "", FailReason: "",
IndexSize: 1024, IndexSize: 1024,
IndexVersion: 1, IndexVersion: 1,
CreateTime: uint64(time.Now().Unix()), CreatedUTCTime: uint64(time.Now().Unix()),
} }
si2 := &model.SegmentIndex{ si2 := &model.SegmentIndex{
BuildID: 2, BuildID: 2,
@ -1539,7 +1540,7 @@ func TestBuildIndexTaskStatsJSON(t *testing.T) {
FailReason: "", FailReason: "",
IndexSize: 2048, IndexSize: 2048,
IndexVersion: 1, IndexVersion: 1,
CreateTime: uint64(time.Now().Unix()), CreatedUTCTime: uint64(time.Now().Unix()),
} }
actualJSON := im.TaskStatsJSON() actualJSON := im.TaskStatsJSON()
@ -1553,7 +1554,7 @@ func TestBuildIndexTaskStatsJSON(t *testing.T) {
assert.True(t, ok) assert.True(t, ok)
assert.EqualValues(t, si1, ret1) assert.EqualValues(t, si1, ret1)
expectedTasks := []*indexTaskStats{ expectedTasks := []*metricsinfo.IndexTaskStats{
newIndexTaskStats(si1), newIndexTaskStats(si1),
newIndexTaskStats(si2), newIndexTaskStats(si2),
} }

View File

@ -70,7 +70,7 @@ func (s *Server) createIndexForSegment(segment *SegmentInfo, indexID UniqueID) e
NumRows: segment.NumOfRows, NumRows: segment.NumOfRows,
IndexID: indexID, IndexID: indexID,
BuildID: buildID, BuildID: buildID,
CreateTime: uint64(segment.ID), CreatedUTCTime: uint64(time.Now().Unix()),
WriteHandoff: false, WriteHandoff: false,
} }
if err = s.meta.indexMeta.AddSegmentIndex(segIndex); err != nil { if err = s.meta.indexMeta.AddSegmentIndex(segIndex); err != nil {

View File

@ -411,7 +411,7 @@ func TestServer_AlterIndex(t *testing.T) {
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: createTS, CreatedUTCTime: createTS,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
WriteHandoff: false, WriteHandoff: false,
@ -428,7 +428,7 @@ func TestServer_AlterIndex(t *testing.T) {
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: createTS, CreatedUTCTime: createTS,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
WriteHandoff: false, WriteHandoff: false,
@ -445,7 +445,7 @@ func TestServer_AlterIndex(t *testing.T) {
IndexState: commonpb.IndexState_InProgress, IndexState: commonpb.IndexState_InProgress,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: createTS, CreatedUTCTime: createTS,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
WriteHandoff: false, WriteHandoff: false,
@ -462,7 +462,7 @@ func TestServer_AlterIndex(t *testing.T) {
IndexState: commonpb.IndexState_Failed, IndexState: commonpb.IndexState_Failed,
FailReason: "mock failed", FailReason: "mock failed",
IsDeleted: false, IsDeleted: false,
CreateTime: createTS, CreatedUTCTime: createTS,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
WriteHandoff: false, WriteHandoff: false,
@ -479,7 +479,7 @@ func TestServer_AlterIndex(t *testing.T) {
IndexState: commonpb.IndexState_Unissued, IndexState: commonpb.IndexState_Unissued,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: createTS, CreatedUTCTime: createTS,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
WriteHandoff: false, WriteHandoff: false,
@ -496,7 +496,7 @@ func TestServer_AlterIndex(t *testing.T) {
NodeID: 0, NodeID: 0,
IndexVersion: 1, IndexVersion: 1,
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
CreateTime: createTS, CreatedUTCTime: createTS,
}, },
indexID + 1: { indexID + 1: {
SegmentID: segID, SegmentID: segID,
@ -508,7 +508,7 @@ func TestServer_AlterIndex(t *testing.T) {
NodeID: 0, NodeID: 0,
IndexVersion: 1, IndexVersion: 1,
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
CreateTime: createTS, CreatedUTCTime: createTS,
}, },
indexID + 3: { indexID + 3: {
SegmentID: segID, SegmentID: segID,
@ -520,7 +520,7 @@ func TestServer_AlterIndex(t *testing.T) {
NodeID: 0, NodeID: 0,
IndexVersion: 1, IndexVersion: 1,
IndexState: commonpb.IndexState_InProgress, IndexState: commonpb.IndexState_InProgress,
CreateTime: createTS, CreatedUTCTime: createTS,
}, },
indexID + 4: { indexID + 4: {
SegmentID: segID, SegmentID: segID,
@ -533,7 +533,7 @@ func TestServer_AlterIndex(t *testing.T) {
IndexVersion: 1, IndexVersion: 1,
IndexState: commonpb.IndexState_Failed, IndexState: commonpb.IndexState_Failed,
FailReason: "mock failed", FailReason: "mock failed",
CreateTime: createTS, CreatedUTCTime: createTS,
}, },
indexID + 5: { indexID + 5: {
SegmentID: segID, SegmentID: segID,
@ -545,7 +545,7 @@ func TestServer_AlterIndex(t *testing.T) {
NodeID: 0, NodeID: 0,
IndexVersion: 1, IndexVersion: 1,
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
CreateTime: createTS, CreatedUTCTime: createTS,
}, },
}, },
}, },
@ -819,7 +819,7 @@ func TestServer_GetIndexState(t *testing.T) {
IndexState: commonpb.IndexState_IndexStateNone, IndexState: commonpb.IndexState_IndexStateNone,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 0, CreatedUTCTime: 0,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
WriteHandoff: false, WriteHandoff: false,
@ -944,7 +944,7 @@ func TestServer_GetSegmentIndexState(t *testing.T) {
IndexState: commonpb.IndexState_InProgress, IndexState: commonpb.IndexState_InProgress,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: createTS, CreatedUTCTime: createTS,
IndexFileKeys: []string{"file1", "file2"}, IndexFileKeys: []string{"file1", "file2"},
IndexSize: 1025, IndexSize: 1025,
WriteHandoff: false, WriteHandoff: false,
@ -981,7 +981,7 @@ func TestServer_GetSegmentIndexState(t *testing.T) {
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: createTS, CreatedUTCTime: createTS,
IndexFileKeys: []string{"file1", "file2"}, IndexFileKeys: []string{"file1", "file2"},
IndexSize: 1025, IndexSize: 1025,
WriteHandoff: false, WriteHandoff: false,
@ -1103,7 +1103,7 @@ func TestServer_GetIndexBuildProgress(t *testing.T) {
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: createTS, CreatedUTCTime: createTS,
IndexFileKeys: []string{"file1", "file2"}, IndexFileKeys: []string{"file1", "file2"},
IndexSize: 0, IndexSize: 0,
WriteHandoff: false, WriteHandoff: false,
@ -1365,7 +1365,7 @@ func TestServer_DescribeIndex(t *testing.T) {
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: createTS, CreatedUTCTime: createTS,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
WriteHandoff: false, WriteHandoff: false,
@ -1382,7 +1382,7 @@ func TestServer_DescribeIndex(t *testing.T) {
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: createTS, CreatedUTCTime: createTS,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
WriteHandoff: false, WriteHandoff: false,
@ -1399,7 +1399,7 @@ func TestServer_DescribeIndex(t *testing.T) {
IndexState: commonpb.IndexState_InProgress, IndexState: commonpb.IndexState_InProgress,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: createTS, CreatedUTCTime: createTS,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
WriteHandoff: false, WriteHandoff: false,
@ -1416,7 +1416,7 @@ func TestServer_DescribeIndex(t *testing.T) {
IndexState: commonpb.IndexState_Failed, IndexState: commonpb.IndexState_Failed,
FailReason: "mock failed", FailReason: "mock failed",
IsDeleted: false, IsDeleted: false,
CreateTime: createTS, CreatedUTCTime: createTS,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
WriteHandoff: false, WriteHandoff: false,
@ -1433,7 +1433,7 @@ func TestServer_DescribeIndex(t *testing.T) {
IndexState: commonpb.IndexState_Unissued, IndexState: commonpb.IndexState_Unissued,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: createTS, CreatedUTCTime: createTS,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
WriteHandoff: false, WriteHandoff: false,
@ -1450,7 +1450,7 @@ func TestServer_DescribeIndex(t *testing.T) {
NodeID: 0, NodeID: 0,
IndexVersion: 1, IndexVersion: 1,
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
CreateTime: createTS, CreatedUTCTime: createTS,
}, },
indexID + 1: { indexID + 1: {
SegmentID: segID - 1, SegmentID: segID - 1,
@ -1462,7 +1462,7 @@ func TestServer_DescribeIndex(t *testing.T) {
NodeID: 0, NodeID: 0,
IndexVersion: 1, IndexVersion: 1,
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
CreateTime: createTS, CreatedUTCTime: createTS,
}, },
indexID + 3: { indexID + 3: {
SegmentID: segID - 1, SegmentID: segID - 1,
@ -1474,7 +1474,7 @@ func TestServer_DescribeIndex(t *testing.T) {
NodeID: 0, NodeID: 0,
IndexVersion: 1, IndexVersion: 1,
IndexState: commonpb.IndexState_InProgress, IndexState: commonpb.IndexState_InProgress,
CreateTime: createTS, CreatedUTCTime: createTS,
}, },
indexID + 4: { indexID + 4: {
SegmentID: segID - 1, SegmentID: segID - 1,
@ -1487,7 +1487,7 @@ func TestServer_DescribeIndex(t *testing.T) {
IndexVersion: 1, IndexVersion: 1,
IndexState: commonpb.IndexState_Failed, IndexState: commonpb.IndexState_Failed,
FailReason: "mock failed", FailReason: "mock failed",
CreateTime: createTS, CreatedUTCTime: createTS,
}, },
indexID + 5: { indexID + 5: {
SegmentID: segID - 1, SegmentID: segID - 1,
@ -1499,7 +1499,7 @@ func TestServer_DescribeIndex(t *testing.T) {
NodeID: 0, NodeID: 0,
IndexVersion: 1, IndexVersion: 1,
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
CreateTime: createTS, CreatedUTCTime: createTS,
}, },
}, },
}, },
@ -1870,7 +1870,7 @@ func TestServer_GetIndexStatistics(t *testing.T) {
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: createTS, CreatedUTCTime: createTS,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
WriteHandoff: false, WriteHandoff: false,
@ -1887,7 +1887,7 @@ func TestServer_GetIndexStatistics(t *testing.T) {
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: createTS, CreatedUTCTime: createTS,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
WriteHandoff: false, WriteHandoff: false,
@ -1904,7 +1904,7 @@ func TestServer_GetIndexStatistics(t *testing.T) {
IndexState: commonpb.IndexState_InProgress, IndexState: commonpb.IndexState_InProgress,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: createTS, CreatedUTCTime: createTS,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
WriteHandoff: false, WriteHandoff: false,
@ -1921,7 +1921,7 @@ func TestServer_GetIndexStatistics(t *testing.T) {
IndexState: commonpb.IndexState_Failed, IndexState: commonpb.IndexState_Failed,
FailReason: "mock failed", FailReason: "mock failed",
IsDeleted: false, IsDeleted: false,
CreateTime: createTS, CreatedUTCTime: createTS,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
WriteHandoff: false, WriteHandoff: false,
@ -1938,7 +1938,7 @@ func TestServer_GetIndexStatistics(t *testing.T) {
IndexState: commonpb.IndexState_Unissued, IndexState: commonpb.IndexState_Unissued,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: createTS, CreatedUTCTime: createTS,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
WriteHandoff: false, WriteHandoff: false,
@ -2262,7 +2262,7 @@ func TestServer_GetIndexInfos(t *testing.T) {
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: createTS, CreatedUTCTime: createTS,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
WriteHandoff: false, WriteHandoff: false,

View File

@ -37,6 +37,7 @@ import (
"github.com/milvus-io/milvus/pkg/util/merr" "github.com/milvus-io/milvus/pkg/util/merr"
"github.com/milvus-io/milvus/pkg/util/metricsinfo" "github.com/milvus-io/milvus/pkg/util/metricsinfo"
"github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/paramtable"
"github.com/milvus-io/milvus/pkg/util/tsoutil"
"github.com/milvus-io/milvus/pkg/util/typeutil" "github.com/milvus-io/milvus/pkg/util/typeutil"
"github.com/milvus-io/milvus/pkg/util/uniquegenerator" "github.com/milvus-io/milvus/pkg/util/uniquegenerator"
) )
@ -91,7 +92,7 @@ func (s *Server) getChannelsJSON(ctx context.Context, req *milvuspb.GetMetricsRe
channel2Checkpoints := s.meta.GetChannelCheckpoints() channel2Checkpoints := s.meta.GetChannelCheckpoints()
for _, channel := range channels { for _, channel := range channels {
if cp, ok := channel2Checkpoints[channel.Name]; ok { if cp, ok := channel2Checkpoints[channel.Name]; ok {
channel.CheckpointTS = typeutil.TimestampToString(cp.GetTimestamp()) channel.CheckpointTS = tsoutil.PhysicalTimeFormat(cp.GetTimestamp())
} else { } else {
log.Warn("channel not found in meta cache", zap.String("channel", channel.Name)) log.Warn("channel not found in meta cache", zap.String("channel", channel.Name))
} }
@ -139,15 +140,11 @@ func (s *Server) getDistJSON(ctx context.Context, req *milvuspb.GetMetricsReques
dmChannel := metrics.NewDMChannelFrom(chInfo.GetVchan()) dmChannel := metrics.NewDMChannelFrom(chInfo.GetVchan())
dmChannel.NodeID = nodeID dmChannel.NodeID = nodeID
dmChannel.WatchState = chInfo.State.String() dmChannel.WatchState = chInfo.State.String()
dmChannel.StartWatchTS = chInfo.GetStartTs() dmChannel.StartWatchTS = typeutil.TimestampToString(uint64(chInfo.GetStartTs()))
channels = append(channels, dmChannel) channels = append(channels, dmChannel)
} }
} }
if len(segments) == 0 && len(channels) == 0 {
return ""
}
dist := &metricsinfo.DataCoordDist{ dist := &metricsinfo.DataCoordDist{
Segments: segments, Segments: segments,
DMChannels: channels, DMChannels: channels,

View File

@ -34,6 +34,7 @@ import (
"github.com/milvus-io/milvus/pkg/util/merr" "github.com/milvus-io/milvus/pkg/util/merr"
"github.com/milvus-io/milvus/pkg/util/metricsinfo" "github.com/milvus-io/milvus/pkg/util/metricsinfo"
"github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/paramtable"
"github.com/milvus-io/milvus/pkg/util/tsoutil"
"github.com/milvus-io/milvus/pkg/util/typeutil" "github.com/milvus-io/milvus/pkg/util/typeutil"
) )
@ -216,8 +217,8 @@ func TestGetSyncTaskMetrics(t *testing.T) {
SegmentID: 1, SegmentID: 1,
BatchRows: 100, BatchRows: 100,
SegmentLevel: "L0", SegmentLevel: "L0",
TSFrom: 1000, TSFrom: "t1",
TSTo: 2000, TSTo: "t2",
DeltaRowCount: 10, DeltaRowCount: 10,
FlushSize: 1024, FlushSize: 1024,
RunningTime: "2h", RunningTime: "2h",
@ -502,7 +503,7 @@ func TestGetChannelsJSON(t *testing.T) {
Name: "channel1", Name: "channel1",
CollectionID: 100, CollectionID: 100,
NodeID: 1, NodeID: 1,
CheckpointTS: typeutil.TimestampToString(1000), CheckpointTS: tsoutil.PhysicalTimeFormat(1000),
}, },
} }
channelsBytes, err = json.Marshal(channels) channelsBytes, err = json.Marshal(channels)
@ -678,7 +679,7 @@ func TestGetDistJSON(t *testing.T) {
cm.EXPECT().GetChannelWatchInfos().Return(map[int64]map[string]*datapb.ChannelWatchInfo{}) cm.EXPECT().GetChannelWatchInfos().Return(map[int64]map[string]*datapb.ChannelWatchInfo{})
svr.channelManager = cm svr.channelManager = cm
expectedJSON := "" expectedJSON := "{}"
actualJSON := svr.getDistJSON(ctx, req) actualJSON := svr.getDistJSON(ctx, req)
assert.Equal(t, expectedJSON, actualJSON) assert.Equal(t, expectedJSON, actualJSON)
}) })

View File

@ -1618,7 +1618,7 @@ func TestGetRecoveryInfo(t *testing.T) {
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 0, CreatedUTCTime: 0,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
}) })

View File

@ -1269,7 +1269,7 @@ func TestGetRecoveryInfoV2(t *testing.T) {
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 0, CreatedUTCTime: 0,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
}) })

View File

@ -69,7 +69,7 @@ func createIndexMeta(catalog metastore.DataCoordCatalog) *indexMeta {
IndexState: commonpb.IndexState_Unissued, IndexState: commonpb.IndexState_Unissued,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 0, CreatedUTCTime: 0,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 1, IndexSize: 1,
}) })
@ -86,7 +86,7 @@ func createIndexMeta(catalog metastore.DataCoordCatalog) *indexMeta {
IndexState: commonpb.IndexState_InProgress, IndexState: commonpb.IndexState_InProgress,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 1111, CreatedUTCTime: 1111,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 1, IndexSize: 1,
}) })
@ -103,7 +103,7 @@ func createIndexMeta(catalog metastore.DataCoordCatalog) *indexMeta {
IndexState: commonpb.IndexState_InProgress, IndexState: commonpb.IndexState_InProgress,
FailReason: "", FailReason: "",
IsDeleted: true, IsDeleted: true,
CreateTime: 1111, CreatedUTCTime: 1111,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 1, IndexSize: 1,
}) })
@ -120,7 +120,7 @@ func createIndexMeta(catalog metastore.DataCoordCatalog) *indexMeta {
IndexState: commonpb.IndexState_Unissued, IndexState: commonpb.IndexState_Unissued,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 1111, CreatedUTCTime: 1111,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 1, IndexSize: 1,
}) })
@ -137,7 +137,7 @@ func createIndexMeta(catalog metastore.DataCoordCatalog) *indexMeta {
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 1111, CreatedUTCTime: 1111,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 1, IndexSize: 1,
}) })
@ -154,7 +154,7 @@ func createIndexMeta(catalog metastore.DataCoordCatalog) *indexMeta {
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 1111, CreatedUTCTime: 1111,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 1, IndexSize: 1,
}) })
@ -171,7 +171,7 @@ func createIndexMeta(catalog metastore.DataCoordCatalog) *indexMeta {
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 1111, CreatedUTCTime: 1111,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 1, IndexSize: 1,
}) })
@ -188,7 +188,7 @@ func createIndexMeta(catalog metastore.DataCoordCatalog) *indexMeta {
IndexState: commonpb.IndexState_Failed, IndexState: commonpb.IndexState_Failed,
FailReason: "error", FailReason: "error",
IsDeleted: false, IsDeleted: false,
CreateTime: 1111, CreatedUTCTime: 1111,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 1, IndexSize: 1,
}) })
@ -205,7 +205,7 @@ func createIndexMeta(catalog metastore.DataCoordCatalog) *indexMeta {
IndexState: commonpb.IndexState_InProgress, IndexState: commonpb.IndexState_InProgress,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 1111, CreatedUTCTime: 1111,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 1, IndexSize: 1,
}) })
@ -222,7 +222,7 @@ func createIndexMeta(catalog metastore.DataCoordCatalog) *indexMeta {
IndexState: commonpb.IndexState_Unissued, IndexState: commonpb.IndexState_Unissued,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 1111, CreatedUTCTime: 1111,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 1, IndexSize: 1,
}) })
@ -239,7 +239,7 @@ func createIndexMeta(catalog metastore.DataCoordCatalog) *indexMeta {
IndexState: commonpb.IndexState_Unissued, IndexState: commonpb.IndexState_Unissued,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 1111, CreatedUTCTime: 1111,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 1, IndexSize: 1,
}) })
@ -289,7 +289,7 @@ func createIndexMeta(catalog metastore.DataCoordCatalog) *indexMeta {
IndexState: commonpb.IndexState_Unissued, IndexState: commonpb.IndexState_Unissued,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 0, CreatedUTCTime: 0,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 1, IndexSize: 1,
}, },
@ -307,7 +307,7 @@ func createIndexMeta(catalog metastore.DataCoordCatalog) *indexMeta {
IndexState: commonpb.IndexState_InProgress, IndexState: commonpb.IndexState_InProgress,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 1111, CreatedUTCTime: 1111,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 1, IndexSize: 1,
}, },
@ -325,7 +325,7 @@ func createIndexMeta(catalog metastore.DataCoordCatalog) *indexMeta {
IndexState: commonpb.IndexState_InProgress, IndexState: commonpb.IndexState_InProgress,
FailReason: "", FailReason: "",
IsDeleted: true, IsDeleted: true,
CreateTime: 1111, CreatedUTCTime: 1111,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 1, IndexSize: 1,
}, },
@ -343,7 +343,7 @@ func createIndexMeta(catalog metastore.DataCoordCatalog) *indexMeta {
IndexState: commonpb.IndexState_Unissued, IndexState: commonpb.IndexState_Unissued,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 1111, CreatedUTCTime: 1111,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 1, IndexSize: 1,
}, },
@ -361,7 +361,7 @@ func createIndexMeta(catalog metastore.DataCoordCatalog) *indexMeta {
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 1111, CreatedUTCTime: 1111,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 1, IndexSize: 1,
}, },
@ -379,7 +379,7 @@ func createIndexMeta(catalog metastore.DataCoordCatalog) *indexMeta {
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 1111, CreatedUTCTime: 1111,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 1, IndexSize: 1,
}, },
@ -397,7 +397,7 @@ func createIndexMeta(catalog metastore.DataCoordCatalog) *indexMeta {
IndexState: commonpb.IndexState_Finished, IndexState: commonpb.IndexState_Finished,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 1111, CreatedUTCTime: 1111,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 1, IndexSize: 1,
}, },
@ -415,7 +415,7 @@ func createIndexMeta(catalog metastore.DataCoordCatalog) *indexMeta {
IndexState: commonpb.IndexState_Failed, IndexState: commonpb.IndexState_Failed,
FailReason: "error", FailReason: "error",
IsDeleted: false, IsDeleted: false,
CreateTime: 1111, CreatedUTCTime: 1111,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 1, IndexSize: 1,
}, },
@ -433,7 +433,7 @@ func createIndexMeta(catalog metastore.DataCoordCatalog) *indexMeta {
IndexState: commonpb.IndexState_InProgress, IndexState: commonpb.IndexState_InProgress,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 1111, CreatedUTCTime: 1111,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 1, IndexSize: 1,
}, },
@ -451,7 +451,7 @@ func createIndexMeta(catalog metastore.DataCoordCatalog) *indexMeta {
IndexState: commonpb.IndexState_Unissued, IndexState: commonpb.IndexState_Unissued,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 1111, CreatedUTCTime: 1111,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 1, IndexSize: 1,
}, },
@ -469,7 +469,7 @@ func createIndexMeta(catalog metastore.DataCoordCatalog) *indexMeta {
IndexState: commonpb.IndexState_Unissued, IndexState: commonpb.IndexState_Unissued,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 1111, CreatedUTCTime: 1111,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 1, IndexSize: 1,
}, },
@ -1552,7 +1552,7 @@ func (s *taskSchedulerSuite) Test_indexTaskWithMvOptionalScalarField() {
IndexState: commonpb.IndexState_Unissued, IndexState: commonpb.IndexState_Unissued,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 0, CreatedUTCTime: 0,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
}, },
@ -1594,7 +1594,7 @@ func (s *taskSchedulerSuite) Test_indexTaskWithMvOptionalScalarField() {
IndexState: commonpb.IndexState_Unissued, IndexState: commonpb.IndexState_Unissued,
FailReason: "", FailReason: "",
IsDeleted: false, IsDeleted: false,
CreateTime: 0, CreatedUTCTime: 0,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
}) })

View File

@ -27,9 +27,6 @@ func (h *Handlers) RegisterRoutesTo(router gin.IRouter) {
router.GET("/health", wrapHandler(h.handleGetHealth)) router.GET("/health", wrapHandler(h.handleGetHealth))
router.POST("/dummy", wrapHandler(h.handleDummy)) router.POST("/dummy", wrapHandler(h.handleDummy))
router.GET("/databases", wrapHandler(h.handleListDatabases))
router.GET("/database", wrapHandler(h.handleDescribeDatabases))
router.POST("/collection", wrapHandler(h.handleCreateCollection)) router.POST("/collection", wrapHandler(h.handleCreateCollection))
router.DELETE("/collection", wrapHandler(h.handleDropCollection)) router.DELETE("/collection", wrapHandler(h.handleDropCollection))
router.GET("/collection/existence", wrapHandler(h.handleHasCollection)) router.GET("/collection/existence", wrapHandler(h.handleHasCollection))
@ -99,24 +96,6 @@ func (h *Handlers) handleDummy(c *gin.Context) (interface{}, error) {
return h.proxy.Dummy(c, &req) return h.proxy.Dummy(c, &req)
} }
func (h *Handlers) handleListDatabases(c *gin.Context) (interface{}, error) {
req := milvuspb.ListDatabasesRequest{}
err := shouldBind(c, &req)
if err != nil {
return nil, fmt.Errorf("%w: parse body failed: %v", errBadRequest, err)
}
return h.proxy.ListDatabases(c, &req)
}
func (h *Handlers) handleDescribeDatabases(c *gin.Context) (interface{}, error) {
req := milvuspb.DescribeDatabaseRequest{}
err := shouldBind(c, &req)
if err != nil {
return nil, fmt.Errorf("%w: parse body failed: %v", errBadRequest, err)
}
return h.proxy.DescribeDatabase(c, &req)
}
func (h *Handlers) handleCreateCollection(c *gin.Context) (interface{}, error) { func (h *Handlers) handleCreateCollection(c *gin.Context) (interface{}, error) {
wrappedReq := WrappedCreateCollectionRequest{} wrappedReq := WrappedCreateCollectionRequest{}
err := shouldBind(c, &wrappedReq) err := shouldBind(c, &wrappedReq)

View File

@ -456,13 +456,6 @@ func (s *Server) init() error {
} }
} }
if HTTPParams.Enabled.GetAsBool() {
registerHTTPHandlerOnce.Do(func() {
log.Info("register Proxy http server")
s.registerHTTPServer()
})
}
if s.rootCoordClient == nil { if s.rootCoordClient == nil {
var err error var err error
log.Debug("create RootCoord client for Proxy") log.Debug("create RootCoord client for Proxy")
@ -529,6 +522,13 @@ func (s *Server) init() error {
s.proxy.SetQueryCoordClient(s.queryCoordClient) s.proxy.SetQueryCoordClient(s.queryCoordClient)
log.Debug("set QueryCoord client for Proxy done") log.Debug("set QueryCoord client for Proxy done")
if HTTPParams.Enabled.GetAsBool() {
registerHTTPHandlerOnce.Do(func() {
log.Info("register Proxy http server")
s.registerHTTPServer()
})
}
log.Debug(fmt.Sprintf("update Proxy's state to %s", commonpb.StateCode_Initializing.String())) log.Debug(fmt.Sprintf("update Proxy's state to %s", commonpb.StateCode_Initializing.String()))
s.proxy.UpdateStateCode(commonpb.StateCode_Initializing) s.proxy.UpdateStateCode(commonpb.StateCode_Initializing)

View File

@ -28,6 +28,7 @@ import (
"github.com/milvus-io/milvus/pkg/metrics" "github.com/milvus-io/milvus/pkg/metrics"
"github.com/milvus-io/milvus/pkg/util/metricsinfo" "github.com/milvus-io/milvus/pkg/util/metricsinfo"
"github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/paramtable"
"github.com/milvus-io/milvus/pkg/util/tsoutil"
"github.com/milvus-io/milvus/pkg/util/typeutil" "github.com/milvus-io/milvus/pkg/util/typeutil"
) )
@ -127,7 +128,7 @@ func (fm *fgManagerImpl) GetChannelsJSON() string {
channels = append(channels, &metricsinfo.Channel{ channels = append(channels, &metricsinfo.Channel{
Name: ch, Name: ch,
WatchState: ds.fg.Status(), WatchState: ds.fg.Status(),
LatestTimeTick: typeutil.TimestampToString(latestTimeTick), LatestTimeTick: tsoutil.PhysicalTimeFormat(latestTimeTick),
NodeID: paramtable.GetNodeID(), NodeID: paramtable.GetNodeID(),
CollectionID: ds.metacache.Collection(), CollectionID: ds.metacache.Collection(),
}) })

View File

@ -43,6 +43,7 @@ import (
"github.com/milvus-io/milvus/pkg/mq/msgstream" "github.com/milvus-io/milvus/pkg/mq/msgstream"
"github.com/milvus-io/milvus/pkg/util/metricsinfo" "github.com/milvus-io/milvus/pkg/util/metricsinfo"
"github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/paramtable"
"github.com/milvus-io/milvus/pkg/util/tsoutil"
"github.com/milvus-io/milvus/pkg/util/typeutil" "github.com/milvus-io/milvus/pkg/util/typeutil"
) )
@ -182,7 +183,7 @@ func TestGetChannelsJSON(t *testing.T) {
{ {
Name: "fake-ch-_1", Name: "fake-ch-_1",
WatchState: "Healthy", WatchState: "Healthy",
LatestTimeTick: typeutil.TimestampToString(0), LatestTimeTick: tsoutil.PhysicalTimeFormat(0),
NodeID: paramtable.GetNodeID(), NodeID: paramtable.GetNodeID(),
CollectionID: 1, CollectionID: 1,
}, },

View File

@ -42,6 +42,7 @@ import (
"github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/paramtable"
"github.com/milvus-io/milvus/pkg/util/retry" "github.com/milvus-io/milvus/pkg/util/retry"
"github.com/milvus-io/milvus/pkg/util/timerecord" "github.com/milvus-io/milvus/pkg/util/timerecord"
"github.com/milvus-io/milvus/pkg/util/tsoutil"
"github.com/milvus-io/milvus/pkg/util/typeutil" "github.com/milvus-io/milvus/pkg/util/typeutil"
) )
@ -422,8 +423,8 @@ func (t *SyncTask) MarshalJSON() ([]byte, error) {
SegmentID: t.segmentID, SegmentID: t.segmentID,
BatchRows: t.batchRows, BatchRows: t.batchRows,
SegmentLevel: t.level.String(), SegmentLevel: t.level.String(),
TSFrom: t.tsFrom, TSFrom: tsoutil.PhysicalTimeFormat(t.tsFrom),
TSTo: t.tsTo, TSTo: tsoutil.PhysicalTimeFormat(t.tsTo),
DeltaRowCount: t.deltaRowCount, DeltaRowCount: t.deltaRowCount,
FlushSize: t.flushedSize, FlushSize: t.flushedSize,
RunningTime: t.execTime.String(), RunningTime: t.execTime.String(),

View File

@ -400,8 +400,8 @@ func (s *SyncTaskSuite) TestSyncTask_MarshalJSON() {
SegmentID: t.segmentID, SegmentID: t.segmentID,
BatchRows: t.batchRows, BatchRows: t.batchRows,
SegmentLevel: t.level.String(), SegmentLevel: t.level.String(),
TSFrom: t.tsFrom, TSFrom: tsoutil.PhysicalTimeFormat(t.tsFrom),
TSTo: t.tsTo, TSTo: tsoutil.PhysicalTimeFormat(t.tsTo),
DeltaRowCount: t.deltaRowCount, DeltaRowCount: t.deltaRowCount,
FlushSize: t.flushedSize, FlushSize: t.flushedSize,
RunningTime: t.execTime.String(), RunningTime: t.execTime.String(),

View File

@ -109,4 +109,14 @@ const (
DNSegmentsPath = "/_dn/segments" DNSegmentsPath = "/_dn/segments"
// DNChannelsPath is the path to get channels in DataNode. // DNChannelsPath is the path to get channels in DataNode.
DNChannelsPath = "/_dn/channels" DNChannelsPath = "/_dn/channels"
// DatabaseListPath is the path to get all databases.
DatabaseListPath = "/_db/list"
// DatabaseDescPath is the path to get database description.
DatabaseDescPath = "/_db/desc"
// CollectionListPath is the path to get all collections.
CollectionListPath = "/_collection/list"
// CollectionDescPath is the path to get collection description.
CollectionDescPath = "/_collection/desc"
) )

View File

@ -99,7 +99,7 @@
// handleError(error); // handleError(error);
// }); // });
fetchData(MILVUS_URI + "/databases", databases) fetchData(MILVUS_URI + "/_db/list", listDatabaseData)
.then(data => { .then(data => {
databaseData = data; databaseData = data;
renderDatabases(startPage, paginationSize) renderDatabases(startPage, paginationSize)

View File

@ -31,21 +31,11 @@ const handleError = (error) => {
// window.location.href = `5xx.html?error=${errorMessage}`; // window.location.href = `5xx.html?error=${errorMessage}`;
}; };
const fetchData = (url, localData, kvParams) => { const fetchData = (url, localData) => {
if (DEBUG_MODE) { if (DEBUG_MODE) {
return new Promise((resolve) => { return new Promise((resolve) => {
resolve(JSON.parse(localData)); resolve(JSON.parse(localData));
}); });
} else if (kvParams && kvParams.length !== 0) {
return fetch(url, {
method: 'POST',
headers: {
'Accept': 'application/json',
'Content-Type': 'application/json',
},
mode: 'no-cors',
body: JSON.stringify(kvParams)
}).then(response => response.json())
} else { } else {
return fetch(url).then(response => { return fetch(url).then(response => {
return response.json(); return response.json();
@ -63,17 +53,3 @@ function getQueryParams() {
}); });
return params; return params;
} }
function formatTimestamp(timestamp) {
const date = new Date(timestamp); // Convert timestamp to a Date object
// Format the date components
const year = date.getFullYear();
const month = ('0' + (date.getMonth() + 1)).slice(-2); // Months are zero-indexed
const day = ('0' + date.getDate()).slice(-2);
const hours = ('0' + date.getHours()).slice(-2);
const minutes = ('0' + date.getMinutes()).slice(-2);
const seconds = ('0' + date.getSeconds()).slice(-2);
// Return formatted date string
return `${year}-${month}-${day} ${hours}:${minutes}:${seconds}`
}

View File

@ -445,38 +445,67 @@ const mconfigs = `
} }
`; `;
const collections =` const listCollectionData =`
{ {
"status": {
"error_code": "Success",
"reason": ""
},
"collection_names": [ "collection_names": [
"collection1", "collection_1",
"collection2", "collection_2",
"collection3", "collection_3",
"collection4", "collection_4",
"collection5", "collection_5",
"collection6", "collection_6",
"collection7", "collection_7",
"collection8", "collection_8",
"collection9", "collection_9",
"collection10" "collection_10"
], ],
"collection_ids": [ "collection_ids": [
1, 2, 3, 4, 5, 6, 7, 8, 9, 10 "1",
], "2",
"created_timestamps": [ "3",
1633036800, 1633123200, 1633209600, 1633296000, 1633382400, 1633468800, 1633555200, 1633641600, 1633728000, 1633814400 "4",
"5",
"6",
"7",
"8",
"9",
"10"
], ],
"created_utc_timestamps": [ "created_utc_timestamps": [
1633036800, 1633123200, 1633209600, 1633296000, 1633382400, 1633468800, 1633555200, 1633641600, 1633728000, 1633814400 "2021-10-01 00:00:00",
"2021-10-02 00:00:00",
"2021-10-03 00:00:00",
"2021-10-04 00:00:00",
"2021-10-05 00:00:00",
"2021-10-06 00:00:00",
"2021-10-07 00:00:00",
"2021-10-08 00:00:00",
"2021-10-09 00:00:00",
"2021-10-10 00:00:00"
], ],
"inMemory_percentages": [ "inMemory_percentages": [
100, 90, 80, 70, 60, 50, 40, 30, 20, 10 100,
90,
80,
70,
60,
50,
40,
30,
20,
10
], ],
"query_service_available": [ "query_service_available": [
true, false, false, false, false, false, false, false, false, false true,
true,
true,
true,
true,
false,
false,
false,
false,
false
] ]
} }
` `
@ -556,63 +585,57 @@ const collectionRequest = `
] ]
` `
const describeCollectionResp = ` const describeCollectionData = `
{ {
"status": { "collection_id": "1",
"error_code": 0, "collection_name": "example_collection",
"reason": "Success" "created_time": "2021-10-01 00:00:00",
"shards_num": 2,
"consistency_level": "Strong",
"aliases": ["alias1", "alias2"],
"properties": {
"property_key": "property_value"
}, },
"schema": { "db_name": "example_db",
"name": "example_collection", "num_partitions": 3,
"description": "This is an example collection schema", "virtual_channel_names": ["v_channel1", "v_channel2"],
"physical_channel_names": ["p_channel1", "p_channel2"],
"partition_infos": [
{
"partition_name": "partition1",
"partition_id": "1",
"created_utc_timestamp": "2021-10-01 00:00:00"
}
],
"enable_dynamic_field": true,
"fields": [ "fields": [
{ {
"field_id": "1",
"name": "field1", "name": "field1",
"data_type": "INT64",
"is_primary_key": true, "is_primary_key": true,
"auto_id": false "description": "description1",
"data_type": "int64",
"type_params": {
"param_key": "param_value"
}, },
{ "index_params": {
"name": "field2", "index_key": "index_value"
"data_type": "FLOAT", },
"is_primary_key": false, "auto_id": false,
"auto_id": false "element_type": "element_type1",
"default_value": "default_value1",
"is_dynamic": false,
"is_partition_key": false,
"is_clustering_key": false,
"nullable": true,
"is_function_output": false
} }
] ]
},
"collectionID": 12345,
"virtual_channel_names": ["vchan1", "vchan2"],
"physical_channel_names": ["pchan1", "pchan2"],
"created_timestamp": 1633036800,
"created_utc_timestamp": 1633036800,
"shards_num": 2,
"aliases": ["alias1", "alias2"],
"start_positions": [
{
"key": "start_key",
"data": "start_data"
}
],
"consistency_level": 0,
"collection_name": "example_collection",
"properties": [
{
"key": "property_key",
"value": "property_value"
}
],
"db_name": "example_db",
"num_partitions": 1,
"db_id": 1
} }
` `
const databases = ` const listDatabaseData = `
{ {
"status": {
"error_code": "Success",
"reason": ""
},
"db_names": [ "db_names": [
"database_1", "database_1",
"database_2", "database_2",
@ -625,33 +648,38 @@ const databases = `
"database_9", "database_9",
"database_10" "database_10"
], ],
"created_timestamp": [
1633036800,
1633123200,
1633209600,
1633296000,
1633382400,
1633468800,
1633555200,
1633641600,
1633728000,
1633814400
],
"db_ids": [ "db_ids": [
1, 2, 3, 4, 5, 6, 7, 8, 9, 10 "1",
"2",
"3",
"4",
"5",
"6",
"7",
"8",
"9",
"10"
],
"created_timestamps": [
"2021-10-01 00:00:00",
"2021-10-02 00:00:00",
"2021-10-03 00:00:00",
"2021-10-04 00:00:00",
"2021-10-05 00:00:00",
"2021-10-06 00:00:00",
"2021-10-07 00:00:00",
"2021-10-08 00:00:00",
"2021-10-09 00:00:00",
"2021-10-10 00:00:00"
] ]
} }
` `
const describeDatabaseResp = ` const describeDatabaseData = `
{ {
"status": {
"error_code": 0,
"reason": "Success"
},
"db_name": "example_db", "db_name": "example_db",
"dbID": 1, "db_id": 1,
"created_timestamp": 1633036800, "created_timestamp": "2021-10-01 00:00:00",
"properties": [ "properties": [
{ {
"key": "property_key", "key": "property_key",
@ -679,7 +707,7 @@ const qcCurrentTargets = `
"is_sorted": true, "is_sorted": true,
"node_id": 1, "node_id": 1,
"is_invisible": false, "is_invisible": false,
"loaded_timestamp": 1633072800, "loaded_timestamp": "2021-10-01 00:00:00",
"index": [ "index": [
{ {
"field_id": 1, "field_id": 1,
@ -701,20 +729,17 @@ const qcCurrentTargets = `
"collection_id": 1, "collection_id": 1,
"channel_name": "channel1", "channel_name": "channel1",
"unflushed_segment_ids": [ "unflushed_segment_ids": [
1 "1"
], ],
"flushed_segment_ids": [ "flushed_segment_ids": [
2 "2"
], ],
"dropped_segment_ids": [ "dropped_segment_ids": [
3 "3"
], ],
"level_zero_segment_ids": [ "level_zero_segment_ids": [
4 "4"
], ]
"partition_stats_versions": {
"1": 1
}
} }
] ]
} }
@ -774,7 +799,7 @@ const qcDist = `
"is_sorted": true, "is_sorted": true,
"node_id": 1, "node_id": 1,
"is_invisible": false, "is_invisible": false,
"loaded_timestamp": 1633072800, "loaded_timestamp": "2021-10-01 00:00:00",
"index": [ "index": [
{ {
"field_id": 1, "field_id": 1,
@ -798,15 +823,15 @@ const qcDist = `
"version": 1, "version": 1,
"collection_id": 1, "collection_id": 1,
"channel_name": "channel1", "channel_name": "channel1",
"unflushed_segment_ids": [1], "unflushed_segment_ids": ["1"],
"flushed_segment_ids": [2], "flushed_segment_ids": ["2"],
"dropped_segment_ids": [3], "dropped_segment_ids": ["3"],
"level_zero_segment_ids": [4], "level_zero_segment_ids": ["4"],
"partition_stats_versions": { "partition_stats_versions": {
"1": 1 "1": 1
}, },
"watch_state": "Healthy", "watch_state": "Healthy",
"start_watch_ts": 1633072800 "start_watch_ts": "2021-10-01 00:00:00"
} }
], ],
"leader_views": [ "leader_views": [
@ -830,7 +855,7 @@ const qcDist = `
"is_sorted": true, "is_sorted": true,
"node_id": 1, "node_id": 1,
"is_invisible": false, "is_invisible": false,
"loaded_timestamp": 1633072800, "loaded_timestamp": "2021-10-01 00:00:00",
"index": [ "index": [
{ {
"field_id": 1, "field_id": 1,
@ -958,7 +983,7 @@ const qnSegments = `
"is_sorted": true, "is_sorted": true,
"node_id": 1, "node_id": 1,
"is_invisible": false, "is_invisible": false,
"loaded_timestamp": 1620000000, "loaded_timestamp": "2021-10-01 00:00:00",
"index": [ "index": [
{ {
"field_id": 1, "field_id": 1,
@ -985,7 +1010,7 @@ const qnSegments = `
"is_sorted": true, "is_sorted": true,
"node_id": 2, "node_id": 2,
"is_invisible": false, "is_invisible": false,
"loaded_timestamp": 1620000001, "loaded_timestamp": "2021-10-01 00:00:00",
"index": [ "index": [
{ {
"field_id": 2, "field_id": 2,
@ -1059,22 +1084,22 @@ const dc_dist = `
"version": 1, "version": 1,
"collection_id": 100, "collection_id": 100,
"channel_name": "channel1", "channel_name": "channel1",
"unflushed_segment_ids": [1, 2, 3], "unflushed_segment_ids": ["1", "2", "3"],
"flushed_segment_ids": [4, 5, 6], "flushed_segment_ids": ["4", "5", "6"],
"dropped_segment_ids": [7, 8, 9], "dropped_segment_ids": ["7", "8", "9"],
"watch_state": "success", "watch_state": "success",
"start_watch_ts": 123456789 "start_watch_ts": "2023-10-01 12:05:00"
}, },
{ {
"node_id": 1, "node_id": 1,
"version": 1, "version": 1,
"collection_id": 100, "collection_id": 100,
"channel_name": "channel3", "channel_name": "channel3",
"unflushed_segment_ids": [1, 2, 3], "unflushed_segment_ids": ["1", "2", "3"],
"flushed_segment_ids": [4, 5, 6], "flushed_segment_ids": ["4", "5", "6"],
"dropped_segment_ids": [7, 8, 9], "dropped_segment_ids": ["7", "8", "9"],
"watch_state": "to_watch", "watch_state": "to_watch",
"start_watch_ts": 123456789 "start_watch_ts": "2023-10-01 12:05:00"
} }
] ]
} }
@ -1090,7 +1115,7 @@ const dc_build_index_task = `
"index_state": "Finished", "index_state": "Finished",
"index_size": 1024, "index_size": 1024,
"index_version": 1, "index_version": 1,
"create_time": 1633036800 "create_time": "2023-10-01 12:05:00"
}, },
{ {
"index_id": 2, "index_id": 2,
@ -1101,7 +1126,7 @@ const dc_build_index_task = `
"fail_reason": "Disk full", "fail_reason": "Disk full",
"index_size": 2048, "index_size": 2048,
"index_version": 2, "index_version": 2,
"create_time": 1633123200 "create_time": "2023-10-01 12:05:00"
} }
]` ]`
@ -1113,11 +1138,11 @@ const dc_compaction_task = `
"type": "Merge", "type": "Merge",
"state": "Completed", "state": "Completed",
"fail_reason": "", "fail_reason": "",
"start_time": 1620000000, "start_time": "2023-10-01 12:05:00",
"end_time": 1620003600, "end_time": "2023-10-01 12:06:00",
"total_rows": 10000, "total_rows": 10000,
"input_segments": [1, 2, 3], "input_segments": ["1", "2", "3"],
"result_segments": [4] "result_segments": ["4"]
}, },
{ {
"plan_id": 2, "plan_id": 2,
@ -1125,10 +1150,10 @@ const dc_compaction_task = `
"type": "Merge", "type": "Merge",
"state": "Failed", "state": "Failed",
"fail_reason": "Disk full", "fail_reason": "Disk full",
"start_time": 1620007200, "start_time": "2023-10-01 12:05:00",
"end_time": 1620010800, "end_time": "2023-10-01 12:06:00",
"total_rows": 20000, "total_rows": 20000,
"input_segments": [5, 6, 7], "input_segments": ["5", "6", "7"],
"result_segments": [] "result_segments": []
} }
]` ]`
@ -1139,8 +1164,8 @@ const dn_sync_task = `
"segment_id": 1, "segment_id": 1,
"batch_rows": 1000, "batch_rows": 1000,
"segment_level": "L1", "segment_level": "L1",
"ts_from": 1633036800, "ts_from": "2023-10-01 12:05:00",
"ts_to": 1633040400, "ts_to": "2023-10-01 12:06:00",
"delta_row_count": 10, "delta_row_count": 10,
"flush_size": 1024, "flush_size": 1024,
"running_time": "100000000", "running_time": "100000000",
@ -1150,8 +1175,8 @@ const dn_sync_task = `
"segment_id": 2, "segment_id": 2,
"batch_rows": 2000, "batch_rows": 2000,
"segment_level": "L2", "segment_level": "L2",
"ts_from": 1633123200, "ts_from": "2023-10-01 12:05:00",
"ts_to": 1633126800, "ts_to": "2023-10-01 12:06:00",
"delta_row_count": 20, "delta_row_count": 20,
"flush_size": 2048, "flush_size": 2048,
"running_time": "200000000", "running_time": "200000000",

View File

@ -126,7 +126,7 @@ function renderDatabases(currentPage, rowsPerPage) {
tableHTML += '<tr>'; tableHTML += '<tr>';
tableHTML += `<td><a href="#" onclick="describeDatabase('${databaseData.db_names[i]}', ${i}, 'list-db')">${databaseData.db_names[i]}</a></td>`; tableHTML += `<td><a href="#" onclick="describeDatabase('${databaseData.db_names[i]}', ${i}, 'list-db')">${databaseData.db_names[i]}</a></td>`;
tableHTML += `<td>${databaseData.db_ids? databaseData.db_ids[i] : 0}</td>`; tableHTML += `<td>${databaseData.db_ids? databaseData.db_ids[i] : 0}</td>`;
tableHTML += `<td>${databaseData.created_timestamp? formatTimestamp(databaseData.created_timestamp[i]) : ''}</td>`; tableHTML += `<td>${databaseData.created_timestamps[i]}</td>`;
tableHTML += '</tr>'; tableHTML += '</tr>';
// Hidden row for displaying collection details as JSON // Hidden row for displaying collection details as JSON
@ -165,7 +165,7 @@ function renderDatabases(currentPage, rowsPerPage) {
} }
function describeDatabase(databaseName, rowIndex, type) { function describeDatabase(databaseName, rowIndex, type) {
fetchData(`${MILVUS_URI}/database?db_name=${databaseName}`, describeDatabaseResp) fetchData(`${MILVUS_URI}/_db/desc?db_name=${databaseName}`, describeDatabaseData)
.then(data => { .then(data => {
// Format data as JSON and insert into the designated row // Format data as JSON and insert into the designated row
const jsonFormattedData = JSON.stringify(data, null, 2); const jsonFormattedData = JSON.stringify(data, null, 2);
@ -182,7 +182,7 @@ function describeDatabase(databaseName, rowIndex, type) {
} }
function describeCollection(databaseName, collectionName, rowIndex, type) { function describeCollection(databaseName, collectionName, rowIndex, type) {
fetchData(`${MILVUS_URI}/collection?db_name${databaseName}&&collection_name=${collectionName}`, describeCollectionResp) fetchData(`${MILVUS_URI}/_collection/desc?db_name=${databaseName}&&collection_name=${collectionName}`, describeCollectionData)
.then(data => { .then(data => {
// Format data as JSON and insert into the designated row // Format data as JSON and insert into the designated row
const jsonFormattedData = JSON.stringify(data, null, 2); const jsonFormattedData = JSON.stringify(data, null, 2);
@ -199,7 +199,7 @@ function describeCollection(databaseName, collectionName, rowIndex, type) {
} }
function fetchCollections(databaseName) { function fetchCollections(databaseName) {
fetchData(MILVUS_URI + `/collections?db_name=${databaseName}`, collections ) fetchData(MILVUS_URI + `/_collection/list?db_name=${databaseName}`, listCollectionData )
.then(data => { .then(data => {
collectionsData = data; collectionsData = data;
renderCollections(databaseName, startPage, paginationSize) renderCollections(databaseName, startPage, paginationSize)
@ -212,7 +212,7 @@ function fetchCollections(databaseName) {
let collectionsData = null; // Global variable to store fetched data let collectionsData = null; // Global variable to store fetched data
function renderCollections(databaseName, currentPage, rowsPerPage) { function renderCollections(databaseName, currentPage, rowsPerPage) {
let data = collectionsData; let data = collectionsData;
if (!data) { if (!data || !data.collection_names) {
console.error('No collections data available'); console.error('No collections data available');
return; return;
} }
@ -229,12 +229,11 @@ function renderCollections(databaseName, currentPage, rowsPerPage) {
const start = currentPage * rowsPerPage; const start = currentPage * rowsPerPage;
const end = start + rowsPerPage; const end = start + rowsPerPage;
const totalCount = data.collection_names.length; const totalCount = data.collection_names.length;
console.log(data)
for (let i = start; i < end && i < totalCount; i++) { for (let i = start; i < end && i < totalCount; i++) {
tableHTML += '<tr>'; tableHTML += '<tr>';
tableHTML += `<td><a href="#" onclick="describeCollection('${databaseName}', '${data.collection_names[i]}', ${i}, 'list-coll')">${data.collection_names[i]}</a></td>`; tableHTML += `<td><a href="#" onclick="describeCollection('${databaseName}', '${data.collection_names[i]}', ${i}, 'list-coll')">${data.collection_names[i]}</a></td>`;
tableHTML += `<td>${data.collection_ids[i]}</td>`; tableHTML += `<td>${data.collection_ids[i]}</td>`;
tableHTML += `<td>${formatTimestamp(data.created_utc_timestamps[i])}</td>`; tableHTML += `<td>${data.created_utc_timestamps[i]}</td>`;
tableHTML += `<td>${data.inMemory_percentages? data.inMemory_percentages[i]: 'unknown'}</td>`; tableHTML += `<td>${data.inMemory_percentages? data.inMemory_percentages[i]: 'unknown'}</td>`;
tableHTML += `<td>${data.query_service_available? data.query_service_available[i] ? 'Yes' : 'No' : 'No'}</td>`; tableHTML += `<td>${data.query_service_available? data.query_service_available[i] ? 'Yes' : 'No' : 'No'}</td>`;
tableHTML += '</tr>'; tableHTML += '</tr>';
@ -509,7 +508,7 @@ function renderDependencies(data) {
const tr = ` const tr = `
<tr> <tr>
<td><strong>${key === 'metastore'? 'metastore [' + row['meta_type'] + ']' : 'mq [' + row['mq_type'] + ']'} </strong> </td> <td><strong>${key === 'metastore'? 'metastore [' + row['meta_type'] + ']' : 'mq [' + row['mq_type'] + ']'} </strong> </td>
<td>${row['health_status']? 'Healthy' : 'Unhealthy:' + row['unhealthy_reason']}</td> <td>${row['health_status']? 'Healthy' : row['unhealthy_reason']}</td>
<td>${row['members_health']? row['members_health'].map(member => ` <td>${row['members_health']? row['members_health'].map(member => `
<ul> <ul>
<li>Endpoint: ${member.endpoint}, Health: ${member.health ? "Healthy" : "Unhealthy"}</li> <li>Endpoint: ${member.endpoint}, Health: ${member.health ? "Healthy" : "Unhealthy"}</li>
@ -593,7 +592,7 @@ function renderBuildIndexTasks(data) {
tableHTML += `<td>${indexState}</td>`; tableHTML += `<td>${indexState}</td>`;
tableHTML += `<td>${task.index_size}</td>`; tableHTML += `<td>${task.index_size}</td>`;
tableHTML += `<td>${task.index_version}</td>`; tableHTML += `<td>${task.index_version}</td>`;
tableHTML += `<td>${new Date(task.create_time * 1000).toLocaleString()}</td>`; tableHTML += `<td>${task.create_time}</td>`;
tableHTML += '</tr>'; tableHTML += '</tr>';
}); });
@ -636,8 +635,8 @@ function renderCompactionTasks(data) {
tableHTML += `<td>${new Date(task.start_time * 1000).toLocaleString()}</td>`; tableHTML += `<td>${new Date(task.start_time * 1000).toLocaleString()}</td>`;
tableHTML += `<td>${new Date(task.end_time * 1000).toLocaleString()}</td>`; tableHTML += `<td>${new Date(task.end_time * 1000).toLocaleString()}</td>`;
tableHTML += `<td>${task.total_rows}</td>`; tableHTML += `<td>${task.total_rows}</td>`;
tableHTML += `<td>${task.input_segments.join(', ')}</td>`; tableHTML += `<td>${task.input_segments? task.input_segments.join(', '): ''}</td>`;
tableHTML += `<td>${task.result_segments.join(', ')}</td>`; tableHTML += `<td>${task.result_segments? task.result_segments.join(', '): ''}</td>`;
tableHTML += '</tr>'; tableHTML += '</tr>';
}); });
@ -677,8 +676,8 @@ function renderImportTasks(data) {
tableHTML += `<td>${task.node_id}</td>`; tableHTML += `<td>${task.node_id}</td>`;
tableHTML += `<td>${state}</td>`; tableHTML += `<td>${state}</td>`;
tableHTML += `<td>${task.task_type}</td>`; tableHTML += `<td>${task.task_type}</td>`;
tableHTML += `<td>${new Date(task.created_time).toLocaleString()}</td>`; tableHTML += `<td>${task.created_time}</td>`;
tableHTML += `<td>${new Date(task.complete_time).toLocaleString()}</td>`; tableHTML += `<td>${task.complete_time}</td>`;
tableHTML += '</tr>'; tableHTML += '</tr>';
}); });
@ -711,8 +710,8 @@ function renderSyncTasks(data) {
tableHTML += `<td>${task.segment_id}</td>`; tableHTML += `<td>${task.segment_id}</td>`;
tableHTML += `<td>${task.batch_rows}</td>`; tableHTML += `<td>${task.batch_rows}</td>`;
tableHTML += `<td>${task.segment_level}</td>`; tableHTML += `<td>${task.segment_level}</td>`;
tableHTML += `<td>${new Date(task.ts_from * 1000).toLocaleString()}</td>`; tableHTML += `<td>${task.ts_from}</td>`;
tableHTML += `<td>${new Date(task.ts_to * 1000).toLocaleString()}</td>`; tableHTML += `<td>${task.ts_to}</td>`;
tableHTML += `<td>${task.delta_row_count}</td>`; tableHTML += `<td>${task.delta_row_count}</td>`;
tableHTML += `<td>${task.flush_size}</td>`; tableHTML += `<td>${task.flush_size}</td>`;
tableHTML += `<td>${task.running_time}</td>`; tableHTML += `<td>${task.running_time}</td>`;
@ -797,7 +796,7 @@ function renderChannels(channels, currentPage, rowsPerPage) {
<td>${channel.watch_state || "N/A"}</td> <td>${channel.watch_state || "N/A"}</td>
<td>${channel.node_id}</td> <td>${channel.node_id}</td>
<td>${channel.latest_time_tick || "N/A"}</td> <td>${channel.latest_time_tick || "N/A"}</td>
<td>${formatTimestamp(channel.start_watch_ts) || "N/A"}</td> <td>${channel.start_watch_ts || "N/A"}</td>
<td>${channel.check_point_ts || "N/A"}</td> <td>${channel.check_point_ts || "N/A"}</td>
`; `;
table.appendChild(row); table.appendChild(row);
@ -1114,7 +1113,7 @@ function renderSlowQueries(data) {
tableHTML += `<td>${query.time}</td>`; tableHTML += `<td>${query.time}</td>`;
tableHTML += `<td>${query.trace_id}</td>`; tableHTML += `<td>${query.trace_id}</td>`;
tableHTML += `<td>${query.type}</td>`; tableHTML += `<td>${query.type}</td>`;
tableHTML += `<td>${query.user}</td>`; tableHTML += `<td>${query.user || 'unknown'}</td>`;
tableHTML += `<td>${query.database}</td>`; tableHTML += `<td>${query.database}</td>`;
tableHTML += `<td>${query.collection}</td>`; tableHTML += `<td>${query.collection}</td>`;
tableHTML += `<td>${JSON.stringify(query.query_params)}</td>`; tableHTML += `<td>${JSON.stringify(query.query_params)}</td>`;
@ -1406,7 +1405,7 @@ function renderQueryChannels(channels, currentPage, rowsPerPage) {
<td>${channel.collection_id}</td> <td>${channel.collection_id}</td>
<td>${channel.leader_id || 'Not Found'}</td> <td>${channel.leader_id || 'Not Found'}</td>
<td>${channel.node_id}</td> <td>${channel.node_id}</td>
<td>${channel.watch_state}</td> <td>${channel.watch_state||''}</td>
<td>${channel.from}</td> <td>${channel.from}</td>
</tr> </tr>
`; `;

View File

@ -27,12 +27,12 @@
<!-- Centered Links Section --> <!-- Centered Links Section -->
<div class="row text-center mb-3"> <div class="row text-center mb-3">
<div class="col"> <div class="col">
<a href="#link1" class="btn btn-link" style="font-size: 1.5em;">Pprof</a> <a href="http://localhost:9091/debug/pprof" class="btn btn-link" style="font-size: 1.5em;">Pprof</a>
</div> </div>
</div> </div>
<div class="row text-center mb-3"> <div class="row text-center mb-3">
<div class="col"> <div class="col">
<a href="#link2" class="btn btn-link" style="font-size: 1.5em;">Memory Data Visualization</a> <a href="http://localhost:9091/static" class="btn btn-link" style="font-size: 1.5em;">Memory Data Visualization</a>
</div> </div>
</div> </div>
</div> </div>

View File

@ -1020,7 +1020,7 @@ func TestCatalog_CreateSegmentIndex(t *testing.T) {
FailReason: "", FailReason: "",
IndexVersion: 0, IndexVersion: 0,
IsDeleted: false, IsDeleted: false,
CreateTime: 0, CreatedUTCTime: 0,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
} }
@ -1116,7 +1116,7 @@ func TestCatalog_AlterSegmentIndexes(t *testing.T) {
FailReason: "", FailReason: "",
IndexVersion: 0, IndexVersion: 0,
IsDeleted: false, IsDeleted: false,
CreateTime: 0, CreatedUTCTime: 0,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
} }

View File

@ -102,7 +102,7 @@ func MarshalIndexModel(index *Index) *indexpb.FieldIndex {
// } // }
// //
// newIdx.IsDeleted = b.IsDeleted // newIdx.IsDeleted = b.IsDeleted
// newIdx.CreateTime = b.CreateTime // newIdx.CreatedUTCTime = b.CreatedUTCTime
// //
// if newIdx.Extra == nil && b.Extra != nil { // if newIdx.Extra == nil && b.Extra != nil {
// newIdx.Extra = b.Extra // newIdx.Extra = b.Extra

View File

@ -18,7 +18,7 @@ type SegmentIndex struct {
IndexState commonpb.IndexState IndexState commonpb.IndexState
FailReason string FailReason string
IsDeleted bool IsDeleted bool
CreateTime uint64 CreatedUTCTime uint64
IndexFileKeys []string IndexFileKeys []string
IndexSize uint64 IndexSize uint64
// deprecated // deprecated
@ -44,7 +44,7 @@ func UnmarshalSegmentIndexModel(segIndex *indexpb.SegmentIndex) *SegmentIndex {
FailReason: segIndex.FailReason, FailReason: segIndex.FailReason,
IndexVersion: segIndex.IndexVersion, IndexVersion: segIndex.IndexVersion,
IsDeleted: segIndex.Deleted, IsDeleted: segIndex.Deleted,
CreateTime: segIndex.CreateTime, CreatedUTCTime: segIndex.CreateTime,
IndexFileKeys: common.CloneStringList(segIndex.IndexFileKeys), IndexFileKeys: common.CloneStringList(segIndex.IndexFileKeys),
IndexSize: segIndex.SerializeSize, IndexSize: segIndex.SerializeSize,
WriteHandoff: segIndex.WriteHandoff, WriteHandoff: segIndex.WriteHandoff,
@ -70,7 +70,7 @@ func MarshalSegmentIndexModel(segIdx *SegmentIndex) *indexpb.SegmentIndex {
IndexVersion: segIdx.IndexVersion, IndexVersion: segIdx.IndexVersion,
IndexFileKeys: common.CloneStringList(segIdx.IndexFileKeys), IndexFileKeys: common.CloneStringList(segIdx.IndexFileKeys),
Deleted: segIdx.IsDeleted, Deleted: segIdx.IsDeleted,
CreateTime: segIdx.CreateTime, CreateTime: segIdx.CreatedUTCTime,
SerializeSize: segIdx.IndexSize, SerializeSize: segIdx.IndexSize,
WriteHandoff: segIdx.WriteHandoff, WriteHandoff: segIdx.WriteHandoff,
CurrentIndexVersion: segIdx.CurrentIndexVersion, CurrentIndexVersion: segIdx.CurrentIndexVersion,
@ -90,7 +90,7 @@ func CloneSegmentIndex(segIndex *SegmentIndex) *SegmentIndex {
FailReason: segIndex.FailReason, FailReason: segIndex.FailReason,
IndexVersion: segIndex.IndexVersion, IndexVersion: segIndex.IndexVersion,
IsDeleted: segIndex.IsDeleted, IsDeleted: segIndex.IsDeleted,
CreateTime: segIndex.CreateTime, CreatedUTCTime: segIndex.CreatedUTCTime,
IndexFileKeys: common.CloneStringList(segIndex.IndexFileKeys), IndexFileKeys: common.CloneStringList(segIndex.IndexFileKeys),
IndexSize: segIndex.IndexSize, IndexSize: segIndex.IndexSize,
WriteHandoff: segIndex.WriteHandoff, WriteHandoff: segIndex.WriteHandoff,

View File

@ -42,7 +42,7 @@ var (
FailReason: "", FailReason: "",
IndexVersion: 0, IndexVersion: 0,
IsDeleted: false, IsDeleted: false,
CreateTime: 1, CreatedUTCTime: 1,
IndexFileKeys: nil, IndexFileKeys: nil,
IndexSize: 0, IndexSize: 0,
} }

View File

@ -19,20 +19,33 @@ package proxy
import ( import (
"encoding/json" "encoding/json"
"net/http" "net/http"
"strconv"
"strings" "strings"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
"github.com/samber/lo"
"github.com/milvus-io/milvus-proto/go-api/v2/commonpb"
"github.com/milvus-io/milvus-proto/go-api/v2/milvuspb"
mhttp "github.com/milvus-io/milvus/internal/http" mhttp "github.com/milvus-io/milvus/internal/http"
"github.com/milvus-io/milvus/internal/proxy/connection" "github.com/milvus-io/milvus/internal/proxy/connection"
"github.com/milvus-io/milvus/internal/types"
"github.com/milvus-io/milvus/internal/util/dependency" "github.com/milvus-io/milvus/internal/util/dependency"
"github.com/milvus-io/milvus/pkg/util/etcd" "github.com/milvus-io/milvus/pkg/util/etcd"
"github.com/milvus-io/milvus/pkg/util/funcutil"
"github.com/milvus-io/milvus/pkg/util/merr" "github.com/milvus-io/milvus/pkg/util/merr"
"github.com/milvus-io/milvus/pkg/util/metricsinfo" "github.com/milvus-io/milvus/pkg/util/metricsinfo"
"github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/paramtable"
"github.com/milvus-io/milvus/pkg/util/tsoutil"
"github.com/milvus-io/milvus/pkg/util/typeutil"
) )
var contentType = "application/json" var (
contentType = "application/json"
defaultDB = "default"
httpDBName = "db_name"
HTTPCollectionName = "collection_name"
)
func getConfigs(configs map[string]string) gin.HandlerFunc { func getConfigs(configs map[string]string) gin.HandlerFunc {
return func(c *gin.Context) { return func(c *gin.Context) {
@ -192,3 +205,196 @@ func getDataComponentMetrics(node *Proxy, metricsType string) gin.HandlerFunc {
c.Data(http.StatusOK, contentType, []byte(resp.GetResponse())) c.Data(http.StatusOK, contentType, []byte(resp.GetResponse()))
} }
} }
// The Get request should be used to get the query parameters, not the body, such as Javascript
// fetch API only support GET request with query parameter.
func listCollection(node types.ProxyComponent) gin.HandlerFunc {
return func(c *gin.Context) {
dbName := c.Query(httpDBName)
if len(dbName) == 0 {
dbName = defaultDB
}
showCollectionResp, err := node.ShowCollections(c, &milvuspb.ShowCollectionsRequest{
Base: &commonpb.MsgBase{
MsgType: commonpb.MsgType_ShowCollections,
},
DbName: dbName,
})
if err := merr.CheckRPCCall(showCollectionResp, err); err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{
mhttp.HTTPReturnMessage: err.Error(),
})
return
}
// Convert the response to Collections struct
collections := &metricsinfo.Collections{
CollectionIDs: lo.Map(showCollectionResp.CollectionIds, func(t int64, i int) string {
return strconv.FormatInt(t, 10)
}),
CollectionNames: showCollectionResp.CollectionNames,
CreatedUtcTimestamps: lo.Map(showCollectionResp.CreatedUtcTimestamps, func(t uint64, i int) string {
return typeutil.TimestampToString(t)
}),
InMemoryPercentages: lo.Map(showCollectionResp.InMemoryPercentages, func(t int64, i int) int {
return int(t)
}),
QueryServiceAvailable: showCollectionResp.QueryServiceAvailable,
}
// Marshal the collections struct to JSON
collectionsJSON, err := json.Marshal(collections)
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{
mhttp.HTTPReturnMessage: err.Error(),
})
return
}
c.Data(http.StatusOK, contentType, collectionsJSON)
}
}
func describeCollection(node types.ProxyComponent, rootCoord types.RootCoordClient) gin.HandlerFunc {
return func(c *gin.Context) {
dbName := c.Query(httpDBName)
collectionName := c.Query(HTTPCollectionName)
if len(dbName) == 0 {
dbName = defaultDB
}
if len(collectionName) == 0 {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{
mhttp.HTTPReturnMessage: HTTPCollectionName + " is required",
})
return
}
describeCollectionResp, err := node.DescribeCollection(c, &milvuspb.DescribeCollectionRequest{
Base: &commonpb.MsgBase{
MsgType: commonpb.MsgType_DescribeCollection,
},
DbName: dbName,
CollectionName: collectionName,
})
if err := merr.CheckRPCCall(describeCollectionResp, err); err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{
mhttp.HTTPReturnMessage: err.Error(),
})
return
}
describePartitionResp, err := rootCoord.ShowPartitions(c, &milvuspb.ShowPartitionsRequest{
Base: &commonpb.MsgBase{
MsgType: commonpb.MsgType_ShowPartitions,
},
DbName: dbName,
CollectionName: collectionName,
})
if err := merr.CheckRPCCall(describePartitionResp, err); err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{
mhttp.HTTPReturnMessage: err.Error(),
})
return
}
// Convert the response to Collection struct
collection := &metricsinfo.Collection{
CollectionID: strconv.FormatInt(describeCollectionResp.CollectionID, 10),
CollectionName: describeCollectionResp.CollectionName,
CreatedTime: tsoutil.PhysicalTimeFormat(describeCollectionResp.CreatedUtcTimestamp),
ShardsNum: int(describeCollectionResp.ShardsNum),
ConsistencyLevel: describeCollectionResp.ConsistencyLevel.String(),
Aliases: describeCollectionResp.Aliases,
Properties: funcutil.KeyValuePair2Map(describeCollectionResp.Properties),
DBName: dbName,
NumPartitions: int(describeCollectionResp.NumPartitions),
VirtualChannelNames: describeCollectionResp.VirtualChannelNames,
PhysicalChannelNames: describeCollectionResp.PhysicalChannelNames,
PartitionInfos: metricsinfo.NewPartitionInfos(describePartitionResp),
EnableDynamicField: describeCollectionResp.Schema.EnableDynamicField,
Fields: metricsinfo.NewFields(describeCollectionResp.GetSchema()),
}
// Marshal the collection struct to JSON
collectionJSON, err := json.Marshal(collection)
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{
mhttp.HTTPReturnMessage: err.Error(),
})
return
}
c.Data(http.StatusOK, contentType, collectionJSON)
}
}
func listDatabase(node types.ProxyComponent) gin.HandlerFunc {
return func(c *gin.Context) {
showDatabaseResp, err := node.ListDatabases(c, &milvuspb.ListDatabasesRequest{
Base: &commonpb.MsgBase{
MsgType: commonpb.MsgType_ListDatabases,
},
})
if err := merr.CheckRPCCall(showDatabaseResp, err); err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{
mhttp.HTTPReturnMessage: err.Error(),
})
return
}
// Convert the response to Databases struct
databases := metricsinfo.NewDatabases(showDatabaseResp)
// Marshal the databases struct to JSON
databasesJSON, err := json.Marshal(databases)
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{
mhttp.HTTPReturnMessage: err.Error(),
})
return
}
c.Data(http.StatusOK, contentType, databasesJSON)
}
}
func describeDatabase(node types.ProxyComponent) gin.HandlerFunc {
return func(c *gin.Context) {
dbName := c.Query(httpDBName)
if len(dbName) == 0 {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{
mhttp.HTTPReturnMessage: httpDBName + " is required",
})
return
}
describeDatabaseResp, err := node.DescribeDatabase(c, &milvuspb.DescribeDatabaseRequest{
Base: &commonpb.MsgBase{
MsgType: commonpb.MsgType_DescribeDatabase,
},
DbName: dbName,
})
if err := merr.CheckRPCCall(describeDatabaseResp, err); err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{
mhttp.HTTPReturnMessage: err.Error(),
})
return
}
// Convert the response to Database struct
database := metricsinfo.NewDatabase(describeDatabaseResp)
// Marshal the database struct to JSON
databaseJSON, err := json.Marshal(database)
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{
mhttp.HTTPReturnMessage: err.Error(),
})
return
}
c.Data(http.StatusOK, contentType, databaseJSON)
}
}

View File

@ -13,6 +13,7 @@ import (
"github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb"
"github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb"
"github.com/milvus-io/milvus-proto/go-api/v2/schemapb"
"github.com/milvus-io/milvus/internal/mocks" "github.com/milvus-io/milvus/internal/mocks"
"github.com/milvus-io/milvus/internal/proxy/connection" "github.com/milvus-io/milvus/internal/proxy/connection"
"github.com/milvus-io/milvus/pkg/util/metricsinfo" "github.com/milvus-io/milvus/pkg/util/metricsinfo"
@ -151,3 +152,210 @@ func TestGetDataComponentMetrics(t *testing.T) {
assert.Contains(t, w.Body.String(), "test_response") assert.Contains(t, w.Body.String(), "test_response")
}) })
} }
func TestListCollection(t *testing.T) {
t.Run("list collections successfully", func(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Request, _ = http.NewRequest("GET", "/?db_name=default", nil)
mockProxy := mocks.NewMockProxy(t)
mockProxy.EXPECT().ShowCollections(mock.Anything, mock.Anything).Return(&milvuspb.ShowCollectionsResponse{
Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success},
CollectionIds: []int64{1, 2},
CollectionNames: []string{"collection1", "collection2"},
CreatedUtcTimestamps: []uint64{1633046400000, 1633132800000},
InMemoryPercentages: []int64{100, 100},
QueryServiceAvailable: []bool{true, true},
}, nil)
handler := listCollection(mockProxy)
handler(c)
assert.Equal(t, http.StatusOK, w.Code)
assert.Contains(t, w.Body.String(), "collection1")
assert.Contains(t, w.Body.String(), "collection2")
})
t.Run("list collections with error", func(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Request, _ = http.NewRequest("GET", "/?db_name=default", nil)
mockProxy := mocks.NewMockProxy(t)
mockProxy.EXPECT().ShowCollections(mock.Anything, mock.Anything).Return(nil, errors.New("error"))
handler := listCollection(mockProxy)
handler(c)
assert.Equal(t, http.StatusInternalServerError, w.Code)
assert.Contains(t, w.Body.String(), "error")
})
}
func TestDescribeCollection(t *testing.T) {
t.Run("describe collection successfully", func(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Request, _ = http.NewRequest("GET", "/?db_name=default&collection_name=collection1", nil)
mockProxy := mocks.NewMockProxy(t)
mockRootCoord := mocks.NewMockRootCoordClient(t)
mockProxy.EXPECT().DescribeCollection(mock.Anything, mock.Anything).Return(&milvuspb.DescribeCollectionResponse{
Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success},
CollectionID: 1,
CollectionName: "collection1",
CreatedUtcTimestamp: 1633046400000,
ShardsNum: 2,
ConsistencyLevel: commonpb.ConsistencyLevel_Strong,
Aliases: []string{"alias1"},
Properties: []*commonpb.KeyValuePair{{Key: "key", Value: "value"}},
VirtualChannelNames: []string{"vchan1"},
PhysicalChannelNames: []string{"pchan1"},
NumPartitions: 1,
Schema: &schemapb.CollectionSchema{
EnableDynamicField: true,
Fields: []*schemapb.FieldSchema{
{
FieldID: 1,
Name: "field1",
DataType: schemapb.DataType_Int32,
},
},
},
}, nil)
mockRootCoord.EXPECT().ShowPartitions(mock.Anything, mock.Anything).Return(&milvuspb.ShowPartitionsResponse{
Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success},
}, nil)
handler := describeCollection(mockProxy, mockRootCoord)
handler(c)
assert.Equal(t, http.StatusOK, w.Code)
assert.Contains(t, w.Body.String(), "collection1")
assert.Contains(t, w.Body.String(), "alias1")
})
t.Run("describe collection with error", func(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Request, _ = http.NewRequest("GET", "/?db_name=default&collection_name=collection1", nil)
mockProxy := mocks.NewMockProxy(t)
mockRootCoord := mocks.NewMockRootCoordClient(t)
mockProxy.EXPECT().DescribeCollection(mock.Anything, mock.Anything).Return(nil, errors.New("error"))
handler := describeCollection(mockProxy, mockRootCoord)
handler(c)
assert.Equal(t, http.StatusInternalServerError, w.Code)
assert.Contains(t, w.Body.String(), "error")
})
t.Run("missing collection_name", func(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Request, _ = http.NewRequest("GET", "/?db_name=default", nil)
mockProxy := mocks.NewMockProxy(t)
mockRootCoord := mocks.NewMockRootCoordClient(t)
handler := describeCollection(mockProxy, mockRootCoord)
handler(c)
assert.Equal(t, http.StatusBadRequest, w.Code)
assert.Contains(t, w.Body.String(), "collection_name is required")
})
}
func TestListDatabase(t *testing.T) {
t.Run("list databases successfully", func(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Request, _ = http.NewRequest("GET", "/", nil)
mockProxy := mocks.NewMockProxy(t)
mockProxy.EXPECT().ListDatabases(mock.Anything, mock.Anything).Return(&milvuspb.ListDatabasesResponse{
Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success},
DbNames: []string{"db1", "db2"},
CreatedTimestamp: []uint64{1633046400000, 1633132800000},
}, nil)
handler := listDatabase(mockProxy)
handler(c)
assert.Equal(t, http.StatusOK, w.Code)
assert.Contains(t, w.Body.String(), "db1")
assert.Contains(t, w.Body.String(), "db2")
})
t.Run("list databases with error", func(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Request, _ = http.NewRequest("GET", "/", nil)
mockProxy := mocks.NewMockProxy(t)
mockProxy.EXPECT().ListDatabases(mock.Anything, mock.Anything).Return(nil, errors.New("error"))
handler := listDatabase(mockProxy)
handler(c)
assert.Equal(t, http.StatusInternalServerError, w.Code)
assert.Contains(t, w.Body.String(), "error")
})
}
func TestDescribeDatabase(t *testing.T) {
t.Run("describe database successfully", func(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Request, _ = http.NewRequest("GET", "/?db_name=db1", nil)
mockProxy := mocks.NewMockProxy(t)
mockProxy.EXPECT().DescribeDatabase(mock.Anything, mock.Anything).Return(&milvuspb.DescribeDatabaseResponse{
Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success},
DbName: "db1",
DbID: 1,
CreatedTimestamp: 1633046400000,
Properties: []*commonpb.KeyValuePair{{Key: "key", Value: "value"}},
}, nil)
handler := describeDatabase(mockProxy)
handler(c)
assert.Equal(t, http.StatusOK, w.Code)
assert.Contains(t, w.Body.String(), "db1")
assert.Contains(t, w.Body.String(), "key")
assert.Contains(t, w.Body.String(), "value")
})
t.Run("describe database with error", func(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Request, _ = http.NewRequest("GET", "/?db_name=db1", nil)
mockProxy := mocks.NewMockProxy(t)
mockProxy.EXPECT().DescribeDatabase(mock.Anything, mock.Anything).Return(nil, errors.New("error"))
handler := describeDatabase(mockProxy)
handler(c)
assert.Equal(t, http.StatusInternalServerError, w.Code)
assert.Contains(t, w.Body.String(), "error")
})
t.Run("missing db_name", func(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Request, _ = http.NewRequest("GET", "/", nil)
mockProxy := mocks.NewMockProxy(t)
handler := describeDatabase(mockProxy)
handler(c)
assert.Equal(t, http.StatusBadRequest, w.Code)
assert.Contains(t, w.Body.String(), "db_name is required")
})
}

View File

@ -6534,6 +6534,14 @@ func (node *Proxy) RegisterRestRouter(router gin.IRouter) {
router.GET(http.DNSyncTasksPath, getDataComponentMetrics(node, metricsinfo.SyncTasks)) router.GET(http.DNSyncTasksPath, getDataComponentMetrics(node, metricsinfo.SyncTasks))
router.GET(http.DNSegmentsPath, getDataComponentMetrics(node, metricsinfo.DataSegments)) router.GET(http.DNSegmentsPath, getDataComponentMetrics(node, metricsinfo.DataSegments))
router.GET(http.DNChannelsPath, getDataComponentMetrics(node, metricsinfo.DataChannels)) router.GET(http.DNChannelsPath, getDataComponentMetrics(node, metricsinfo.DataChannels))
// Database requests
router.GET(http.DatabaseListPath, listDatabase(node))
router.GET(http.DatabaseDescPath, describeDatabase(node))
// Collection requests
router.GET(http.CollectionListPath, listCollection(node))
router.GET(http.CollectionDescPath, describeCollection(node, node.rootCoord))
} }
func (node *Proxy) CreatePrivilegeGroup(ctx context.Context, req *milvuspb.CreatePrivilegeGroupRequest) (*commonpb.Status, error) { func (node *Proxy) CreatePrivilegeGroup(ctx context.Context, req *milvuspb.CreatePrivilegeGroupRequest) (*commonpb.Status, error) {

View File

@ -187,7 +187,7 @@ func TestAlterDatabase(t *testing.T) {
assert.Nil(t, err) assert.Nil(t, err)
} }
func TestDescribeDatabase(t *testing.T) { func TestDescribeDatabaseTask(t *testing.T) {
rc := mocks.NewMockRootCoordClient(t) rc := mocks.NewMockRootCoordClient(t)
rc.EXPECT().DescribeDatabase(mock.Anything, mock.Anything).Return(&rootcoordpb.DescribeDatabaseResponse{}, nil) rc.EXPECT().DescribeDatabase(mock.Anything, mock.Anything).Return(&rootcoordpb.DescribeDatabaseResponse{}, nil)

View File

@ -26,6 +26,7 @@ import (
"github.com/milvus-io/milvus/internal/proto/querypb" "github.com/milvus-io/milvus/internal/proto/querypb"
"github.com/milvus-io/milvus/internal/util/metrics" "github.com/milvus-io/milvus/internal/util/metrics"
"github.com/milvus-io/milvus/pkg/util/metricsinfo" "github.com/milvus-io/milvus/pkg/util/metricsinfo"
"github.com/milvus-io/milvus/pkg/util/tsoutil"
"github.com/milvus-io/milvus/pkg/util/typeutil" "github.com/milvus-io/milvus/pkg/util/typeutil"
) )
@ -135,7 +136,7 @@ func SegmentFromInfo(info *datapb.SegmentInfo) *Segment {
func newSegmentMetricsFrom(segment *Segment) *metricsinfo.Segment { func newSegmentMetricsFrom(segment *Segment) *metricsinfo.Segment {
convertedSegment := metrics.NewSegmentFrom(segment.SegmentInfo) convertedSegment := metrics.NewSegmentFrom(segment.SegmentInfo)
convertedSegment.NodeID = segment.Node convertedSegment.NodeID = segment.Node
convertedSegment.LoadedTimestamp = segment.Version convertedSegment.LoadedTimestamp = tsoutil.PhysicalTimeFormat(segment.LastDeltaTimestamp)
convertedSegment.Index = lo.Map(lo.Values(segment.IndexInfo), func(e *querypb.FieldIndexInfo, i int) *metricsinfo.SegmentIndex { convertedSegment.Index = lo.Map(lo.Values(segment.IndexInfo), func(e *querypb.FieldIndexInfo, i int) *metricsinfo.SegmentIndex {
return &metricsinfo.SegmentIndex{ return &metricsinfo.SegmentIndex{
IndexFieldID: e.FieldID, IndexFieldID: e.FieldID,

View File

@ -233,7 +233,6 @@ func TestGetSegmentDistJSON(t *testing.T) {
assert.Equal(t, int64(1000), s.NumOfRows) assert.Equal(t, int64(1000), s.NumOfRows)
assert.Equal(t, "Flushed", s.State) assert.Equal(t, "Flushed", s.State)
assert.Equal(t, int64(1), s.NodeID) assert.Equal(t, int64(1), s.NodeID)
assert.Equal(t, int64(1), s.LoadedTimestamp)
} else if s.SegmentID == 2 { } else if s.SegmentID == 2 {
assert.Equal(t, int64(200), s.CollectionID) assert.Equal(t, int64(200), s.CollectionID)
assert.Equal(t, int64(20), s.PartitionID) assert.Equal(t, int64(20), s.PartitionID)
@ -241,7 +240,6 @@ func TestGetSegmentDistJSON(t *testing.T) {
assert.Equal(t, int64(2000), s.NumOfRows) assert.Equal(t, int64(2000), s.NumOfRows)
assert.Equal(t, "Flushed", s.State) assert.Equal(t, "Flushed", s.State)
assert.Equal(t, int64(2), s.NodeID) assert.Equal(t, int64(2), s.NodeID)
assert.Equal(t, int64(1), s.LoadedTimestamp)
} else { } else {
assert.Failf(t, "unexpected segment id", "unexpected segment id %d", s.SegmentID) assert.Failf(t, "unexpected segment id", "unexpected segment id %d", s.SegmentID)
} }

View File

@ -31,6 +31,7 @@ import (
"github.com/milvus-io/milvus/internal/proto/querypb" "github.com/milvus-io/milvus/internal/proto/querypb"
"github.com/milvus-io/milvus/internal/querycoordv2/meta" "github.com/milvus-io/milvus/internal/querycoordv2/meta"
"github.com/milvus-io/milvus/pkg/util/merr" "github.com/milvus-io/milvus/pkg/util/merr"
"github.com/milvus-io/milvus/pkg/util/metricsinfo"
"github.com/milvus-io/milvus/pkg/util/typeutil" "github.com/milvus-io/milvus/pkg/util/typeutil"
) )
@ -502,17 +503,7 @@ func (task *LeaderTask) MarshalJSON() ([]byte, error) {
} }
func marshalJSON(task Task) ([]byte, error) { func marshalJSON(task Task) ([]byte, error) {
return json.Marshal(&struct { return json.Marshal(&metricsinfo.QueryCoordTask{
TaskName string `json:"task_name,omitempty"`
CollectionID int64 `json:"collection_id,omitempty"`
Replica int64 `json:"replica_id,omitempty"`
TaskType string `json:"task_type,omitempty"`
TaskStatus string `json:"task_status,omitempty"`
Priority string `json:"priority,omitempty"`
Actions []string `json:"actions,omitempty"`
Step int `json:"step,omitempty"`
Reason string `json:"reason,omitempty"`
}{
TaskName: task.Name(), TaskName: task.Name(),
CollectionID: task.CollectionID(), CollectionID: task.CollectionID(),
Replica: task.ReplicaID(), Replica: task.ReplicaID(),

View File

@ -18,6 +18,7 @@ package task
import ( import (
"context" "context"
"encoding/json"
"math/rand" "math/rand"
"strings" "strings"
"testing" "testing"
@ -45,6 +46,7 @@ import (
"github.com/milvus-io/milvus/pkg/kv" "github.com/milvus-io/milvus/pkg/kv"
"github.com/milvus-io/milvus/pkg/util/etcd" "github.com/milvus-io/milvus/pkg/util/etcd"
"github.com/milvus-io/milvus/pkg/util/merr" "github.com/milvus-io/milvus/pkg/util/merr"
"github.com/milvus-io/milvus/pkg/util/metricsinfo"
"github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/paramtable"
"github.com/milvus-io/milvus/pkg/util/testutils" "github.com/milvus-io/milvus/pkg/util/testutils"
"github.com/milvus-io/milvus/pkg/util/typeutil" "github.com/milvus-io/milvus/pkg/util/typeutil"
@ -1837,8 +1839,11 @@ func (suite *TaskSuite) TestGetTasksJSON() {
suite.NoError(err) suite.NoError(err)
actualJSON := scheduler.GetTasksJSON() actualJSON := scheduler.GetTasksJSON()
suite.Contains(actualJSON, "SegmentTask")
suite.Contains(actualJSON, "ChannelTask") var tasks []*metricsinfo.QueryCoordTask
err = json.Unmarshal([]byte(actualJSON), &tasks)
suite.NoError(err)
suite.Equal(2, len(tasks))
} }
func TestTask(t *testing.T) { func TestTask(t *testing.T) {

View File

@ -32,6 +32,7 @@ import (
"github.com/milvus-io/milvus/pkg/mq/msgdispatcher" "github.com/milvus-io/milvus/pkg/mq/msgdispatcher"
"github.com/milvus-io/milvus/pkg/util/metricsinfo" "github.com/milvus-io/milvus/pkg/util/metricsinfo"
"github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/paramtable"
"github.com/milvus-io/milvus/pkg/util/tsoutil"
"github.com/milvus-io/milvus/pkg/util/typeutil" "github.com/milvus-io/milvus/pkg/util/typeutil"
) )
@ -65,7 +66,7 @@ func TestGetPipelineJSON(t *testing.T) {
{ {
Name: ch, Name: ch,
WatchState: "Healthy", WatchState: "Healthy",
LatestTimeTick: typeutil.TimestampToString(0), LatestTimeTick: tsoutil.PhysicalTimeFormat(0),
NodeID: paramtable.GetNodeID(), NodeID: paramtable.GetNodeID(),
CollectionID: 1, CollectionID: 1,
}, },

View File

@ -30,6 +30,7 @@ import (
"github.com/milvus-io/milvus/pkg/util/metricsinfo" "github.com/milvus-io/milvus/pkg/util/metricsinfo"
"github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/paramtable"
"github.com/milvus-io/milvus/pkg/util/timerecord" "github.com/milvus-io/milvus/pkg/util/timerecord"
"github.com/milvus-io/milvus/pkg/util/tsoutil"
"github.com/milvus-io/milvus/pkg/util/typeutil" "github.com/milvus-io/milvus/pkg/util/typeutil"
) )
@ -170,7 +171,7 @@ func (m *manager) GetChannelStats() []*metricsinfo.Channel {
ret = append(ret, &metricsinfo.Channel{ ret = append(ret, &metricsinfo.Channel{
Name: ch, Name: ch,
WatchState: p.Status(), WatchState: p.Status(),
LatestTimeTick: typeutil.TimestampToString(tt), LatestTimeTick: tsoutil.PhysicalTimeFormat(tt),
NodeID: paramtable.GetNodeID(), NodeID: paramtable.GetNodeID(),
CollectionID: p.GetCollectionID(), CollectionID: p.GetCollectionID(),
}) })

View File

@ -1,6 +1,10 @@
package metrics package metrics
import ( import (
"strconv"
"github.com/samber/lo"
"github.com/milvus-io/milvus/internal/proto/datapb" "github.com/milvus-io/milvus/internal/proto/datapb"
"github.com/milvus-io/milvus/pkg/util/metricsinfo" "github.com/milvus-io/milvus/pkg/util/metricsinfo"
) )
@ -25,10 +29,17 @@ func NewDMChannelFrom(channel *datapb.VchannelInfo) *metricsinfo.DmChannel {
return &metricsinfo.DmChannel{ return &metricsinfo.DmChannel{
CollectionID: channel.GetCollectionID(), CollectionID: channel.GetCollectionID(),
ChannelName: channel.GetChannelName(), ChannelName: channel.GetChannelName(),
UnflushedSegmentIds: channel.GetUnflushedSegmentIds(), UnflushedSegmentIds: lo.Map(channel.GetUnflushedSegmentIds(), func(t int64, i int) string {
FlushedSegmentIds: channel.GetFlushedSegmentIds(), return strconv.FormatInt(t, 10)
DroppedSegmentIds: channel.GetDroppedSegmentIds(), }),
LevelZeroSegmentIds: channel.GetLevelZeroSegmentIds(), FlushedSegmentIds: lo.Map(channel.GetFlushedSegmentIds(), func(t int64, i int) string {
PartitionStatsVersions: channel.GetPartitionStatsVersions(), return strconv.FormatInt(t, 10)
}),
DroppedSegmentIds: lo.Map(channel.GetDroppedSegmentIds(), func(t int64, i int) string {
return strconv.FormatInt(t, 10)
}),
LevelZeroSegmentIds: lo.Map(channel.GetLevelZeroSegmentIds(), func(t int64, i int) string {
return strconv.FormatInt(t, 10)
}),
} }
} }

View File

@ -14,7 +14,7 @@ require (
github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 github.com/grpc-ecosystem/go-grpc-middleware v1.3.0
github.com/json-iterator/go v1.1.12 github.com/json-iterator/go v1.1.12
github.com/klauspost/compress v1.17.7 github.com/klauspost/compress v1.17.7
github.com/milvus-io/milvus-proto/go-api/v2 v2.3.4-0.20241108105827-266fb751b620 github.com/milvus-io/milvus-proto/go-api/v2 v2.3.4-0.20241111062829-6de3d96f664f
github.com/nats-io/nats-server/v2 v2.10.12 github.com/nats-io/nats-server/v2 v2.10.12
github.com/nats-io/nats.go v1.34.1 github.com/nats-io/nats.go v1.34.1
github.com/panjf2000/ants/v2 v2.7.2 github.com/panjf2000/ants/v2 v2.7.2

View File

@ -488,8 +488,8 @@ github.com/milvus-io/cgosymbolizer v0.0.0-20240722103217-b7dee0e50119 h1:9VXijWu
github.com/milvus-io/cgosymbolizer v0.0.0-20240722103217-b7dee0e50119/go.mod h1:DvXTE/K/RtHehxU8/GtDs4vFtfw64jJ3PaCnFri8CRg= github.com/milvus-io/cgosymbolizer v0.0.0-20240722103217-b7dee0e50119/go.mod h1:DvXTE/K/RtHehxU8/GtDs4vFtfw64jJ3PaCnFri8CRg=
github.com/milvus-io/gorocksdb v0.0.0-20220624081344-8c5f4212846b h1:TfeY0NxYxZzUfIfYe5qYDBzt4ZYRqzUjTR6CvUzjat8= github.com/milvus-io/gorocksdb v0.0.0-20220624081344-8c5f4212846b h1:TfeY0NxYxZzUfIfYe5qYDBzt4ZYRqzUjTR6CvUzjat8=
github.com/milvus-io/gorocksdb v0.0.0-20220624081344-8c5f4212846b/go.mod h1:iwW+9cWfIzzDseEBCCeDSN5SD16Tidvy8cwQ7ZY8Qj4= github.com/milvus-io/gorocksdb v0.0.0-20220624081344-8c5f4212846b/go.mod h1:iwW+9cWfIzzDseEBCCeDSN5SD16Tidvy8cwQ7ZY8Qj4=
github.com/milvus-io/milvus-proto/go-api/v2 v2.3.4-0.20241108105827-266fb751b620 h1:0IWUDtDloift7cQHalhdjuVkL/3qSeiXFqR7MofZBkg= github.com/milvus-io/milvus-proto/go-api/v2 v2.3.4-0.20241111062829-6de3d96f664f h1:yLxT8NH0ixUOJMqJuk0xvGf0cKsr+N2xibyTat256PI=
github.com/milvus-io/milvus-proto/go-api/v2 v2.3.4-0.20241108105827-266fb751b620/go.mod h1:/6UT4zZl6awVeXLeE7UGDWZvXj3IWkRsh3mqsn0DiAs= github.com/milvus-io/milvus-proto/go-api/v2 v2.3.4-0.20241111062829-6de3d96f664f/go.mod h1:/6UT4zZl6awVeXLeE7UGDWZvXj3IWkRsh3mqsn0DiAs=
github.com/milvus-io/pulsar-client-go v0.12.1 h1:O2JZp1tsYiO7C0MQ4hrUY/aJXnn2Gry6hpm7UodghmE= github.com/milvus-io/pulsar-client-go v0.12.1 h1:O2JZp1tsYiO7C0MQ4hrUY/aJXnn2Gry6hpm7UodghmE=
github.com/milvus-io/pulsar-client-go v0.12.1/go.mod h1:dkutuH4oS2pXiGm+Ti7fQZ4MRjrMPZ8IJeEGAWMeckk= github.com/milvus-io/pulsar-client-go v0.12.1/go.mod h1:dkutuH4oS2pXiGm+Ti7fQZ4MRjrMPZ8IJeEGAWMeckk=
github.com/minio/highwayhash v1.0.2 h1:Aak5U0nElisjDCfPSG79Tgzkn2gl66NxOMspRrKnA/g= github.com/minio/highwayhash v1.0.2 h1:Aak5U0nElisjDCfPSG79Tgzkn2gl66NxOMspRrKnA/g=
@ -655,7 +655,6 @@ github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasO
github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI= github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI=
github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ=
github.com/spf13/cast v1.3.0 h1:oget//CVOEoFewqQxwr0Ej5yjygnqGkvggSE/gB35Q8=
github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
github.com/spf13/cast v1.3.1 h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng= github.com/spf13/cast v1.3.1 h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng=
github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=

View File

@ -15,7 +15,6 @@ import (
"encoding/json" "encoding/json"
"github.com/milvus-io/milvus-proto/go-api/v2/rgpb" "github.com/milvus-io/milvus-proto/go-api/v2/rgpb"
"github.com/milvus-io/milvus/pkg/util/typeutil"
) )
// ComponentInfos defines the interface of all component infos // ComponentInfos defines the interface of all component infos
@ -90,7 +89,7 @@ type SlowQuery struct {
Partitions string `json:"partitions,omitempty"` Partitions string `json:"partitions,omitempty"`
ConsistencyLevel string `json:"consistency_level,omitempty"` ConsistencyLevel string `json:"consistency_level,omitempty"`
UseDefaultConsistency bool `json:"use_default_consistency,omitempty"` UseDefaultConsistency bool `json:"use_default_consistency,omitempty"`
GuaranteeTimestamp uint64 `json:"guarantee_timestamp,omitempty"` GuaranteeTimestamp uint64 `json:"guarantee_timestamp,omitempty,string"`
Duration string `json:"duration,omitempty"` Duration string `json:"duration,omitempty"`
User string `json:"user,omitempty"` User string `json:"user,omitempty"`
QueryParams *QueryParams `json:"query_params,omitempty"` QueryParams *QueryParams `json:"query_params,omitempty"`
@ -100,24 +99,23 @@ type SlowQuery struct {
type DmChannel struct { type DmChannel struct {
NodeID int64 `json:"node_id,omitempty"` NodeID int64 `json:"node_id,omitempty"`
Version int64 `json:"version,omitempty"` Version int64 `json:"version,omitempty,string"`
CollectionID int64 `json:"collection_id,omitempty"` CollectionID int64 `json:"collection_id,omitempty,string"`
ChannelName string `json:"channel_name,omitempty"` ChannelName string `json:"channel_name,omitempty"`
UnflushedSegmentIds []int64 `json:"unflushed_segment_ids,omitempty"` UnflushedSegmentIds []string `json:"unflushed_segment_ids,omitempty"`
FlushedSegmentIds []int64 `json:"flushed_segment_ids,omitempty"` FlushedSegmentIds []string `json:"flushed_segment_ids,omitempty"`
DroppedSegmentIds []int64 `json:"dropped_segment_ids,omitempty"` DroppedSegmentIds []string `json:"dropped_segment_ids,omitempty"`
LevelZeroSegmentIds []int64 `json:"level_zero_segment_ids,omitempty"` LevelZeroSegmentIds []string `json:"level_zero_segment_ids,omitempty"`
PartitionStatsVersions map[int64]int64 `json:"partition_stats_versions,omitempty"`
WatchState string `json:"watch_state,omitempty"` WatchState string `json:"watch_state,omitempty"`
StartWatchTS int64 `json:"start_watch_ts,omitempty"` StartWatchTS string `json:"start_watch_ts,omitempty"`
} }
type Segment struct { type Segment struct {
SegmentID int64 `json:"segment_id,omitempty"` SegmentID int64 `json:"segment_id,omitempty,string"`
CollectionID int64 `json:"collection_id,omitempty"` CollectionID int64 `json:"collection_id,omitempty,string"`
PartitionID int64 `json:"partition_id,omitempty"` PartitionID int64 `json:"partition_id,omitempty,string"`
Channel string `json:"channel,omitempty"` Channel string `json:"channel,omitempty"`
NumOfRows int64 `json:"num_of_rows,omitempty"` NumOfRows int64 `json:"num_of_rows,omitempty,string"`
State string `json:"state,omitempty"` State string `json:"state,omitempty"`
IsImporting bool `json:"is_importing,omitempty"` IsImporting bool `json:"is_importing,omitempty"`
Compacted bool `json:"compacted,omitempty"` Compacted bool `json:"compacted,omitempty"`
@ -127,42 +125,54 @@ type Segment struct {
// load related // load related
IsInvisible bool `json:"is_invisible,omitempty"` IsInvisible bool `json:"is_invisible,omitempty"`
LoadedTimestamp int64 `json:"loaded_timestamp,omitempty"` LoadedTimestamp string `json:"loaded_timestamp,omitempty,string"`
Index []*SegmentIndex `json:"index,omitempty"` Index []*SegmentIndex `json:"index,omitempty"`
ResourceGroup string `json:"resource_group,omitempty"` ResourceGroup string `json:"resource_group,omitempty"`
LoadedInsertRowCount int64 `json:"loaded_insert_row_count,omitempty"` // inert row count for growing segment that excludes the deleted row count in QueryNode LoadedInsertRowCount int64 `json:"loaded_insert_row_count,omitempty,string"` // inert row count for growing segment that excludes the deleted row count in QueryNode
MemSize int64 `json:"mem_size,omitempty"` // memory size of segment in QueryNode MemSize int64 `json:"mem_size,omitempty,string"` // memory size of segment in QueryNode
// flush related // flush related
FlushedRows int64 `json:"flushed_rows,omitempty"` FlushedRows int64 `json:"flushed_rows,omitempty,string"`
SyncBufferRows int64 `json:"sync_buffer_rows,omitempty"` SyncBufferRows int64 `json:"sync_buffer_rows,omitempty,string"`
SyncingRows int64 `json:"syncing_rows,omitempty"` SyncingRows int64 `json:"syncing_rows,omitempty,string"`
} }
type SegmentIndex struct { type SegmentIndex struct {
IndexFieldID int64 `json:"field_id,omitempty"` IndexFieldID int64 `json:"field_id,omitempty,string"`
IndexID int64 `json:"index_id,omitempty"` IndexID int64 `json:"index_id,omitempty,string"`
BuildID int64 `json:"build_id,omitempty"` BuildID int64 `json:"build_id,omitempty,string"`
IndexSize int64 `json:"index_size,omitempty"` IndexSize int64 `json:"index_size,omitempty,string"`
IsLoaded bool `json:"is_loaded,omitempty"` IsLoaded bool `json:"is_loaded,omitempty,string"`
} }
type QueryCoordTarget struct { type QueryCoordTarget struct {
CollectionID int64 `json:"collection_id,omitempty"` CollectionID int64 `json:"collection_id,omitempty,string"`
Segments []*Segment `json:"segments,omitempty"` Segments []*Segment `json:"segments,omitempty"`
DMChannels []*DmChannel `json:"dm_channels,omitempty"` DMChannels []*DmChannel `json:"dm_channels,omitempty"`
} }
type QueryCoordTask struct {
TaskName string `json:"task_name,omitempty"`
CollectionID int64 `json:"collection_id,omitempty,string"`
Replica int64 `json:"replica_id,omitempty,string"`
TaskType string `json:"task_type,omitempty"`
TaskStatus string `json:"task_status,omitempty"`
Priority string `json:"priority,omitempty"`
Actions []string `json:"actions,omitempty"`
Step int `json:"step,omitempty"`
Reason string `json:"reason,omitempty"`
}
type LeaderView struct { type LeaderView struct {
LeaderID int64 `json:"leader_id,omitempty"` LeaderID int64 `json:"leader_id,omitempty,string"`
CollectionID int64 `json:"collection_id,omitempty"` CollectionID int64 `json:"collection_id,omitempty,string"`
NodeID int64 `json:"node_id,omitempty"` NodeID int64 `json:"node_id,omitempty"`
Channel string `json:"channel,omitempty"` Channel string `json:"channel,omitempty"`
Version int64 `json:"version,omitempty"` Version int64 `json:"version,omitempty,string"`
SealedSegments []*Segment `json:"sealed_segments,omitempty"` SealedSegments []*Segment `json:"sealed_segments,omitempty"`
GrowingSegments []*Segment `json:"growing_segments,omitempty"` GrowingSegments []*Segment `json:"growing_segments,omitempty"`
TargetVersion int64 `json:"target_version,omitempty"` TargetVersion int64 `json:"target_version,omitempty,string"`
NumOfGrowingRows int64 `json:"num_of_growing_rows,omitempty"` NumOfGrowingRows int64 `json:"num_of_growing_rows,omitempty,string"`
UnServiceableError string `json:"unserviceable_error,omitempty"` UnServiceableError string `json:"unserviceable_error,omitempty"`
} }
@ -179,8 +189,8 @@ type ResourceGroup struct {
} }
type Replica struct { type Replica struct {
ID int64 `json:"ID,omitempty"` ID int64 `json:"ID,omitempty,string"`
CollectionID int64 `json:"collectionID,omitempty"` CollectionID int64 `json:"collectionID,omitempty,string"`
RWNodes []int64 `json:"rw_nodes,omitempty"` RWNodes []int64 `json:"rw_nodes,omitempty"`
ResourceGroup string `json:"resource_group,omitempty"` ResourceGroup string `json:"resource_group,omitempty"`
RONodes []int64 `json:"ro_nodes,omitempty"` RONodes []int64 `json:"ro_nodes,omitempty"`
@ -192,8 +202,8 @@ type Channel struct {
Name string `json:"name,omitempty"` Name string `json:"name,omitempty"`
WatchState string `json:"watch_state,omitempty"` WatchState string `json:"watch_state,omitempty"`
LatestTimeTick string `json:"latest_time_tick,omitempty"` // a time string that indicates the latest time tick of the channel is received LatestTimeTick string `json:"latest_time_tick,omitempty"` // a time string that indicates the latest time tick of the channel is received
NodeID int64 `json:"node_id,omitempty"` NodeID int64 `json:"node_id,omitempty,string"`
CollectionID int64 `json:"collection_id,omitempty"` CollectionID int64 `json:"collection_id,omitempty,string"`
CheckpointTS string `json:"check_point_ts,omitempty"` // a time string, format like "2006-01-02 15:04:05" CheckpointTS string `json:"check_point_ts,omitempty"` // a time string, format like "2006-01-02 15:04:05"
} }
@ -290,16 +300,28 @@ type DataNodeConfiguration struct {
FlushInsertBufferSize int64 `json:"flush_insert_buffer_size"` FlushInsertBufferSize int64 `json:"flush_insert_buffer_size"`
} }
type IndexTaskStats struct {
IndexID int64 `json:"index_id,omitempty,string"`
CollectionID int64 `json:"collection_id,omitempty,string"`
SegmentID int64 `json:"segment_id,omitempty,string"`
BuildID int64 `json:"build_id,omitempty,string"`
IndexState string `json:"index_state,omitempty"`
FailReason string `json:"fail_reason,omitempty"`
IndexSize uint64 `json:"index_size,omitempty,string"`
IndexVersion int64 `json:"index_version,omitempty,string"`
CreatedUTCTime string `json:"create_time,omitempty"`
}
type SyncTask struct { type SyncTask struct {
SegmentID int64 `json:"segment_id,omitempty"` SegmentID int64 `json:"segment_id,omitempty,string"`
BatchRows int64 `json:"batch_rows,omitempty"` BatchRows int64 `json:"batch_rows,omitempty,string"`
SegmentLevel string `json:"segment_level,omitempty"` SegmentLevel string `json:"segment_level,omitempty,string"`
TSFrom typeutil.Timestamp `json:"ts_from,omitempty"` TSFrom string `json:"ts_from,omitempty"`
TSTo typeutil.Timestamp `json:"ts_to,omitempty"` TSTo string `json:"ts_to,omitempty"`
DeltaRowCount int64 `json:"delta_row_count,omitempty"` DeltaRowCount int64 `json:"delta_row_count,omitempty,string"`
FlushSize int64 `json:"flush_size,omitempty"` FlushSize int64 `json:"flush_size,omitempty,string"`
RunningTime string `json:"running_time,omitempty"` RunningTime string `json:"running_time,omitempty"`
NodeID int64 `json:"node_id,omitempty"` NodeID int64 `json:"node_id,omitempty,string"`
} }
// DataNodeInfos implements ComponentInfos // DataNodeInfos implements ComponentInfos
@ -343,10 +365,10 @@ type DataCoordInfos struct {
} }
type ImportTask struct { type ImportTask struct {
JobID int64 `json:"job_id,omitempty"` JobID int64 `json:"job_id,omitempty,string"`
TaskID int64 `json:"task_id,omitempty"` TaskID int64 `json:"task_id,omitempty,string"`
CollectionID int64 `json:"collection_id,omitempty"` CollectionID int64 `json:"collection_id,omitempty,string"`
NodeID int64 `json:"node_id,omitempty"` NodeID int64 `json:"node_id,omitempty,string"`
State string `json:"state,omitempty"` State string `json:"state,omitempty"`
Reason string `json:"reason,omitempty"` Reason string `json:"reason,omitempty"`
TaskType string `json:"task_type,omitempty"` TaskType string `json:"task_type,omitempty"`
@ -360,11 +382,11 @@ type CompactionTask struct {
Type string `json:"type,omitempty"` Type string `json:"type,omitempty"`
State string `json:"state,omitempty"` State string `json:"state,omitempty"`
FailReason string `json:"fail_reason,omitempty"` FailReason string `json:"fail_reason,omitempty"`
StartTime int64 `json:"start_time,omitempty"` StartTime string `json:"start_time,omitempty"`
EndTime int64 `json:"end_time,omitempty"` EndTime string `json:"end_time,omitempty"`
TotalRows int64 `json:"total_rows,omitempty"` TotalRows int64 `json:"total_rows,omitempty,string"`
InputSegments []int64 `json:"input_segments,omitempty"` InputSegments []string `json:"input_segments,omitempty"`
ResultSegments []int64 `json:"result_segments,omitempty"` ResultSegments []string `json:"result_segments,omitempty"`
} }
// RootCoordConfiguration records the configuration of RootCoord. // RootCoordConfiguration records the configuration of RootCoord.
@ -377,3 +399,67 @@ type RootCoordInfos struct {
BaseComponentInfos BaseComponentInfos
SystemConfigurations RootCoordConfiguration `json:"system_configurations"` SystemConfigurations RootCoordConfiguration `json:"system_configurations"`
} }
type Collections struct {
CollectionNames []string ` json:"collection_names,omitempty"`
CollectionIDs []string `json:"collection_ids,omitempty"`
CreatedUtcTimestamps []string `json:"created_utc_timestamps,omitempty"`
// Load percentage on querynode when type is InMemory
InMemoryPercentages []int `json:"inMemory_percentages,omitempty"`
// Indicate whether query service is available
QueryServiceAvailable []bool `json:"query_service_available,omitempty"`
}
type PartitionInfo struct {
PartitionName string `json:"partition_name,omitempty"`
PartitionID int64 `json:"partition_id,omitempty,string"`
CreatedUtcTimestamp string `json:"created_utc_timestamp,omitempty"`
}
type Field struct {
FieldID string `json:"field_id,omitempty,string"`
Name string `json:"name,omitempty"`
IsPrimaryKey bool `json:"is_primary_key,omitempty"`
Description string `json:"description,omitempty"`
DataType string `json:"data_type,omitempty"`
TypeParams map[string]string `json:"type_params,omitempty"`
IndexParams map[string]string `json:"index_params,omitempty"`
AutoID bool `json:"auto_id,omitempty"`
ElementType string `json:"element_type,omitempty"`
DefaultValue string `json:"default_value,omitempty"`
IsDynamic bool `json:"is_dynamic,omitempty"`
IsPartitionKey bool `json:"is_partition_key,omitempty"`
IsClusteringKey bool `json:"is_clustering_key,omitempty"`
Nullable bool `json:"nullable,omitempty"`
IsFunctionOutput bool `json:"is_function_output,omitempty"`
}
type Collection struct {
CollectionID string `json:"collection_id,omitempty,string"`
CollectionName string `json:"collection_name,omitempty"`
CreatedTime string `json:"created_time,omitempty"`
ShardsNum int `json:"shards_num,omitempty"`
ConsistencyLevel string `json:"consistency_level,omitempty"`
Aliases []string `json:"aliases,omitempty"`
Properties map[string]string `json:"properties,omitempty"`
DBName string `json:"db_name,omitempty,string"`
NumPartitions int `json:"num_partitions,omitempty,string"`
VirtualChannelNames []string `json:"virtual_channel_names,omitempty"`
PhysicalChannelNames []string `json:"physical_channel_names,omitempty"`
PartitionInfos []*PartitionInfo `json:"partition_infos,omitempty"`
EnableDynamicField bool `json:"enable_dynamic_field,omitempty"`
Fields []*Field `json:"fields,omitempty"`
}
type Database struct {
DBName string `json:"db_name,omitempty"`
DBID int64 `json:"dbID,omitempty"`
CreatedTimestamp string `json:"created_timestamp,omitempty"`
Properties map[string]string `json:"properties,omitempty"`
}
type Databases struct {
Names []string `json:"db_names,omitempty"`
IDs []string `json:"db_ids,omitempty"`
CreatedTimestamps []string `json:"created_timestamps,omitempty"`
}

View File

@ -14,14 +14,18 @@ package metricsinfo
import ( import (
"encoding/json" "encoding/json"
"os" "os"
"strconv"
"strings" "strings"
"time" "time"
"github.com/samber/lo"
"go.uber.org/zap" "go.uber.org/zap"
"github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb"
"github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb"
"github.com/milvus-io/milvus-proto/go-api/v2/schemapb"
"github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/log"
"github.com/milvus-io/milvus/pkg/util/funcutil"
"github.com/milvus-io/milvus/pkg/util/typeutil" "github.com/milvus-io/milvus/pkg/util/typeutil"
) )
@ -125,3 +129,61 @@ func NewSlowQueryWithSearchRequest(request *milvuspb.SearchRequest, user string,
Time: time.Now().Format(time.DateTime), Time: time.Now().Format(time.DateTime),
} }
} }
func NewPartitionInfos(partitions *milvuspb.ShowPartitionsResponse) []*PartitionInfo {
partitionInfos := make([]*PartitionInfo, len(partitions.PartitionNames))
for i := range partitions.PartitionNames {
partitionInfos[i] = &PartitionInfo{
PartitionName: partitions.PartitionNames[i],
PartitionID: partitions.PartitionIDs[i],
CreatedUtcTimestamp: typeutil.TimestampToString(partitions.CreatedUtcTimestamps[i]),
}
}
return partitionInfos
}
func NewFields(fields *schemapb.CollectionSchema) []*Field {
fieldInfos := make([]*Field, len(fields.Fields))
for i, f := range fields.Fields {
fieldInfos[i] = &Field{
FieldID: strconv.FormatInt(f.FieldID, 10),
Name: f.Name,
IsPrimaryKey: f.IsPrimaryKey,
Description: f.Description,
DataType: f.DataType.String(),
TypeParams: funcutil.KeyValuePair2Map(f.TypeParams),
IndexParams: funcutil.KeyValuePair2Map(f.IndexParams),
AutoID: f.AutoID,
ElementType: f.ElementType.String(),
DefaultValue: f.DefaultValue.String(),
IsDynamic: f.IsDynamic,
IsPartitionKey: f.IsPartitionKey,
IsClusteringKey: f.IsClusteringKey,
Nullable: f.Nullable,
IsFunctionOutput: f.IsFunctionOutput,
}
}
return fieldInfos
}
func NewDatabase(resp *milvuspb.DescribeDatabaseResponse) *Database {
return &Database{
DBName: resp.GetDbName(),
DBID: resp.GetDbID(),
CreatedTimestamp: typeutil.TimestampToString(uint64(int64(resp.GetCreatedTimestamp()) / int64(time.Millisecond) / int64(time.Nanosecond))),
Properties: funcutil.KeyValuePair2Map(resp.GetProperties()),
}
}
func NewDatabases(resp *milvuspb.ListDatabasesResponse) *Databases {
createdTimestamps := make([]string, len(resp.GetCreatedTimestamp()))
for i, ts := range resp.GetCreatedTimestamp() {
createdTimestamps[i] = typeutil.TimestampToString(uint64(int64(ts) / int64(time.Millisecond) / int64(time.Nanosecond)))
}
return &Databases{
Names: resp.GetDbNames(),
IDs: lo.Map(resp.GetDbIds(), func(t int64, i int) string { return strconv.FormatInt(t, 10) }),
CreatedTimestamps: createdTimestamps,
}
}

View File

@ -95,3 +95,7 @@ func SubByNow(ts uint64) int64 {
now := time.Now().UnixMilli() now := time.Now().UnixMilli()
return now - utcT return now - utcT
} }
func PhysicalTimeFormat(ts uint64) string {
return PhysicalTime(ts).Format(time.DateTime)
}

View File

@ -47,6 +47,9 @@ func SubTimeByWallClock(after, before time.Time) time.Duration {
} }
func TimestampToString(ts uint64) string { func TimestampToString(ts uint64) string {
ut := time.Unix(int64(ts), 0) if ts <= 0 {
return ""
}
ut := time.UnixMilli(int64(ts))
return ut.Format(time.DateTime) return ut.Format(time.DateTime)
} }