diff --git a/cmd/cdc/cli/cli_changefeed_query.go b/cmd/cdc/cli/cli_changefeed_query.go index 1fe44310a2..8c2138e009 100644 --- a/cmd/cdc/cli/cli_changefeed_query.go +++ b/cmd/cdc/cli/cli_changefeed_query.go @@ -14,6 +14,10 @@ package cli import ( + "bytes" + "encoding/json" + + "github.com/BurntSushi/toml" "github.com/pingcap/errors" v2 "github.com/pingcap/ticdc/api/v2" "github.com/pingcap/ticdc/cmd/cdc/factory" @@ -25,7 +29,7 @@ import ( "github.com/spf13/cobra" ) -// cfMeta holds changefeed info and changefeed status. +// cfMeta holds changefeed info and changefeed status for JSON output. type cfMeta struct { UpstreamID uint64 `json:"upstream_id"` ID string `json:"id"` @@ -46,17 +50,54 @@ type cfMeta struct { TaskStatus []config.CaptureTaskStatus `json:"task_status,omitempty"` } +// cfMetaSimplifiedTOML holds simplified changefeed info for TOML output. +type cfMetaSimplifiedTOML struct { + UpstreamID uint64 `toml:"upstream-id"` + ID string `toml:"id"` + Keyspace string `toml:"keyspace"` + FeedState config.FeedState `toml:"state"` + CheckpointTSO uint64 `toml:"checkpoint-tso"` + CheckpointTime api.JSONTime `toml:"checkpoint-time"` + RunningError *config.RunningError `toml:"error,omitempty"` +} + +// cfMetaTOML holds changefeed info for TOML output. Config is interface{} +// because it holds a map[string]interface{} produced by BurntSushi/toml +// encoding (which correctly handles time.Duration as human-readable strings). +type cfMetaTOML struct { + UpstreamID uint64 `toml:"upstream-id"` + ID string `toml:"id"` + Keyspace string `toml:"keyspace"` + SinkURI string `toml:"sink-uri"` + Config interface{} `toml:"config"` + CreateTime api.JSONTime `toml:"create-time"` + StartTs uint64 `toml:"start-ts"` + ResolvedTs uint64 `toml:"resolved-ts"` + TargetTs uint64 `toml:"target-ts"` + CheckpointTSO uint64 `toml:"checkpoint-tso"` + CheckpointTime api.JSONTime `toml:"checkpoint-time"` + Engine config.SortEngine `toml:"sort-engine,omitempty"` + FeedState config.FeedState `toml:"state"` + RunningError *config.RunningError `toml:"error,omitempty"` + ErrorHis []int64 `toml:"error-history,omitempty"` + CreatorVersion string `toml:"creator-version"` + TaskStatus []config.CaptureTaskStatus `toml:"task-status,omitempty"` +} + // queryChangefeedOptions defines flags for the `cli changefeed query` command. type queryChangefeedOptions struct { apiClientV2 apiv2client.APIV2Interface changefeedID string simplified bool keyspace string + outputFormat string } // newQueryChangefeedOptions creates new options for the `cli changefeed query` command. func newQueryChangefeedOptions() *queryChangefeedOptions { - return &queryChangefeedOptions{} + return &queryChangefeedOptions{ + outputFormat: "json", + } } // addFlags receives a *cobra.Command reference and binds @@ -65,6 +106,7 @@ func (o *queryChangefeedOptions) addFlags(cmd *cobra.Command) { cmd.PersistentFlags().StringVarP(&o.keyspace, "keyspace", "k", "default", "Replication task (changefeed) Keyspace") cmd.PersistentFlags().BoolVarP(&o.simplified, "simple", "s", false, "Output simplified replication status") cmd.PersistentFlags().StringVarP(&o.changefeedID, "changefeed-id", "c", "", "Replication task (changefeed) ID") + cmd.PersistentFlags().StringVarP(&o.outputFormat, "output", "o", "json", "Output format (json|toml)") _ = cmd.MarkPersistentFlagRequired("changefeed-id") } @@ -81,6 +123,12 @@ func (o *queryChangefeedOptions) complete(f factory.Factory) error { // run the `cli changefeed query` command. 
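A note on the structs above: pelletier/go-toml/v2 derives keys from the `toml` struct tags and falls back to Go field names when a tag is missing, which is why every field of cfMetaSimplifiedTOML and cfMetaTOML carries an explicit kebab-case tag (the simplified-query test later in this patch asserts that raw field names such as `UpstreamID` never appear in the output). A minimal sketch of that behavior, with a hypothetical `miniMeta` type:

```go
package main

import (
	"os"

	toml "github.com/pelletier/go-toml/v2"
)

// miniMeta is a hypothetical two-field analogue of cfMetaSimplifiedTOML.
// Without the tags, pelletier/go-toml/v2 would fall back to the Go field
// names ("UpstreamID", "FeedState") as keys.
type miniMeta struct {
	UpstreamID uint64 `toml:"upstream-id"`
	FeedState  string `toml:"state"`
}

func main() {
	// Prints:
	//   upstream-id = 1
	//   state = 'normal'
	_ = toml.NewEncoder(os.Stdout).Encode(miniMeta{UpstreamID: 1, FeedState: "normal"})
}
```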
func (o *queryChangefeedOptions) run(cmd *cobra.Command) error { ctx := cmd.Context() + + format, err := util.ParseOutputFormat(o.outputFormat) + if err != nil { + return err + } + if o.simplified { infos, err := o.apiClientV2.Changefeeds().List(ctx, o.keyspace, "all") if err != nil { @@ -88,6 +136,18 @@ func (o *queryChangefeedOptions) run(cmd *cobra.Command) error { } for _, info := range infos { if info.ID == o.changefeedID { + if format == util.OutputFormatTOML { + simplified := &cfMetaSimplifiedTOML{ + UpstreamID: info.UpstreamID, + ID: info.ID, + Keyspace: info.Keyspace, + FeedState: info.FeedState, + CheckpointTSO: info.CheckpointTSO, + CheckpointTime: info.CheckpointTime, + RunningError: info.RunningError, + } + return util.TOMLPrint(cmd, simplified) + } return util.JSONPrint(cmd, info) } } @@ -98,6 +158,32 @@ func (o *queryChangefeedOptions) run(cmd *cobra.Command) error { if err != nil { return err } + + if format == util.OutputFormatTOML { + cfgMap, err := configToMap(detail.Config, format) + if err != nil { + return errors.Annotate(err, "marshal changefeed config") + } + meta := &cfMetaTOML{ + UpstreamID: detail.UpstreamID, + ID: detail.ID, + Keyspace: detail.Keyspace, + SinkURI: detail.SinkURI, + Config: cfgMap, + CreateTime: api.JSONTime(detail.CreateTime), + StartTs: detail.StartTs, + ResolvedTs: detail.ResolvedTs, + TargetTs: detail.TargetTs, + CheckpointTSO: detail.CheckpointTs, + CheckpointTime: detail.CheckpointTime, + FeedState: detail.State, + RunningError: detail.Error, + CreatorVersion: detail.CreatorVersion, + TaskStatus: detail.TaskStatus, + } + return util.TOMLPrint(cmd, meta) + } + meta := &cfMeta{ UpstreamID: detail.UpstreamID, ID: detail.ID, @@ -136,3 +222,37 @@ func newCmdQueryChangefeed(f factory.Factory) *cobra.Command { return command } + +// configToMap serializes a ReplicaConfig to a map[string]interface{} using +// either JSON or TOML encoding. The encoding determines the map's key naming +// convention: JSON produces snake_case keys, TOML produces kebab-case keys. +// +// The TOML path converts through the internal config.ReplicaConfig (which has +// toml struct tags) and encodes with BurntSushi/toml, keeping TOML-specific +// concerns out of the API model. 
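The encode-then-decode round trip described in the comment above, reduced to a standalone sketch. Only BurntSushi/toml is assumed; `demoCfg` is a hypothetical stand-in for the internal config.ReplicaConfig:

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/BurntSushi/toml"
)

// demoCfg is a hypothetical stand-in for the internal config struct;
// the toml struct tags are what give the decoded map its kebab-case keys.
type demoCfg struct {
	CaseSensitive bool `toml:"case-sensitive"`
	Mounter       struct {
		WorkerNum int `toml:"worker-num"`
	} `toml:"mounter"`
}

func main() {
	cfg := demoCfg{CaseSensitive: true}
	cfg.Mounter.WorkerNum = 8

	// Encode the struct to TOML, then decode that TOML into a generic map.
	var buf bytes.Buffer
	if err := toml.NewEncoder(&buf).Encode(cfg); err != nil {
		panic(err)
	}
	var m map[string]interface{}
	if _, err := toml.NewDecoder(&buf).Decode(&m); err != nil {
		panic(err)
	}

	fmt.Println(m["case-sensitive"]) // true
	mounter := m["mounter"].(map[string]interface{})
	fmt.Println(mounter["worker-num"]) // 8
}
```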
+func configToMap(cfg *v2.ReplicaConfig, format util.OutputFormat) (map[string]interface{}, error) { + if cfg == nil { + return nil, nil + } + if format == util.OutputFormatTOML { + internalCfg := cfg.ToInternalReplicaConfig() + var buf bytes.Buffer + if err := toml.NewEncoder(&buf).Encode(internalCfg); err != nil { + return nil, errors.Trace(err) + } + var m map[string]interface{} + if _, err := toml.NewDecoder(&buf).Decode(&m); err != nil { + return nil, errors.Trace(err) + } + return m, nil + } + data, err := json.Marshal(cfg) + if err != nil { + return nil, errors.Trace(err) + } + var m map[string]interface{} + if err := json.Unmarshal(data, &m); err != nil { + return nil, errors.Trace(err) + } + return m, nil +} diff --git a/cmd/cdc/cli/cli_changefeed_query_test.go b/cmd/cdc/cli/cli_changefeed_query_test.go index cd974686a0..70f153eedd 100644 --- a/cmd/cdc/cli/cli_changefeed_query_test.go +++ b/cmd/cdc/cli/cli_changefeed_query_test.go @@ -18,10 +18,12 @@ import ( "io" "os" "testing" + "time" "github.com/golang/mock/gomock" "github.com/pingcap/errors" v2 "github.com/pingcap/ticdc/api/v2" + "github.com/pingcap/ticdc/cmd/util" "github.com/pingcap/ticdc/pkg/api" "github.com/pingcap/ticdc/pkg/api/v2/mock" "github.com/stretchr/testify/require" @@ -88,3 +90,258 @@ func TestChangefeedQueryCli(t *testing.T) { os.Args = []string{"query", "--simple=false", "--changefeed-id=bcd"} require.NotNil(t, o.run(cmd)) } + +func TestChangefeedQueryTOMLOutput(t *testing.T) { + ctrl := gomock.NewController(t) + defer ctrl.Finish() + cfV2 := mock.NewMockChangefeedInterface(ctrl) + f := &mockFactory{changefeeds: cfV2} + o := newQueryChangefeedOptions() + o.complete(f) + cmd := newCmdQueryChangefeed(f) + + caseSensitive := true + workerNum := 8 + syncInterval := newJSONDuration(t, 10*time.Minute) + cfV2.EXPECT().Get(gomock.Any(), gomock.Any(), "test-toml").Return(&v2.ChangeFeedInfo{ + UpstreamID: 1, + ID: "test-toml", + Keyspace: "default", + SinkURI: "blackhole://", + Config: &v2.ReplicaConfig{ + CaseSensitive: &caseSensitive, + SyncPointInterval: &syncInterval, + Mounter: &v2.MounterConfig{WorkerNum: &workerNum}, + }, + }, nil) + + o.simplified = false + o.changefeedID = "test-toml" + o.outputFormat = "toml" + buf := bytes.NewBufferString("") + cmd.SetOut(buf) + require.Nil(t, o.run(cmd)) + out := buf.String() + + require.Contains(t, out, "sink-uri = 'blackhole://'") + require.Contains(t, out, "id = 'test-toml'") + require.Contains(t, out, "[config]") + require.Contains(t, out, "case-sensitive = true") + require.Contains(t, out, "worker-num = 8") + require.NotContains(t, out, "case_sensitive") + require.NotContains(t, out, "worker_num") +} + +func TestChangefeedQueryTOMLDurationFormat(t *testing.T) { + ctrl := gomock.NewController(t) + defer ctrl.Finish() + cfV2 := mock.NewMockChangefeedInterface(ctrl) + f := &mockFactory{changefeeds: cfV2} + o := newQueryChangefeedOptions() + o.complete(f) + cmd := newCmdQueryChangefeed(f) + + syncInterval := newJSONDuration(t, 10*time.Minute) + syncRetention := newJSONDuration(t, 24*time.Hour) + enableSync := true + cfV2.EXPECT().Get(gomock.Any(), gomock.Any(), "test-dur").Return(&v2.ChangeFeedInfo{ + UpstreamID: 1, + ID: "test-dur", + SinkURI: "blackhole://", + Config: &v2.ReplicaConfig{ + EnableSyncPoint: &enableSync, + SyncPointInterval: &syncInterval, + SyncPointRetention: &syncRetention, + }, + }, nil) + + o.simplified = false + o.changefeedID = "test-dur" + o.outputFormat = "toml" + buf := bytes.NewBufferString("") + cmd.SetOut(buf) + require.Nil(t, o.run(cmd)) + out 
:= buf.String() + + require.Contains(t, out, "sync-point-interval = '10m0s'") + require.Contains(t, out, "sync-point-retention = '24h0m0s'") + require.NotContains(t, out, "600000000000") + require.NotContains(t, out, "86400000000000") +} + +func TestConfigToMap_TOMLKeyNaming(t *testing.T) { + caseSensitive := true + workerNum := 4 + cfg := &v2.ReplicaConfig{ + CaseSensitive: &caseSensitive, + Mounter: &v2.MounterConfig{WorkerNum: &workerNum}, + } + + tomlMap, err := configToMap(cfg, "toml") + require.NoError(t, err) + require.Contains(t, tomlMap, "case-sensitive") + require.NotContains(t, tomlMap, "case_sensitive") + mounter, ok := tomlMap["mounter"].(map[string]interface{}) + require.True(t, ok) + require.Contains(t, mounter, "worker-num") + + jsonMap, err := configToMap(cfg, "json") + require.NoError(t, err) + require.Contains(t, jsonMap, "case_sensitive") + require.NotContains(t, jsonMap, "case-sensitive") +} + +func TestConfigToMapNil(t *testing.T) { + m, err := configToMap(nil, "toml") + require.NoError(t, err) + require.Nil(t, m) + + m, err = configToMap(nil, "json") + require.NoError(t, err) + require.Nil(t, m) +} + +func TestChangefeedQueryTOMLTimestamp(t *testing.T) { + ctrl := gomock.NewController(t) + defer ctrl.Finish() + cfV2 := mock.NewMockChangefeedInterface(ctrl) + f := &mockFactory{changefeeds: cfV2} + o := newQueryChangefeedOptions() + o.complete(f) + cmd := newCmdQueryChangefeed(f) + + createTime := time.Date(2026, 4, 18, 9, 35, 0, 139000000, time.UTC) + cfV2.EXPECT().Get(gomock.Any(), gomock.Any(), "test-ts").Return(&v2.ChangeFeedInfo{ + UpstreamID: 1, + ID: "test-ts", + SinkURI: "blackhole://", + CreateTime: createTime, + Config: &v2.ReplicaConfig{}, + }, nil) + + o.simplified = false + o.changefeedID = "test-ts" + o.outputFormat = "toml" + buf := bytes.NewBufferString("") + cmd.SetOut(buf) + require.Nil(t, o.run(cmd)) + out := buf.String() + + require.Contains(t, out, "create-time = '2026-04-18 09:35:00.139'") +} + +func TestChangefeedQuerySimplifiedTOML(t *testing.T) { + ctrl := gomock.NewController(t) + defer ctrl.Finish() + cfV2 := mock.NewMockChangefeedInterface(ctrl) + f := &mockFactory{changefeeds: cfV2} + o := newQueryChangefeedOptions() + o.complete(f) + cmd := newCmdQueryChangefeed(f) + + cfV2.EXPECT().List(gomock.Any(), gomock.Any(), "all").Return([]v2.ChangefeedCommonInfo{ + { + UpstreamID: 1, + Keyspace: "default", + ID: "abc", + CheckpointTime: api.JSONTime{}, + RunningError: nil, + }, + }, nil) + + o.simplified = true + o.changefeedID = "abc" + o.outputFormat = "toml" + buf := bytes.NewBufferString("") + cmd.SetOut(buf) + require.Nil(t, o.run(cmd)) + out := buf.String() + + require.Contains(t, out, "id = 'abc'") + require.Contains(t, out, "keyspace = 'default'") + require.Contains(t, out, "upstream-id = 1") + require.Contains(t, out, "checkpoint-tso") + // Must use kebab-case, not Go field names + require.NotContains(t, out, "UpstreamID") + require.NotContains(t, out, "FeedState") + require.NotContains(t, out, "CheckpointTSO") +} + +func TestChangefeedQueryTOMLNilConfig(t *testing.T) { + ctrl := gomock.NewController(t) + defer ctrl.Finish() + cfV2 := mock.NewMockChangefeedInterface(ctrl) + f := &mockFactory{changefeeds: cfV2} + o := newQueryChangefeedOptions() + o.complete(f) + cmd := newCmdQueryChangefeed(f) + + cfV2.EXPECT().Get(gomock.Any(), gomock.Any(), "nil-cfg").Return(&v2.ChangeFeedInfo{ + UpstreamID: 1, + ID: "nil-cfg", + SinkURI: "blackhole://", + Config: nil, + }, nil) + + o.simplified = false + o.changefeedID = "nil-cfg" + o.outputFormat = 
"toml" + buf := bytes.NewBufferString("") + cmd.SetOut(buf) + require.Nil(t, o.run(cmd)) + out := buf.String() + + require.Contains(t, out, "id = 'nil-cfg'") +} + +func TestJSONTimeMarshalUnmarshalText(t *testing.T) { + original := api.JSONTime(time.Date(2026, 4, 18, 9, 35, 0, 139000000, time.UTC)) + + text, err := original.MarshalText() + require.NoError(t, err) + require.Equal(t, "2026-04-18 09:35:00.139", string(text)) + + var parsed api.JSONTime + require.NoError(t, parsed.UnmarshalText(text)) + require.Equal(t, original, parsed) +} + +func TestJSONTimeUnmarshalTextError(t *testing.T) { + var parsed api.JSONTime + err := parsed.UnmarshalText([]byte("not-a-timestamp")) + require.Error(t, err) +} + +// newJSONDuration constructs a v2.JSONDuration via JSON round-trip since the +// duration field is unexported. +func newJSONDuration(t *testing.T, d time.Duration) v2.JSONDuration { + t.Helper() + data := []byte(`"` + d.String() + `"`) + var jd v2.JSONDuration + require.NoError(t, jd.UnmarshalJSON(data)) + return jd +} + +func TestParseOutputFormat(t *testing.T) { + tests := []struct { + input string + want string + wantErr bool + }{ + {"json", "json", false}, + {"JSON", "json", false}, + {"toml", "toml", false}, + {"TOML", "toml", false}, + {"yaml", "", true}, + {"", "", true}, + } + for _, tt := range tests { + format, err := util.ParseOutputFormat(tt.input) + if tt.wantErr { + require.Error(t, err, "input: %s", tt.input) + } else { + require.NoError(t, err, "input: %s", tt.input) + require.Equal(t, tt.want, string(format)) + } + } +} diff --git a/cmd/util/util.go b/cmd/util/util.go index 233966055c..3425a4aecb 100644 --- a/cmd/util/util.go +++ b/cmd/util/util.go @@ -23,6 +23,7 @@ import ( "syscall" "github.com/BurntSushi/toml" + pelletier "github.com/pelletier/go-toml/v2" "github.com/pingcap/log" "github.com/pingcap/ticdc/pkg/errors" "github.com/spf13/cobra" @@ -153,6 +154,54 @@ func JSONPrint(cmd *cobra.Command, v interface{}) error { return nil } +// OutputFormat represents the output format type. +type OutputFormat string + +const ( + // OutputFormatJSON is the JSON output format. + OutputFormatJSON OutputFormat = "json" + // OutputFormatTOML is the TOML output format. + OutputFormatTOML OutputFormat = "toml" +) + +// ParseOutputFormat parses an output format string (case-insensitive). +func ParseOutputFormat(s string) (OutputFormat, error) { + switch strings.ToLower(s) { + case "json": + return OutputFormatJSON, nil + case "toml": + return OutputFormatTOML, nil + default: + return "", errors.Errorf("invalid output format '%s', must be 'json' or 'toml'", s) + } +} + +// TOMLPrint outputs data in TOML format with indented tables and multi-line +// arrays. +func TOMLPrint(cmd *cobra.Command, v interface{}) error { + var buf strings.Builder + enc := pelletier.NewEncoder(&buf) + enc.SetArraysMultiline(true) + enc.SetIndentTables(true) + if err := enc.Encode(v); err != nil { + return errors.Trace(err) + } + cmd.Printf("%s", buf.String()) + return nil +} + +// FormatPrint outputs data in the specified format (JSON or TOML). +func FormatPrint(cmd *cobra.Command, v interface{}, format OutputFormat) error { + switch format { + case OutputFormatJSON: + return JSONPrint(cmd, v) + case OutputFormatTOML: + return TOMLPrint(cmd, v) + default: + return errors.Errorf("unsupported output format: %s", format) + } +} + // Endpoint schemes. 
const ( HTTP = "http" diff --git a/go.mod b/go.mod index 3264e2b11b..acee873519 100644 --- a/go.mod +++ b/go.mod @@ -47,6 +47,7 @@ require ( github.com/klauspost/compress v1.18.0 github.com/linkedin/goavro/v2 v2.14.0 github.com/mailru/easyjson v0.7.7 + github.com/pelletier/go-toml/v2 v2.0.8 github.com/phayes/freeport v0.0.0-20180830031419-95f893ade6f2 github.com/pierrec/lz4/v4 v4.1.21 github.com/pingcap/errors v0.11.5-0.20250523034308-74f78ae071ee @@ -281,7 +282,6 @@ require ( github.com/opencontainers/runtime-spec v1.0.2 // indirect github.com/opentracing/basictracer-go v1.1.0 // indirect github.com/opentracing/opentracing-go v1.2.0 // indirect - github.com/pelletier/go-toml/v2 v2.0.8 // indirect github.com/petermattis/goid v0.0.0-20250813065127-a731cc31b4fe // indirect github.com/philhofer/fwd v1.2.0 // indirect github.com/pierrec/lz4 v2.6.1+incompatible // indirect diff --git a/pkg/api/util.go b/pkg/api/util.go index 465b8a279d..1c43b9c67c 100644 --- a/pkg/api/util.go +++ b/pkg/api/util.go @@ -28,6 +28,8 @@ import ( const timeFormat = `"2006-01-02 15:04:05.000"` +const textTimeFormat = "2006-01-02 15:04:05.000" + // JSONTime used to wrap time into json format type JSONTime time.Time @@ -49,6 +51,23 @@ func (t *JSONTime) UnmarshalJSON(data []byte) error { return nil } +// MarshalText implements encoding.TextMarshaler for TOML and other text-based +// encoders, producing an unquoted timestamp string. +func (t JSONTime) MarshalText() ([]byte, error) { + return []byte(time.Time(t).Format(textTimeFormat)), nil +} + +// UnmarshalText implements encoding.TextUnmarshaler for TOML and other +// text-based decoders. +func (t *JSONTime) UnmarshalText(data []byte) error { + tm, err := time.Parse(textTimeFormat, string(data)) + if err != nil { + return err + } + *t = JSONTime(tm) + return nil +} + // HTTPError of cdc http api type HTTPError struct { Error string `json:"error_msg"` diff --git a/tests/integration_tests/cli_query_toml/conf/large_filter.toml b/tests/integration_tests/cli_query_toml/conf/large_filter.toml new file mode 100644 index 0000000000..3a8f672748 --- /dev/null +++ b/tests/integration_tests/cli_query_toml/conf/large_filter.toml @@ -0,0 +1,15 @@ +[filter] + rules = [ + "db_alpha.*", + "db_bravo.*", + "db_charlie.*", + "db_delta.*", + "db_echo.*", + "db_foxtrot.*", + "db_golf.*", + "db_hotel.*", + "db_india.*", + "db_juliet.*", + "db_kilo.*", + "db_lima.*", + ] diff --git a/tests/integration_tests/cli_query_toml/conf/overrides.toml b/tests/integration_tests/cli_query_toml/conf/overrides.toml new file mode 100644 index 0000000000..57c484745c --- /dev/null +++ b/tests/integration_tests/cli_query_toml/conf/overrides.toml @@ -0,0 +1,7 @@ +case-sensitive = true + +[mounter] + worker-num = 8 + +[filter] + rules = ["test_db.*"] diff --git a/tests/integration_tests/cli_query_toml/conf/realistic.toml b/tests/integration_tests/cli_query_toml/conf/realistic.toml new file mode 100644 index 0000000000..2aca8126fd --- /dev/null +++ b/tests/integration_tests/cli_query_toml/conf/realistic.toml @@ -0,0 +1,8 @@ +case-sensitive = true + +[filter] + rules = ["test_db_not_exist.*"] + +[scheduler] + enable-table-across-nodes = true + region-threshold = 1000 diff --git a/tests/integration_tests/cli_query_toml/main.go b/tests/integration_tests/cli_query_toml/main.go new file mode 100644 index 0000000000..cea2d87366 --- /dev/null +++ b/tests/integration_tests/cli_query_toml/main.go @@ -0,0 +1,421 @@ +// Copyright 2026 PingCAP, Inc. 
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"encoding/json"
+	"fmt"
+	"os"
+	"strings"
+
+	"github.com/BurntSushi/toml"
+)
+
+func main() {
+	if len(os.Args) < 3 {
+		fmt.Fprintf(os.Stderr, "Usage: %s <command> <file> [args...]\n", os.Args[0])
+		fmt.Fprintf(os.Stderr, "Commands: validate_toml, check_kebab_keys, compare_json_toml, check_overrides, check_filter_rules, check_realistic, check_indentation, check_array_format, check_default_array_format\n")
+		os.Exit(1)
+	}
+
+	cmd := os.Args[1]
+	var err error
+
+	switch cmd {
+	case "validate_toml":
+		err = validateTOML(os.Args[2])
+	case "check_kebab_keys":
+		err = checkKebabKeys(os.Args[2])
+	case "compare_json_toml":
+		if len(os.Args) < 4 {
+			fmt.Fprintf(os.Stderr, "compare_json_toml requires <json_file> <toml_file>\n")
+			os.Exit(1)
+		}
+		err = compareJSONTOML(os.Args[2], os.Args[3])
+	case "check_overrides":
+		err = checkOverrides(os.Args[2])
+	case "check_filter_rules":
+		if len(os.Args) < 4 {
+			fmt.Fprintf(os.Stderr, "check_filter_rules requires <file> <expected_count>\n")
+			os.Exit(1)
+		}
+		var expected int
+		if _, scanErr := fmt.Sscanf(os.Args[3], "%d", &expected); scanErr != nil {
+			fmt.Fprintf(os.Stderr, "invalid expected_count %q: %v\n", os.Args[3], scanErr)
+			os.Exit(1)
+		}
+		err = checkFilterRules(os.Args[2], expected)
+	case "check_realistic":
+		err = checkRealistic(os.Args[2])
+	case "check_indentation":
+		subcheck := ""
+		if len(os.Args) >= 4 {
+			subcheck = os.Args[3]
+		}
+		err = checkIndentation(os.Args[2], subcheck)
+	case "check_array_format":
+		err = checkArrayFormat(os.Args[2])
+	case "check_default_array_format":
+		err = checkDefaultArrayFormat(os.Args[2])
+	default:
+		fmt.Fprintf(os.Stderr, "Unknown command: %s\n", cmd)
+		os.Exit(1)
+	}
+
+	if err != nil {
+		fmt.Fprintf(os.Stderr, "FAIL: %v\n", err)
+		os.Exit(1)
+	}
+	fmt.Println("PASS")
+}
+
+func readTOML(path string) (map[string]interface{}, error) {
+	var data map[string]interface{}
+	if _, err := toml.DecodeFile(path, &data); err != nil {
+		return nil, fmt.Errorf("failed to parse TOML file %s: %v", path, err)
+	}
+	return data, nil
+}
+
+func readJSON(path string) (map[string]interface{}, error) {
+	content, err := os.ReadFile(path)
+	if err != nil {
+		return nil, fmt.Errorf("failed to read JSON file %s: %v", path, err)
+	}
+	var data map[string]interface{}
+	if err := json.Unmarshal(content, &data); err != nil {
+		return nil, fmt.Errorf("failed to parse JSON file %s: %v", path, err)
+	}
+	return data, nil
+}
+
+func validateTOML(path string) error {
+	data, err := readTOML(path)
+	if err != nil {
+		return err
+	}
+	if _, ok := data["id"]; !ok {
+		return fmt.Errorf("missing 'id' field in TOML")
+	}
+	if _, ok := data["sink-uri"]; !ok {
+		return fmt.Errorf("missing 'sink-uri' field in TOML")
+	}
+	if _, ok := data["config"]; !ok {
+		return fmt.Errorf("missing 'config' section in TOML")
+	}
+	return nil
+}
+
+func checkKebabKeys(path string) error {
+	content, err := os.ReadFile(path)
+	if err != nil {
+		return fmt.Errorf("failed to read file: %v", err)
+	}
+	text := string(content)
+
+	snakeCaseKeys := []string{"sink_uri", "start_ts", "case_sensitive", "worker_num",
+		"checkpoint_tso", "checkpoint_time", "upstream_id", "creator_version"}
+	for _, key := range snakeCaseKeys {
+		if strings.Contains(text, key+" ") || strings.Contains(text,
key+"=") { + return fmt.Errorf("found snake_case key '%s' in TOML output", key) + } + } + + kebabKeys := []string{"sink-uri", "start-ts", "checkpoint-tso"} + for _, key := range kebabKeys { + if !strings.Contains(text, key) { + return fmt.Errorf("missing expected kebab-case key '%s' in TOML output", key) + } + } + + return nil +} + +func compareJSONTOML(jsonPath, tomlPath string) error { + jData, err := readJSON(jsonPath) + if err != nil { + return err + } + tData, err := readTOML(tomlPath) + if err != nil { + return err + } + + if str(jData["id"]) != str(tData["id"]) { + return fmt.Errorf("id mismatch: json=%v toml=%v", jData["id"], tData["id"]) + } + if str(jData["sink_uri"]) != str(tData["sink-uri"]) { + return fmt.Errorf("sink-uri mismatch: json=%v toml=%v", jData["sink_uri"], tData["sink-uri"]) + } + if str(jData["state"]) != str(tData["state"]) { + return fmt.Errorf("state mismatch: json=%v toml=%v", jData["state"], tData["state"]) + } + + jConfig, jOk := jData["config"].(map[string]interface{}) + tConfig, tOk := tData["config"].(map[string]interface{}) + if !jOk || !tOk { + return fmt.Errorf("config section mismatch: json_ok=%v toml_ok=%v", jOk, tOk) + } + if jConfig["case_sensitive"] != tConfig["case-sensitive"] { + return fmt.Errorf("config.case-sensitive mismatch: json=%v toml=%v", + jConfig["case_sensitive"], tConfig["case-sensitive"]) + } + + jMounter, jmOk := jConfig["mounter"].(map[string]interface{}) + tMounter, tmOk := tConfig["mounter"].(map[string]interface{}) + if !jmOk || !tmOk { + return fmt.Errorf("config.mounter section mismatch: json_ok=%v toml_ok=%v", jmOk, tmOk) + } + if num(jMounter["worker_num"]) != num(tMounter["worker-num"]) { + return fmt.Errorf("config.mounter.worker-num mismatch: json=%v toml=%v", + jMounter["worker_num"], tMounter["worker-num"]) + } + + jFilter, jfOk := jConfig["filter"].(map[string]interface{}) + tFilter, tfOk := tConfig["filter"].(map[string]interface{}) + if !jfOk || !tfOk { + return fmt.Errorf("config.filter section mismatch: json_ok=%v toml_ok=%v", jfOk, tfOk) + } + jRules := toStringSlice(jFilter["rules"]) + tRules := toStringSlice(tFilter["rules"]) + if len(jRules) != len(tRules) { + return fmt.Errorf("filter.rules length mismatch: json=%d toml=%d", len(jRules), len(tRules)) + } + for i := range jRules { + if jRules[i] != tRules[i] { + return fmt.Errorf("filter.rules[%d] mismatch: json=%s toml=%s", i, jRules[i], tRules[i]) + } + } + + return nil +} + +func checkOverrides(path string) error { + data, err := readTOML(path) + if err != nil { + return err + } + config, ok := data["config"].(map[string]interface{}) + if !ok { + return fmt.Errorf("missing or invalid 'config' section") + } + if config["case-sensitive"] != true { + return fmt.Errorf("expected case-sensitive=true, got %v", config["case-sensitive"]) + } + mounter, ok := config["mounter"].(map[string]interface{}) + if !ok { + return fmt.Errorf("missing config.mounter section") + } + if num(mounter["worker-num"]) != 8 { + return fmt.Errorf("expected mounter.worker-num=8, got %v", mounter["worker-num"]) + } + filter, ok := config["filter"].(map[string]interface{}) + if !ok { + return fmt.Errorf("missing config.filter section") + } + rules := toStringSlice(filter["rules"]) + found := false + for _, r := range rules { + if r == "test_db.*" { + found = true + break + } + } + if !found { + return fmt.Errorf("filter.rules missing 'test_db.*', got %v", rules) + } + return nil +} + +func checkFilterRules(path string, expectedCount int) error { + data, err := readTOML(path) + if err != 
nil { + return err + } + config, ok := data["config"].(map[string]interface{}) + if !ok { + return fmt.Errorf("missing or invalid 'config' section") + } + filter, ok := config["filter"].(map[string]interface{}) + if !ok { + return fmt.Errorf("missing config.filter section") + } + rules := toStringSlice(filter["rules"]) + if len(rules) != expectedCount { + return fmt.Errorf("expected %d filter rules, got %d: %v", expectedCount, len(rules), rules) + } + return nil +} + +func str(v interface{}) string { + if v == nil { + return "" + } + return fmt.Sprintf("%v", v) +} + +func num(v interface{}) int64 { + switch n := v.(type) { + case float64: + return int64(n) + case int64: + return n + case json.Number: + i, _ := n.Int64() + return i + default: + return 0 + } +} + +func toStringSlice(v interface{}) []string { + if v == nil { + return nil + } + arr, ok := v.([]interface{}) + if !ok { + return nil + } + result := make([]string, 0, len(arr)) + for _, item := range arr { + result = append(result, fmt.Sprintf("%v", item)) + } + return result +} + +func checkRealistic(path string) error { + data, err := readTOML(path) + if err != nil { + return err + } + config, ok := data["config"].(map[string]interface{}) + if !ok { + return fmt.Errorf("missing or invalid 'config' section") + } + if config["case-sensitive"] != true { + return fmt.Errorf("expected case-sensitive=true, got %v", config["case-sensitive"]) + } + filter, ok := config["filter"].(map[string]interface{}) + if !ok { + return fmt.Errorf("missing config.filter section") + } + rules := toStringSlice(filter["rules"]) + found := false + for _, r := range rules { + if r == "test_db_not_exist.*" { + found = true + break + } + } + if !found { + return fmt.Errorf("filter.rules missing 'test_db_not_exist.*', got %v", rules) + } + scheduler, ok := config["scheduler"].(map[string]interface{}) + if !ok { + return fmt.Errorf("missing config.scheduler section") + } + if scheduler["enable-table-across-nodes"] != true { + return fmt.Errorf("expected scheduler.enable-table-across-nodes=true, got %v", + scheduler["enable-table-across-nodes"]) + } + if num(scheduler["region-threshold"]) != 1000 { + return fmt.Errorf("expected scheduler.region-threshold=1000, got %v", + scheduler["region-threshold"]) + } + return nil +} + +func checkIndentation(path string, subcheck string) error { + content, err := os.ReadFile(path) + if err != nil { + return fmt.Errorf("failed to read file: %v", err) + } + lines := strings.Split(string(content), "\n") + + currentDepth := 0 + inConfig := false + + for _, line := range lines { + if line == "" { + continue + } + trimmed := strings.TrimSpace(line) + + if trimmed == "[config]" { + inConfig = true + currentDepth = 1 + continue + } + if strings.HasPrefix(trimmed, "[config.") { + inConfig = true + currentDepth = strings.Count(trimmed, ".") + 1 + expectedIndent := (currentDepth - 1) * 2 + actualIndent := len(line) - len(strings.TrimLeft(line, " ")) + if (subcheck == "" || subcheck == "nested_headers") && actualIndent != expectedIndent { + return fmt.Errorf("section %s: expected %d-space indent, got %d", trimmed, expectedIndent, actualIndent) + } + continue + } + if strings.HasPrefix(trimmed, "[") { + inConfig = false + continue + } + if inConfig { + expectedIndent := currentDepth * 2 + actualIndent := len(line) - len(strings.TrimLeft(line, " ")) + if subcheck == "config_fields" && currentDepth == 1 && actualIndent < expectedIndent { + return fmt.Errorf("config field: expected >= %d-space indent, got %d: %q", + expectedIndent, 
actualIndent, line)
+			}
+			if subcheck == "nested_fields" && currentDepth > 1 && actualIndent < expectedIndent {
+				return fmt.Errorf("nested field under depth %d: expected >= %d-space indent, got %d: %q",
+					currentDepth, expectedIndent, actualIndent, line)
+			}
+			if subcheck == "" && actualIndent < expectedIndent {
+				return fmt.Errorf("field under depth %d: expected >= %d-space indent, got %d: %q",
+					currentDepth, expectedIndent, actualIndent, line)
+			}
+		}
+	}
+	return nil
+}
+
+func checkDefaultArrayFormat(path string) error {
+	content, err := os.ReadFile(path)
+	if err != nil {
+		return fmt.Errorf("failed to read file: %v", err)
+	}
+	text := string(content)
+	expected := "    rules = [\n      '*.*'\n    ]"
+	if !strings.Contains(text, expected) {
+		return fmt.Errorf("default filter rules not in expected multi-line format")
+	}
+	return nil
+}
+
+func checkArrayFormat(path string) error {
+	content, err := os.ReadFile(path)
+	if err != nil {
+		return fmt.Errorf("failed to read file: %v", err)
+	}
+	text := string(content)
+
+	if !strings.Contains(text, "    rules = [\n") {
+		return fmt.Errorf("rules array not in multi-line format with 4-space indent")
+	}
+	if !strings.Contains(text, "      'db_alpha.*'") {
+		return fmt.Errorf("array elements not 6-space indented")
+	}
+	if !strings.Contains(text, "    ]") {
+		return fmt.Errorf("closing bracket not 4-space indented")
+	}
+	return nil
+}
diff --git a/tests/integration_tests/cli_query_toml/run.sh b/tests/integration_tests/cli_query_toml/run.sh
new file mode 100755
index 0000000000..9340280541
--- /dev/null
+++ b/tests/integration_tests/cli_query_toml/run.sh
@@ -0,0 +1,215 @@
+#!/bin/bash
+
+set -euo pipefail
+
+CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
+source $CUR/../_utils/test_prepare
+WORK_DIR=$OUT_DIR/$TEST_NAME
+CDC_BINARY=cdc.test
+SINK_TYPE=$1
+
+# This test only exercises CLI output format; no downstream sink needed.
+if [ "$SINK_TYPE" != "mysql" ]; then
+	echo "[$(date)] <<<<<< skip $TEST_NAME for sink type $SINK_TYPE (only needs mysql/blackhole) >>>>>>"
+	exit 0
+fi
+
+function run() {
+	rm -rf $WORK_DIR && mkdir -p $WORK_DIR
+
+	start_tidb_cluster --workdir $WORK_DIR
+	run_cdc_server --workdir $WORK_DIR --binary $CDC_BINARY
+
+	# Build Go validator
+	cd $CUR && GO111MODULE=on go build -o $WORK_DIR/toml_validator .
&& cd - + VALIDATOR="$WORK_DIR/toml_validator" + + # --- Create changefeeds --- + cdc_cli_changefeed create --sink-uri="blackhole://" -c "cf-default" + cdc_cli_changefeed create --sink-uri="blackhole://" -c "cf-overrides" \ + --config="$CUR/conf/overrides.toml" + cdc_cli_changefeed create --sink-uri="blackhole://" -c "cf-large-filter" \ + --config="$CUR/conf/large_filter.toml" + cdc_cli_changefeed create \ + --sink-uri="blackhole://root:secret@10.189.5.160:4000?safe-mode=true" \ + -c "cf-realistic" --config="$CUR/conf/realistic.toml" + + # Wait for changefeeds to be running + check_changefeed_state "http://${UP_PD_HOST_1}:${UP_PD_PORT_1}" "cf-default" "normal" "null" "" + check_changefeed_state "http://${UP_PD_HOST_1}:${UP_PD_PORT_1}" "cf-overrides" "normal" "null" "" + check_changefeed_state "http://${UP_PD_HOST_1}:${UP_PD_PORT_1}" "cf-large-filter" "normal" "null" "" + check_changefeed_state "http://${UP_PD_HOST_1}:${UP_PD_PORT_1}" "cf-realistic" "normal" "null" "" + + # --- Capture outputs --- + cdc_cli_changefeed query -c "cf-default" | grep -v "Command to ticdc" >"$WORK_DIR/default.json" + cdc_cli_changefeed query -c "cf-default" --output toml | grep -v "Command to ticdc" >"$WORK_DIR/default.toml" + cdc_cli_changefeed query -c "cf-default" -o toml | grep -v "Command to ticdc" >"$WORK_DIR/default_short.toml" + cdc_cli_changefeed query -c "cf-overrides" | grep -v "Command to ticdc" >"$WORK_DIR/overrides.json" + cdc_cli_changefeed query -c "cf-overrides" --output toml | grep -v "Command to ticdc" >"$WORK_DIR/overrides.toml" + cdc_cli_changefeed query -c "cf-large-filter" | grep -v "Command to ticdc" >"$WORK_DIR/large_filter.json" + cdc_cli_changefeed query -c "cf-large-filter" --output toml | grep -v "Command to ticdc" >"$WORK_DIR/large_filter.toml" + cdc_cli_changefeed query -c "cf-default" --simple --output toml | grep -v "Command to ticdc" >"$WORK_DIR/simple.toml" + cdc_cli_changefeed query -c "cf-realistic" | grep -v "Command to ticdc" >"$WORK_DIR/realistic.json" + cdc_cli_changefeed query -c "cf-realistic" --output toml | grep -v "Command to ticdc" >"$WORK_DIR/realistic.toml" + + # --- Test 1: JSON output valid --- + jq -e '.id == "cf-default"' "$WORK_DIR/default.json" >/dev/null + jq -e '.config != null' "$WORK_DIR/default.json" >/dev/null + jq -e '.sink_uri == "blackhole:"' "$WORK_DIR/default.json" >/dev/null + echo "PASS: Test 1 - JSON output valid" + + # --- Test 2: TOML output valid & parseable --- + $VALIDATOR validate_toml "$WORK_DIR/default.toml" + echo "PASS: Test 2 - TOML output valid" + + # --- Test 3: Kebab-case keys, no snake_case --- + $VALIDATOR check_kebab_keys "$WORK_DIR/default.toml" + echo "PASS: Test 3 - Kebab-case keys verified" + + # --- Test 4: Section structure --- + grep -q "\[config\]" "$WORK_DIR/default.toml" + grep -q "\[config\.filter\]" "$WORK_DIR/default.toml" + grep -q "\[config\.mounter\]" "$WORK_DIR/default.toml" + grep -q "\[config\.sink\]" "$WORK_DIR/default.toml" + echo "PASS: Test 4 - TOML section structure correct" + + # --- Test 5: Duration formatting --- + grep -q "sync-point-interval = '10m0s'" "$WORK_DIR/default.toml" + if grep -qE "600000000000|86400000000000" "$WORK_DIR/default.toml"; then + echo "FAIL: Test 5 - raw nanosecond values found in TOML" + exit 1 + fi + echo "PASS: Test 5 - Durations are human-readable" + + # --- Test 6: Timestamp formatting --- + if grep -qE "create-time = '[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}'" "$WORK_DIR/default.toml"; then + echo "PASS: Test 6 - Timestamp is human-readable" + else 
+ echo "FAIL: Test 6 - create-time not in expected format" + grep "create-time" "$WORK_DIR/default.toml" || true + exit 1 + fi + + # --- Test 7: JSON/TOML correspondence --- + $VALIDATOR compare_json_toml "$WORK_DIR/default.json" "$WORK_DIR/default.toml" + echo "PASS: Test 7 - JSON/TOML field correspondence" + + # --- Test 8: Overrides config values --- + $VALIDATOR check_overrides "$WORK_DIR/overrides.toml" + echo "PASS: Test 8 - Overrides config values correct" + + # --- Test 9: Overrides JSON/TOML match --- + $VALIDATOR compare_json_toml "$WORK_DIR/overrides.json" "$WORK_DIR/overrides.toml" + echo "PASS: Test 9 - Overrides JSON/TOML match" + + # --- Test 10: Large filter rules --- + $VALIDATOR check_filter_rules "$WORK_DIR/large_filter.toml" 12 + echo "PASS: Test 10 - All 12 filter rules present" + + # --- Test 11: Simplified query - no config section --- + if grep -q "\[config\]" "$WORK_DIR/simple.toml"; then + echo "FAIL: Test 11 - simplified output should not have [config] section" + exit 1 + fi + echo "PASS: Test 11 - Simplified query has no config" + + # --- Test 12: Simplified query - has status fields --- + grep -q "checkpoint-tso" "$WORK_DIR/simple.toml" + grep -q "upstream-id" "$WORK_DIR/simple.toml" + echo "PASS: Test 12 - Simplified query has status fields" + + # --- Test 13: Invalid format error --- + set +e + bad_output=$(cdc_cli_changefeed query -c "cf-default" --output yaml 2>&1) + rc=$? + set -e + if [ "$rc" -ne 0 ] && echo "$bad_output" | grep -qi "invalid\|unknown\|unsupported"; then + echo "PASS: Test 13 - Invalid format produces error" + else + echo "FAIL: Test 13 - expected non-zero exit with error message, rc=$rc, output: $bad_output" + exit 1 + fi + + # --- Test 14: Default output is JSON --- + jq -e '.id == "cf-default"' "$WORK_DIR/default.json" >/dev/null + echo "PASS: Test 14 - Default output is valid JSON" + + # --- Test 15: -o toml short flag --- + $VALIDATOR validate_toml "$WORK_DIR/default_short.toml" + echo "PASS: Test 15 - Short flag -o toml works" + + # --- Test 16: Single-quote string values --- + grep -q "id = 'cf-default'" "$WORK_DIR/default.toml" + grep -q "sink-uri = 'blackhole:'" "$WORK_DIR/default.toml" + echo "PASS: Test 16 - Single-quote string values" + + # --- Test 17: Overrides JSON validation --- + jq -e '.config.case_sensitive == true' "$WORK_DIR/overrides.json" >/dev/null + jq -e '.config.mounter.worker_num == 8' "$WORK_DIR/overrides.json" >/dev/null + echo "PASS: Test 17 - Overrides JSON output correct" + + # --- Test 18: Password redaction in JSON --- + if jq -e '.sink_uri | contains("secret")' "$WORK_DIR/realistic.json" >/dev/null 2>&1; then + echo "FAIL: Test 18 - password not redacted in JSON" + exit 1 + fi + jq -e '.sink_uri | contains("xxxxx")' "$WORK_DIR/realistic.json" >/dev/null + echo "PASS: Test 18 - Password redacted in JSON" + + # --- Test 19: Password redaction in TOML --- + if grep -q "secret" "$WORK_DIR/realistic.toml"; then + echo "FAIL: Test 19 - password not redacted in TOML" + exit 1 + fi + grep -q "xxxxx" "$WORK_DIR/realistic.toml" + echo "PASS: Test 19 - Password redacted in TOML" + + # --- Test 20: Realistic config values in TOML --- + $VALIDATOR check_realistic "$WORK_DIR/realistic.toml" + echo "PASS: Test 20 - Realistic config values correct" + + # --- Test 21: Realistic JSON/TOML match --- + $VALIDATOR compare_json_toml "$WORK_DIR/realistic.json" "$WORK_DIR/realistic.toml" + echo "PASS: Test 21 - Realistic JSON/TOML match" + + # --- Test 22: Default single-element array format --- + $VALIDATOR 
check_default_array_format "$WORK_DIR/default.toml"
+	echo "PASS: Test 22 - Default single-element array multi-line format"
+
+	# --- Test 23: Config fields 2-space indented ---
+	$VALIDATOR check_indentation "$WORK_DIR/default.toml" config_fields
+	echo "PASS: Test 23 - Config fields indented"
+
+	# --- Test 24: Nested section headers indented ---
+	$VALIDATOR check_indentation "$WORK_DIR/default.toml" nested_headers
+	echo "PASS: Test 24 - Nested section headers indented"
+
+	# --- Test 25: Nested section fields indented ---
+	$VALIDATOR check_indentation "$WORK_DIR/default.toml" nested_fields
+	echo "PASS: Test 25 - Nested section fields indented"
+
+	# --- Test 26: Large filter rules present ---
+	jq -e '.config.filter.rules | length == 12' "$WORK_DIR/large_filter.json" >/dev/null
+	echo "PASS: Test 26 - Large filter JSON valid"
+
+	# --- Test 27: Large filter array block format ---
+	$VALIDATOR check_array_format "$WORK_DIR/large_filter.toml"
+	echo "PASS: Test 27 - Large filter array block format correct"
+
+	# --- Test 28: Large filter JSON/TOML match ---
+	$VALIDATOR compare_json_toml "$WORK_DIR/large_filter.json" "$WORK_DIR/large_filter.toml"
+	echo "PASS: Test 28 - Large filter JSON/TOML match"
+
+	# Cleanup changefeeds (subcommand first, flags after, matching the
+	# create invocations above)
+	cdc_cli_changefeed remove --changefeed-id "cf-default"
+	cdc_cli_changefeed remove --changefeed-id "cf-overrides"
+	cdc_cli_changefeed remove --changefeed-id "cf-large-filter"
+	cdc_cli_changefeed remove --changefeed-id "cf-realistic"
+
+	cleanup_process $CDC_BINARY
+}
+
+trap 'stop_test $WORK_DIR' EXIT
+run "$@"
+check_logs $WORK_DIR
+echo "[$(date)] <<<<<< run test case $TEST_NAME success! >>>>>>"
diff --git a/tests/integration_tests/run_light_it_in_ci.sh b/tests/integration_tests/run_light_it_in_ci.sh
index 4859892068..70c7e109a0 100755
--- a/tests/integration_tests/run_light_it_in_ci.sh
+++ b/tests/integration_tests/run_light_it_in_ci.sh
@@ -59,7 +59,7 @@ mysql_groups=(
 	# ds_memory_control
 	'row_format tiflash multi_rocks fail_over_ddl_M correctness_for_shared_column_schema partial_index'
 	# G13
-	'cli_tls_with_auth cli_with_auth fail_over_ddl_N maintainer_failover_when_operator cli_missing_keyspace_error'
+	'cli_tls_with_auth cli_with_auth cli_query_toml fail_over_ddl_N maintainer_failover_when_operator cli_missing_keyspace_error'
 	# G14
 	'batch_add_table batch_update_to_no_batch fail_over_ddl_O update_changefeed_check_config pause_changefeed_with_long_time_ddl'
 	# G15
@@ -97,7 +97,7 @@ kafka_groups=(
 	# G12
 	'row_format tiflash multi_rocks fail_over_ddl_M correctness_for_shared_column_schema partial_index'
 	# G13
-	'cli_tls_with_auth cli_with_auth fail_over_ddl_N maintainer_failover_when_operator'
+	'cli_tls_with_auth cli_with_auth cli_query_toml fail_over_ddl_N maintainer_failover_when_operator'
 	# G14
 	'kafka_simple_basic avro_basic debezium_basic fail_over_ddl_O update_changefeed_check_config'
 	# G15
@@ -135,7 +135,7 @@ pulsar_groups=(
 	# G12
 	'row_format tiflash multi_rocks fail_over_ddl_M correctness_for_shared_column_schema partial_index'
 	# G13
-	'cli_tls_with_auth cli_with_auth fail_over_ddl_N maintainer_failover_when_operator'
+	'cli_tls_with_auth cli_with_auth cli_query_toml fail_over_ddl_N maintainer_failover_when_operator'
 	# G14
 	'avro_basic debezium_basic fail_over_ddl_O update_changefeed_check_config'
 	# G15
@@ -173,7 +173,7 @@ storage_groups=(
 	# G12
 	'row_format tiflash multi_rocks fail_over_ddl_M correctness_for_shared_column_schema partial_index'
 	# G13
-	'cli_tls_with_auth cli_with_auth fail_over_ddl_N maintainer_failover_when_operator'
+
'cli_tls_with_auth cli_with_auth cli_query_toml fail_over_ddl_N maintainer_failover_when_operator' # G14 'csv_storage_partition_table csv_storage_multi_tables_ddl fail_over_ddl_O update_changefeed_check_config' # G15
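A closing note on the pkg/api change: the create-time assertions (TestJSONTimeMarshalUnmarshalText and integration Test 6) hold because pelletier/go-toml/v2 serializes any encoding.TextMarshaler as a TOML string, so the new JSONTime.MarshalText is what the TOML encoder picks up. A minimal sketch of that mechanism, with a hypothetical `stamp` wrapper standing in for api.JSONTime:

```go
package main

import (
	"os"
	"time"

	toml "github.com/pelletier/go-toml/v2"
)

// stamp is a hypothetical analogue of api.JSONTime: a time.Time wrapper
// whose MarshalText emits the unquoted "2006-01-02 15:04:05.000" layout.
type stamp time.Time

func (s stamp) MarshalText() ([]byte, error) {
	return []byte(time.Time(s).Format("2006-01-02 15:04:05.000")), nil
}

type doc struct {
	CreateTime stamp `toml:"create-time"`
}

func main() {
	d := doc{CreateTime: stamp(time.Date(2026, 4, 18, 9, 35, 0, 139000000, time.UTC))}
	// The encoder sees a TextMarshaler and emits a plain string:
	//   create-time = '2026-04-18 09:35:00.139'
	_ = toml.NewEncoder(os.Stdout).Encode(d)
}
```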