diff --git a/CODEOWNERS b/CODEOWNERS index f2cc217de78db..e48a4b44e84d8 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -152,12 +152,12 @@ /pkg/sql @aunjgr /pkg/sql/colexec @ouyuanning @aunjgr /pkg/sql/compile @ouyuanning @aunjgr +/pkg/sql/function @ouyuanning @aunjgr /pkg/sql/models @aptend /pkg/sql/parsers @iamlinjunhong -/pkg/sql/plan @ouyuanning @aunjgr -/pkg/sql/plan/function @ouyuanning @aunjgr -/pkg/sql/plan/explain @ouyuanning @aunjgr -/pkg/sql/plan/tools @gouhongshen +/pkg/sql/planner @ouyuanning @aunjgr +/pkg/sql/planner/explain @ouyuanning @aunjgr +/pkg/sql/planner/tools @gouhongshen /pkg/sql/colexec/indexjoin @aunjgr /pkg/sql/colexec/indexbuild @aunjgr /pkg/sql/colexec/intersect @aunjgr diff --git a/pkg/backup/tae.go b/pkg/backup/tae.go index d57613df8aa37..2018abacecbc2 100644 --- a/pkg/backup/tae.go +++ b/pkg/backup/tae.go @@ -29,9 +29,6 @@ import ( "sync" "time" - "github.com/matrixorigin/matrixone/pkg/objectio/ioutil" - "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/db/gc/v3" - "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/common/runtime" "github.com/matrixorigin/matrixone/pkg/container/types" @@ -39,10 +36,12 @@ import ( "github.com/matrixorigin/matrixone/pkg/fileservice" "github.com/matrixorigin/matrixone/pkg/logutil" "github.com/matrixorigin/matrixone/pkg/objectio" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function/ctl" + "github.com/matrixorigin/matrixone/pkg/objectio/ioutil" + "github.com/matrixorigin/matrixone/pkg/sql/function/ctl" "github.com/matrixorigin/matrixone/pkg/util/executor" "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/common" "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/db/checkpoint" + "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/db/gc/v3" "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/logtail" "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/tasks" ) diff --git a/pkg/cdc/sinker_v2.go b/pkg/cdc/sinker_v2.go index e061a0a4eafa6..e18fe297fbe0f 
100644 --- a/pkg/cdc/sinker_v2.go +++ b/pkg/cdc/sinker_v2.go @@ -27,7 +27,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/logutil" "github.com/matrixorigin/matrixone/pkg/objectio" "github.com/matrixorigin/matrixone/pkg/pb/plan" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" v2 "github.com/matrixorigin/matrixone/pkg/util/metric/v2" "go.uber.org/zap" ) @@ -190,7 +190,7 @@ var CreateMysqlSinker2 = func( return nil, moerr.NewInternalErrorNoCtx("external table is not supported") } - createSql, _, err = plan2.ConstructCreateTableSQL(nil, &newTableDef, nil, true, nil) + createSql, _, err = planner.ConstructCreateTableSQL(nil, &newTableDef, nil, true, nil) if err != nil { executor.Close() return nil, err diff --git a/pkg/cdc/sinker_v2_sql_builder.go b/pkg/cdc/sinker_v2_sql_builder.go index 377818598a9c2..2579e59774999 100644 --- a/pkg/cdc/sinker_v2_sql_builder.go +++ b/pkg/cdc/sinker_v2_sql_builder.go @@ -22,7 +22,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/container/types" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" ) // CDCStatementBuilder constructs SQL statements for CDC sink operations. 
diff --git a/pkg/cnservice/server_query.go b/pkg/cnservice/server_query.go index d4aeb3f568e7e..868e591603389 100644 --- a/pkg/cnservice/server_query.go +++ b/pkg/cnservice/server_query.go @@ -37,7 +37,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/perfcounter" "github.com/matrixorigin/matrixone/pkg/queryservice" qclient "github.com/matrixorigin/matrixone/pkg/queryservice/client" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function/ctl" + "github.com/matrixorigin/matrixone/pkg/sql/function/ctl" "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/util/fault" "github.com/matrixorigin/matrixone/pkg/vm/engine/disttae" diff --git a/pkg/cnservice/server_query_test.go b/pkg/cnservice/server_query_test.go index 30ea75fe0f2eb..e90fbda0e4f44 100644 --- a/pkg/cnservice/server_query_test.go +++ b/pkg/cnservice/server_query_test.go @@ -24,8 +24,6 @@ import ( "unsafe" "github.com/golang/mock/gomock" - "github.com/stretchr/testify/require" - "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/defines" @@ -51,6 +49,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/util/trace" "github.com/matrixorigin/matrixone/pkg/vm/engine" + "github.com/stretchr/testify/require" ) var dummyBadRequestErr = moerr.NewInternalError(context.TODO(), "bad request") @@ -734,7 +733,7 @@ func Test_service_handleCtlReader(t *testing.T) { fields: fields{}, args: args{ ctx: ctx, - // more details in pkg/sql/plan/function/ctl/reader.go::handleCtlReader + // more details in pkg/sql/function/ctl/reader.go::handleCtlReader req: &query.Request{CtlReaderRequest: &query.CtlReaderRequest{ Cmd: "enable", Cfg: "force_shuffle", @@ -821,7 +820,7 @@ func Test_service_handleRunTask(t *testing.T) { fields: fields{}, args: args{ ctx: ctx, - // more details in pkg/sql/plan/function/ctl/reader.go::handleCtlReader + // more 
details in pkg/sql/function/ctl/reader.go::handleCtlReader req: &query.Request{RunTask: &query.RunTaskRequest{ TaskCode: -1, }}, @@ -837,7 +836,7 @@ func Test_service_handleRunTask(t *testing.T) { }, args: args{ ctx: ctx, - // more details in pkg/sql/plan/function/ctl/reader.go::handleCtlReader + // more details in pkg/sql/function/ctl/reader.go::handleCtlReader req: &query.Request{RunTask: &query.RunTaskRequest{ TaskCode: 1, }}, diff --git a/pkg/common/moerr/cause.go b/pkg/common/moerr/cause.go index 8d3c83eddb622..de262992203cf 100644 --- a/pkg/common/moerr/cause.go +++ b/pkg/common/moerr/cause.go @@ -161,7 +161,7 @@ var ( CauseGetProcByUuid = NewInternalError(context.Background(), "GetProcByUuid") CauseGenInsertMOIndexesSql = NewInternalError(context.Background(), "genInsertMOIndexesSql") CauseGenInsertMOIndexesSql2 = NewInternalError(context.Background(), "genInsertMOIndexesSql 2") - //pkg/sql/plan/function/ctl + //pkg/sql/function/ctl CauseHandleCoreDump = NewInternalError(context.Background(), "handleCoreDump") CauseHandleSyncCommit = NewInternalError(context.Background(), "handleSyncCommit") CauseHandleRemoveRemoteLockTable = NewInternalError(context.Background(), "handleRemoveRemoteLockTable") diff --git a/pkg/container/vector/functionTools.go b/pkg/container/vector/functionTools.go index 7489367636bd2..5fa54dad0a759 100644 --- a/pkg/container/vector/functionTools.go +++ b/pkg/container/vector/functionTools.go @@ -22,7 +22,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/bytejson" "github.com/matrixorigin/matrixone/pkg/container/nulls" "github.com/matrixorigin/matrixone/pkg/container/types" - functionUtil "github.com/matrixorigin/matrixone/pkg/sql/plan/function/functionUtil" + functionUtil "github.com/matrixorigin/matrixone/pkg/sql/function/functionUtil" ) // FunctionParameterWrapper is generated from a vector. 
diff --git a/pkg/frontend/authenticate.go b/pkg/frontend/authenticate.go index 667cb344d2774..ccb3dde010e52 100644 --- a/pkg/frontend/authenticate.go +++ b/pkg/frontend/authenticate.go @@ -30,9 +30,6 @@ import ( "sync/atomic" "time" - "github.com/tidwall/btree" - "golang.org/x/sync/errgroup" - "github.com/matrixorigin/matrixone/pkg/catalog" "github.com/matrixorigin/matrixone/pkg/clusterservice" "github.com/matrixorigin/matrixone/pkg/common/moerr" @@ -49,12 +46,12 @@ import ( "github.com/matrixorigin/matrixone/pkg/pb/query" "github.com/matrixorigin/matrixone/pkg/pb/task" "github.com/matrixorigin/matrixone/pkg/queryservice" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/sql/parsers" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect/mysql" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/sql/util" "github.com/matrixorigin/matrixone/pkg/stage" "github.com/matrixorigin/matrixone/pkg/stage/stageutil" @@ -65,6 +62,8 @@ import ( "github.com/matrixorigin/matrixone/pkg/util/trace" "github.com/matrixorigin/matrixone/pkg/util/trace/impl/motrace" "github.com/matrixorigin/matrixone/pkg/util/trace/impl/motrace/statistic" + "github.com/tidwall/btree" + "golang.org/x/sync/errgroup" ) type TenantInfo struct { @@ -4559,7 +4558,7 @@ func doDropFunction(ctx context.Context, ses *Session, df *tree.DropFunction, rm if execResultArrayHasData(erArray) { receivedArgsType := make([]string, len(df.Args)) for i, arg := range df.Args { - typ, err := plan2.GetFunctionArgTypeStrFromAst(arg) + typ, err := planner.GetFunctionArgTypeStrFromAst(arg) if err != nil { return err } @@ -6043,7 +6042,7 @@ func (pota privilegeTipsArray) String() string { } // 
extractPrivilegeTipsFromPlan extracts the privilege tips from the plan -func extractPrivilegeTipsFromPlan(p *plan2.Plan) privilegeTipsArray { +func extractPrivilegeTipsFromPlan(p *plan.Plan) privilegeTipsArray { //NOTE: the pts may be nil when the plan does operate any table. var pts privilegeTipsArray appendPt := func(pt privilegeTips) { @@ -7167,7 +7166,7 @@ func checkRoleWhetherDatabaseOwner(ctx context.Context, ses *Session, dbName str func authenticateUserCanExecuteStatementWithObjectTypeDatabaseAndTable(ctx context.Context, ses *Session, stmt tree.Statement, - p *plan2.Plan) (bool, statistic.StatsArray, error) { + p *plan.Plan) (bool, statistic.StatsArray, error) { var stats statistic.StatsArray stats.Reset() @@ -8718,7 +8717,7 @@ func InitFunction(ses *Session, execCtx *ExecCtx, tenant *TenantInfo, cf *tree.C // format return type fmtctx = tree.NewFmtCtx(dialect.MYSQL, tree.WithQuoteString(true)) - retTypeStr, err = plan2.GetFunctionTypeStrFromAst(cf.ReturnType.Type) + retTypeStr, err = planner.GetFunctionTypeStrFromAst(cf.ReturnType.Type) if err != nil { return err } @@ -8730,7 +8729,7 @@ func InitFunction(ses *Session, execCtx *ExecCtx, tenant *TenantInfo, cf *tree.C argList[i] = &function.Arg{} argList[i].Name = cf.Args[i].GetName(fmtctx) fmtctx.Reset() - typ, err := plan2.GetFunctionArgTypeStrFromAst(cf.Args[i]) + typ, err := planner.GetFunctionArgTypeStrFromAst(cf.Args[i]) if err != nil { return err } @@ -8924,14 +8923,14 @@ func InitProcedure(ctx context.Context, ses *Session, tenant *TenantInfo, cp *tr } initMoProcedure = fmt.Sprintf(updateMoStoredProcedureFormat, string(argsJson), - cp.Lang, plan2.EscapeFormat(cp.Body), dbName, + cp.Lang, planner.EscapeFormat(cp.Body), dbName, tenant.GetUser(), types.CurrentTimestamp().String2(time.UTC, 0), "PROCEDURE", "DEFINER", "", "utf8mb4", "utf8mb4_0900_ai_ci", "utf8mb4_0900_ai_ci", int32(id)) } else { initMoProcedure = fmt.Sprintf(initMoStoredProcedureFormat, string(cp.Name.Name.ObjectName), string(argsJson), 
- cp.Lang, plan2.EscapeFormat(cp.Body), dbName, + cp.Lang, planner.EscapeFormat(cp.Body), dbName, tenant.GetUser(), types.CurrentTimestamp().String2(time.UTC, 0), types.CurrentTimestamp().String2(time.UTC, 0), "PROCEDURE", "DEFINER", "", "utf8mb4", "utf8mb4_0900_ai_ci", "utf8mb4_0900_ai_ci") } err = bh.Exec(ctx, initMoProcedure) diff --git a/pkg/frontend/authenticate2.go b/pkg/frontend/authenticate2.go index a46659e25cc1d..0d33e72e8379f 100644 --- a/pkg/frontend/authenticate2.go +++ b/pkg/frontend/authenticate2.go @@ -19,7 +19,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/catalog" "github.com/matrixorigin/matrixone/pkg/pb/plan" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" ) // verifyAccountCanOperateClusterTable determines the account can operate @@ -229,7 +229,7 @@ var privilegeCacheIsEnabled = func(ctx context.Context, ses *Session) (bool, err } // hasMoCtrl checks whether the plan has mo_ctrl -func hasMoCtrl(p *plan2.Plan) bool { +func hasMoCtrl(p *plan.Plan) bool { if p != nil && p.GetQuery() != nil { //select,insert select, update, delete q := p.GetQuery() if q.StmtType == plan.Query_INSERT || q.StmtType == plan.Query_SELECT { @@ -239,7 +239,7 @@ func hasMoCtrl(p *plan2.Plan) bool { // select mo_ctrl ... // insert into ... select mo_ctrl ... for _, proj := range node.ProjectList { - if plan2.HasMoCtrl(proj) { + if planner.HasMoCtrl(proj) { return true } } @@ -253,7 +253,7 @@ func hasMoCtrl(p *plan2.Plan) bool { // isTargetSysWhiteList checks if ALL DML target tables are in the whitelist. // Returns true only when all target tables are in the whitelist. // Returns false if any target table is not in the whitelist, or if there are no DML target tables. 
-func isTargetSysWhiteList(p *plan2.Plan) bool { +func isTargetSysWhiteList(p *plan.Plan) bool { if p == nil || p.GetQuery() == nil { return false } diff --git a/pkg/frontend/authenticate2_test.go b/pkg/frontend/authenticate2_test.go index e434166d282b1..b17c49ff8bc4f 100644 --- a/pkg/frontend/authenticate2_test.go +++ b/pkg/frontend/authenticate2_test.go @@ -22,8 +22,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/matrixorigin/matrixone/pkg/catalog" + "github.com/matrixorigin/matrixone/pkg/pb/plan" plan3 "github.com/matrixorigin/matrixone/pkg/pb/plan" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" ) func Test_verifyAccountCanOperateClusterTable(t *testing.T) { @@ -199,12 +199,12 @@ func Test_hasMoCtrl(t *testing.T) { ret = hasMoCtrl(nil) assert.False(t, ret) - ret = hasMoCtrl(&plan2.Plan{}) + ret = hasMoCtrl(&plan.Plan{}) assert.False(t, ret) - ret = hasMoCtrl(&plan2.Plan{ - Plan: &plan2.Plan_Query{ - Query: &plan2.Query{ + ret = hasMoCtrl(&plan.Plan{ + Plan: &plan.Plan_Query{ + Query: &plan.Query{ StmtType: plan3.Query_SELECT, Nodes: []*plan3.Node{ { diff --git a/pkg/frontend/authenticate_test.go b/pkg/frontend/authenticate_test.go index e602721a8cc8f..ee7c0e69bfce3 100644 --- a/pkg/frontend/authenticate_test.go +++ b/pkg/frontend/authenticate_test.go @@ -48,7 +48,6 @@ import ( "github.com/matrixorigin/matrixone/pkg/pb/txn" "github.com/matrixorigin/matrixone/pkg/queryservice" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" "github.com/matrixorigin/matrixone/pkg/stage" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/util/trace/impl/motrace/statistic" @@ -3430,18 +3429,18 @@ func Test_determineDropTable(t *testing.T) { func Test_determineDML(t *testing.T) { type arg struct { stmt tree.Statement - p *plan2.Plan + p *plan.Plan } args := []arg{ { stmt: &tree.Select{}, - p: &plan2.Plan{ - Plan: &plan2.Plan_Query{ - Query: &plan2.Query{ - 
Nodes: []*plan2.Node{ - {NodeType: plan.Node_TABLE_SCAN, ObjRef: &plan2.ObjectRef{SchemaName: "t", ObjName: "a"}}, - {NodeType: plan.Node_TABLE_SCAN, ObjRef: &plan2.ObjectRef{SchemaName: "s", ObjName: "b"}}, + p: &plan.Plan{ + Plan: &plan.Plan_Query{ + Query: &plan.Query{ + Nodes: []*plan.Node{ + {NodeType: plan.Node_TABLE_SCAN, ObjRef: &plan.ObjectRef{SchemaName: "t", ObjName: "a"}}, + {NodeType: plan.Node_TABLE_SCAN, ObjRef: &plan.ObjectRef{SchemaName: "s", ObjName: "b"}}, }, }, }, @@ -3449,12 +3448,12 @@ func Test_determineDML(t *testing.T) { }, { stmt: &tree.Update{}, - p: &plan2.Plan{ - Plan: &plan2.Plan_Query{ - Query: &plan2.Query{ - Nodes: []*plan2.Node{ - {NodeType: plan.Node_TABLE_SCAN, ObjRef: &plan2.ObjectRef{SchemaName: "t", ObjName: "a"}}, - {NodeType: plan.Node_TABLE_SCAN, ObjRef: &plan2.ObjectRef{SchemaName: "s", ObjName: "b"}}, + p: &plan.Plan{ + Plan: &plan.Plan_Query{ + Query: &plan.Query{ + Nodes: []*plan.Node{ + {NodeType: plan.Node_TABLE_SCAN, ObjRef: &plan.ObjectRef{SchemaName: "t", ObjName: "a"}}, + {NodeType: plan.Node_TABLE_SCAN, ObjRef: &plan.ObjectRef{SchemaName: "s", ObjName: "b"}}, {NodeType: plan.Node_INSERT}, }, }, @@ -3463,12 +3462,12 @@ func Test_determineDML(t *testing.T) { }, { stmt: &tree.Delete{}, - p: &plan2.Plan{ - Plan: &plan2.Plan_Query{ - Query: &plan2.Query{ - Nodes: []*plan2.Node{ - {NodeType: plan.Node_TABLE_SCAN, ObjRef: &plan2.ObjectRef{SchemaName: "t", ObjName: "a"}}, - {NodeType: plan.Node_TABLE_SCAN, ObjRef: &plan2.ObjectRef{SchemaName: "s", ObjName: "b"}}, + p: &plan.Plan{ + Plan: &plan.Plan_Query{ + Query: &plan.Query{ + Nodes: []*plan.Node{ + {NodeType: plan.Node_TABLE_SCAN, ObjRef: &plan.ObjectRef{SchemaName: "t", ObjName: "a"}}, + {NodeType: plan.Node_TABLE_SCAN, ObjRef: &plan.ObjectRef{SchemaName: "s", ObjName: "b"}}, {NodeType: plan.Node_DELETE}, }, }, @@ -3477,13 +3476,13 @@ func Test_determineDML(t *testing.T) { }, { //insert into select stmt: &tree.Insert{}, - p: &plan2.Plan{ - Plan: &plan2.Plan_Query{ - 
Query: &plan2.Query{ - Nodes: []*plan2.Node{ - {NodeType: plan.Node_TABLE_SCAN, ObjRef: &plan2.ObjectRef{SchemaName: "t", ObjName: "a"}}, - {NodeType: plan.Node_TABLE_SCAN, ObjRef: &plan2.ObjectRef{SchemaName: "s", ObjName: "b"}}, - {NodeType: plan.Node_INSERT, ObjRef: &plan2.ObjectRef{SchemaName: "s", ObjName: "b"}}, + p: &plan.Plan{ + Plan: &plan.Plan_Query{ + Query: &plan.Query{ + Nodes: []*plan.Node{ + {NodeType: plan.Node_TABLE_SCAN, ObjRef: &plan.ObjectRef{SchemaName: "t", ObjName: "a"}}, + {NodeType: plan.Node_TABLE_SCAN, ObjRef: &plan.ObjectRef{SchemaName: "s", ObjName: "b"}}, + {NodeType: plan.Node_INSERT, ObjRef: &plan.ObjectRef{SchemaName: "s", ObjName: "b"}}, }, }, }, diff --git a/pkg/frontend/back_exec.go b/pkg/frontend/back_exec.go index 3ae4660401521..c95bcce6e764f 100644 --- a/pkg/frontend/back_exec.go +++ b/pkg/frontend/back_exec.go @@ -38,7 +38,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/sql/parsers" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect/mysql" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/util" "github.com/matrixorigin/matrixone/pkg/util/trace" "github.com/matrixorigin/matrixone/pkg/util/trace/impl/motrace" @@ -1126,7 +1126,7 @@ func (backSes *backSession) GetStorage() engine.Engine { return getPu(backSes.GetService()).StorageEngine } -func (backSes *backSession) GetStatsCache() *plan2.StatsCache { +func (backSes *backSession) GetStatsCache() *planner.StatsCache { return nil } diff --git a/pkg/frontend/back_result_row_stmt.go b/pkg/frontend/back_result_row_stmt.go index f17ef7a23c424..7e0a9b7d3433e 100644 --- a/pkg/frontend/back_result_row_stmt.go +++ b/pkg/frontend/back_result_row_stmt.go @@ -17,11 +17,10 @@ package frontend import ( "time" - "go.uber.org/zap" - "github.com/matrixorigin/matrixone/pkg/pb/plan" - plan2 
"github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/vm/engine/disttae" + "go.uber.org/zap" ) func executeResultRowStmtInBack(backSes *backSession, @@ -43,7 +42,7 @@ func executeResultRowStmtInBack(backSes *backSession, mrs.AddColumn(mysqlc) } - backSes.rs = &plan.ResultColDef{ResultCols: plan2.GetResultColumnsFromPlan(execCtx.cw.Plan())} + backSes.rs = &plan.ResultColDef{ResultCols: planner.GetResultColumnsFromPlan(execCtx.cw.Plan())} fPrintTxnOp := execCtx.ses.GetTxnHandler().GetTxn() diff --git a/pkg/frontend/clone.go b/pkg/frontend/clone.go index 02a3b46a51666..266289d4f0023 100644 --- a/pkg/frontend/clone.go +++ b/pkg/frontend/clone.go @@ -25,12 +25,12 @@ import ( "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/defines" "github.com/matrixorigin/matrixone/pkg/objectio" - plan2 "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/pb/timestamp" "github.com/matrixorigin/matrixone/pkg/sql/parsers" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/vm/engine" ) @@ -112,7 +112,7 @@ func resolveSnapshot( ) if atTsExpr != nil { - builder := plan.NewQueryBuilder(plan2.Query_INSERT, ses.txnCompileCtx, false, true) + builder := planner.NewQueryBuilder(plan.Query_INSERT, ses.txnCompileCtx, false, true) if snapshot, err = builder.ResolveTsHint(atTsExpr); err != nil { return nil, err } @@ -127,7 +127,7 @@ func getOpAndToAccountId( bh BackgroundExec, toAccountOpt *tree.ToAccountOpt, atTsExpr *tree.AtTimeStamp, -) (opAccountId, toAccountId uint32, snapshot *plan2.Snapshot, err error) { +) (opAccountId, 
toAccountId uint32, snapshot *plan.Snapshot, err error) { if snapshot, err = resolveSnapshot(ses, atTsExpr); err != nil { return 0, 0, nil, err @@ -164,7 +164,7 @@ func handleCloneTable( deferred func(error) error faultInjected bool - snapshot *plan2.Snapshot + snapshot *plan.Snapshot snapshotTS int64 toAccountId uint32 @@ -316,7 +316,7 @@ func handleCloneDatabase( ctx1 context.Context srcTblInfos []*tableInfo - snapshot *plan2.Snapshot + snapshot *plan.Snapshot viewMap = make(map[string]*tableInfo) @@ -326,7 +326,7 @@ func handleCloneDatabase( snapCondition string snapshotTS int64 - subMeta *plan2.SubscriptionMeta + subMeta *plan.SubscriptionMeta ) oldDefault := ses.GetTxnCompileCtx().DefaultDatabase() diff --git a/pkg/frontend/compiler_context.go b/pkg/frontend/compiler_context.go index e2d3015c61f07..a20de1ecceb1a 100644 --- a/pkg/frontend/compiler_context.go +++ b/pkg/frontend/compiler_context.go @@ -26,8 +26,6 @@ import ( "sync" "time" - "go.uber.org/zap" - "github.com/matrixorigin/matrixone/pkg/catalog" "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/common/pubsub" @@ -37,26 +35,27 @@ import ( "github.com/matrixorigin/matrixone/pkg/pb/plan" pb "github.com/matrixorigin/matrixone/pkg/pb/statsinfo" "github.com/matrixorigin/matrixone/pkg/perfcounter" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect/mysql" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/sql/util" v2 "github.com/matrixorigin/matrixone/pkg/util/metric/v2" "github.com/matrixorigin/matrixone/pkg/util/trace/impl/motrace/statistic" "github.com/matrixorigin/matrixone/pkg/vm/engine" "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/common" 
"github.com/matrixorigin/matrixone/pkg/vm/process" + "go.uber.org/zap" ) -var _ plan2.CompilerContext = &TxnCompilerContext{} +var _ planner.CompilerContext = &TxnCompilerContext{} type TxnCompilerContext struct { dbName string buildAlterView bool dbOfView, nameOfView string sub *plan.SubscriptionMeta - snapshot *plan2.Snapshot + snapshot *plan.Snapshot views []string //for support explain analyze tcw ComputationWrapper @@ -106,13 +105,13 @@ func (tcc *TxnCompilerContext) SetViews(views []string) { tcc.views = views } -func (tcc *TxnCompilerContext) GetSnapshot() *plan2.Snapshot { +func (tcc *TxnCompilerContext) GetSnapshot() *plan.Snapshot { tcc.mu.Lock() defer tcc.mu.Unlock() return tcc.snapshot } -func (tcc *TxnCompilerContext) SetSnapshot(snapshot *plan2.Snapshot) { +func (tcc *TxnCompilerContext) SetSnapshot(snapshot *plan.Snapshot) { tcc.mu.Lock() defer tcc.mu.Unlock() tcc.snapshot = snapshot @@ -123,7 +122,7 @@ func (tcc *TxnCompilerContext) InitExecuteStmtParam(execPlan *plan.Execute) (*pl return p, st, err } -func (tcc *TxnCompilerContext) GetStatsCache() *plan2.StatsCache { +func (tcc *TxnCompilerContext) GetStatsCache() *planner.StatsCache { tcc.mu.Lock() defer tcc.mu.Unlock() return tcc.execCtx.ses.GetStatsCache() @@ -203,13 +202,13 @@ func (tcc *TxnCompilerContext) SetContext(ctx context.Context) { tcc.execCtx.reqCtx = ctx } -func (tcc *TxnCompilerContext) DatabaseExists(name string, snapshot *plan2.Snapshot) bool { +func (tcc *TxnCompilerContext) DatabaseExists(name string, snapshot *plan.Snapshot) bool { var err error tempCtx := tcc.execCtx.reqCtx txn := tcc.GetTxnHandler().GetTxn() // change txn to snapshot txn - if plan2.IsSnapshotValid(snapshot) && snapshot.TS.Less(txn.Txn().SnapshotTS) { + if planner.IsSnapshotValid(snapshot) && snapshot.TS.Less(txn.Txn().SnapshotTS) { txn = txn.CloneSnapshotOp(*snapshot.TS) if snapshot.Tenant != nil { @@ -231,7 +230,7 @@ func (tcc *TxnCompilerContext) DatabaseExists(name string, snapshot *plan2.Snaps return true 
} -func (tcc *TxnCompilerContext) GetDatabaseId(dbName string, snapshot *plan2.Snapshot) (uint64, error) { +func (tcc *TxnCompilerContext) GetDatabaseId(dbName string, snapshot *plan.Snapshot) (uint64, error) { dbName, _, err := tcc.ensureDatabaseIsNotEmpty(dbName, false, snapshot) if err != nil { return 0, err @@ -240,7 +239,7 @@ func (tcc *TxnCompilerContext) GetDatabaseId(dbName string, snapshot *plan2.Snap txn := tcc.GetTxnHandler().GetTxn() // change txn to snapshot txn - if plan2.IsSnapshotValid(snapshot) && snapshot.TS.Less(txn.Txn().SnapshotTS) { + if planner.IsSnapshotValid(snapshot) && snapshot.TS.Less(txn.Txn().SnapshotTS) { txn = txn.CloneSnapshotOp(*snapshot.TS) if snapshot.Tenant != nil { @@ -301,7 +300,7 @@ func (tcc *TxnCompilerContext) getRelation( dbName string, tableName string, sub *plan.SubscriptionMeta, - snapshot *plan2.Snapshot, + snapshot *plan.Snapshot, ) (context.Context, engine.Relation, error) { dbName, _, err := tcc.ensureDatabaseIsNotEmpty(dbName, false, snapshot) if err != nil { @@ -319,7 +318,7 @@ func (tcc *TxnCompilerContext) getRelation( v2.GetRelationDurationHistogram.Observe(time.Since(start).Seconds()) }() - if plan2.IsSnapshotValid(snapshot) && snapshot.TS.Less(txn.Txn().SnapshotTS) { + if planner.IsSnapshotValid(snapshot) && snapshot.TS.Less(txn.Txn().SnapshotTS) { txn = txn.CloneSnapshotOp(*snapshot.TS) if snapshot.Tenant != nil { @@ -379,7 +378,7 @@ func (tcc *TxnCompilerContext) getRelation( return tempCtx, table, nil } -func (tcc *TxnCompilerContext) ensureDatabaseIsNotEmpty(dbName string, checkSub bool, snapshot *plan2.Snapshot) (string, *plan.SubscriptionMeta, error) { +func (tcc *TxnCompilerContext) ensureDatabaseIsNotEmpty(dbName string, checkSub bool, snapshot *plan.Snapshot) (string, *plan.SubscriptionMeta, error) { start := time.Now() defer func() { v2.EnsureDatabaseDurationHistogram.Observe(time.Since(start).Seconds()) @@ -401,11 +400,11 @@ func (tcc *TxnCompilerContext) ensureDatabaseIsNotEmpty(dbName string, 
checkSub return dbName, sub, nil } -func (tcc *TxnCompilerContext) ResolveById(tableId uint64, snapshot *plan2.Snapshot) (*plan2.ObjectRef, *plan2.TableDef, error) { +func (tcc *TxnCompilerContext) ResolveById(tableId uint64, snapshot *plan.Snapshot) (*plan.ObjectRef, *plan.TableDef, error) { tempCtx := tcc.execCtx.reqCtx txn := tcc.GetTxnHandler().GetTxn() - if plan2.IsSnapshotValid(snapshot) && snapshot.TS.Less(txn.Txn().SnapshotTS) { + if planner.IsSnapshotValid(snapshot) && snapshot.TS.Less(txn.Txn().SnapshotTS) { txn = txn.CloneSnapshotOp(*snapshot.TS) if snapshot.Tenant != nil { @@ -421,7 +420,7 @@ func (tcc *TxnCompilerContext) ResolveById(tableId uint64, snapshot *plan2.Snaps // convert returnTableID := int64(tableId) - obj := &plan2.ObjectRef{ + obj := &plan.ObjectRef{ SchemaName: dbName, ObjName: tableName, Obj: returnTableID, @@ -430,7 +429,7 @@ func (tcc *TxnCompilerContext) ResolveById(tableId uint64, snapshot *plan2.Snaps return obj, tableDef, nil } -func (tcc *TxnCompilerContext) ResolveSubscriptionTableById(tableId uint64, subMeta *plan.SubscriptionMeta) (*plan2.ObjectRef, *plan2.TableDef, error) { +func (tcc *TxnCompilerContext) ResolveSubscriptionTableById(tableId uint64, subMeta *plan.SubscriptionMeta) (*plan.ObjectRef, *plan.TableDef, error) { txn := tcc.GetTxnHandler().GetTxn() pubContext := tcc.execCtx.reqCtx @@ -446,7 +445,7 @@ func (tcc *TxnCompilerContext) ResolveSubscriptionTableById(tableId uint64, subM // convert returnTableID := int64(tableId) - obj := &plan2.ObjectRef{ + obj := &plan.ObjectRef{ SchemaName: dbName, ObjName: tableName, Obj: returnTableID, @@ -455,7 +454,7 @@ func (tcc *TxnCompilerContext) ResolveSubscriptionTableById(tableId uint64, subM return obj, tableDef, nil } -func (tcc *TxnCompilerContext) Resolve(dbName string, tableName string, snapshot *plan2.Snapshot) (*plan2.ObjectRef, *plan2.TableDef, error) { +func (tcc *TxnCompilerContext) Resolve(dbName string, tableName string, snapshot *plan.Snapshot) (*plan.ObjectRef, 
*plan.TableDef, error) { start := time.Now() defer func() { end := time.Since(start).Seconds() @@ -510,7 +509,7 @@ func (tcc *TxnCompilerContext) Resolve(dbName string, tableName string, snapshot } tableID := int64(table.GetTableID(ctx)) - obj := &plan2.ObjectRef{ + obj := &plan.ObjectRef{ SchemaName: dbName, ObjName: tableName, Obj: tableID, @@ -527,8 +526,8 @@ func (tcc *TxnCompilerContext) Resolve(dbName string, tableName string, snapshot func (tcc *TxnCompilerContext) ResolveIndexTableByRef( ref *plan.ObjectRef, tblName string, - snapshot *plan2.Snapshot, -) (*plan2.ObjectRef, *plan2.TableDef, error) { + snapshot *plan.Snapshot, +) (*plan.ObjectRef, *plan.TableDef, error) { start := time.Now() defer func() { end := time.Since(start).Seconds() @@ -560,7 +559,7 @@ func (tcc *TxnCompilerContext) ResolveIndexTableByRef( } tableID := int64(table.GetTableID(ctx)) - obj := &plan2.ObjectRef{ + obj := &plan.ObjectRef{ SchemaName: ref.SchemaName, ObjName: tblName, Obj: tableID, @@ -832,7 +831,7 @@ func (tcc *TxnCompilerContext) ResolveAccountIds(accountNames []string) (account return accountIds, err } -func (tcc *TxnCompilerContext) Stats(obj *plan2.ObjectRef, snapshot *plan2.Snapshot) (*pb.StatsInfo, error) { +func (tcc *TxnCompilerContext) Stats(obj *plan.ObjectRef, snapshot *plan.Snapshot) (*pb.StatsInfo, error) { statser := statistic.StatsInfoFromContext(tcc.execCtx.reqCtx) start := time.Now() defer func() { @@ -866,7 +865,7 @@ func (tcc *TxnCompilerContext) Stats(obj *plan2.ObjectRef, snapshot *plan2.Snaps return result, nil } -func (tcc *TxnCompilerContext) doStatsHeavyWork(obj *plan2.ObjectRef, snapshot *plan2.Snapshot, tableID uint64) (*pb.StatsInfo, error) { +func (tcc *TxnCompilerContext) doStatsHeavyWork(obj *plan.ObjectRef, snapshot *plan.Snapshot, tableID uint64) (*pb.StatsInfo, error) { dbName := obj.GetSchemaName() tableName := obj.GetObjName() @@ -957,7 +956,7 @@ func (tcc *TxnCompilerContext) GetQueryResultMeta(uuid string) ([]*plan.ColDef, return 
r.ResultCols, str, nil } -func (tcc *TxnCompilerContext) GetSubscriptionMeta(dbName string, snapshot *plan2.Snapshot) (*plan.SubscriptionMeta, error) { +func (tcc *TxnCompilerContext) GetSubscriptionMeta(dbName string, snapshot *plan.Snapshot) (*plan.SubscriptionMeta, error) { start := time.Now() defer func() { v2.GetSubMetaDurationHistogram.Observe(time.Since(start).Seconds()) @@ -965,7 +964,7 @@ func (tcc *TxnCompilerContext) GetSubscriptionMeta(dbName string, snapshot *plan tempCtx := tcc.execCtx.reqCtx txn := tcc.GetTxnHandler().GetTxn() - if plan2.IsSnapshotValid(snapshot) && snapshot.TS.Less(txn.Txn().SnapshotTS) { + if planner.IsSnapshotValid(snapshot) && snapshot.TS.Less(txn.Txn().SnapshotTS) { txn = txn.CloneSnapshotOp(*snapshot.TS) if snapshot.Tenant != nil { @@ -1033,7 +1032,7 @@ func makeResultMetaPath(accountName string, statementId string) string { return fmt.Sprintf("query_result_meta/%s_%s.blk", accountName, statementId) } -func (tcc *TxnCompilerContext) ResolveSnapshotWithSnapshotName(snapshotName string) (*plan2.Snapshot, error) { +func (tcc *TxnCompilerContext) ResolveSnapshotWithSnapshotName(snapshotName string) (*plan.Snapshot, error) { tenantCtx := tcc.GetContext() if snapshot := tcc.GetSnapshot(); snapshot != nil && snapshot.GetTenant() != nil { tenantCtx = defines.AttachAccount(tenantCtx, snapshot.Tenant.TenantID, GetAdminUserId(), GetAccountAdminRoleId()) diff --git a/pkg/frontend/computation_wrapper.go b/pkg/frontend/computation_wrapper.go index 15b220ca0ab59..8514a0984f2e0 100644 --- a/pkg/frontend/computation_wrapper.go +++ b/pkg/frontend/computation_wrapper.go @@ -20,8 +20,6 @@ import ( "time" "github.com/google/uuid" - "github.com/mohae/deepcopy" - "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/container/types" @@ -33,7 +31,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/sql/compile" 
"github.com/matrixorigin/matrixone/pkg/sql/models" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/sql/util" util2 "github.com/matrixorigin/matrixone/pkg/util" "github.com/matrixorigin/matrixone/pkg/util/trace" @@ -41,6 +39,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/vm/engine/disttae" "github.com/matrixorigin/matrixone/pkg/vm/engine/disttae/cache" "github.com/matrixorigin/matrixone/pkg/vm/process" + "github.com/mohae/deepcopy" ) var ( @@ -49,7 +48,7 @@ var ( type TxnComputationWrapper struct { stmt tree.Statement - plan *plan2.Plan + plan *plan.Plan proc *process.Process ses FeSession compile *compile.Compile @@ -133,30 +132,30 @@ func (cwft *TxnComputationWrapper) GetProcess() *process.Process { func (cwft *TxnComputationWrapper) GetColumns(ctx context.Context) ([]interface{}, error) { var err error - cols := plan2.GetResultColumnsFromPlan(cwft.plan) + cols := planner.GetResultColumnsFromPlan(cwft.plan) switch cwft.GetAst().(type) { case *tree.ShowColumns: if len(cols) == 7 { - cols = []*plan2.ColDef{ - {Typ: plan2.Type{Id: int32(types.T_char)}, Name: "Field"}, - {Typ: plan2.Type{Id: int32(types.T_char)}, Name: "Type"}, - {Typ: plan2.Type{Id: int32(types.T_char)}, Name: "Null"}, - {Typ: plan2.Type{Id: int32(types.T_char)}, Name: "Key"}, - {Typ: plan2.Type{Id: int32(types.T_char)}, Name: "Default"}, - {Typ: plan2.Type{Id: int32(types.T_char)}, Name: "Extra"}, - {Typ: plan2.Type{Id: int32(types.T_char)}, Name: "Comment"}, + cols = []*plan.ColDef{ + {Typ: plan.Type{Id: int32(types.T_char)}, Name: "Field"}, + {Typ: plan.Type{Id: int32(types.T_char)}, Name: "Type"}, + {Typ: plan.Type{Id: int32(types.T_char)}, Name: "Null"}, + {Typ: plan.Type{Id: int32(types.T_char)}, Name: "Key"}, + {Typ: plan.Type{Id: int32(types.T_char)}, Name: "Default"}, + {Typ: plan.Type{Id: int32(types.T_char)}, Name: "Extra"}, + 
{Typ: plan.Type{Id: int32(types.T_char)}, Name: "Comment"}, } } else { - cols = []*plan2.ColDef{ - {Typ: plan2.Type{Id: int32(types.T_char)}, Name: "Field"}, - {Typ: plan2.Type{Id: int32(types.T_char)}, Name: "Type"}, - {Typ: plan2.Type{Id: int32(types.T_char)}, Name: "Collation"}, - {Typ: plan2.Type{Id: int32(types.T_char)}, Name: "Null"}, - {Typ: plan2.Type{Id: int32(types.T_char)}, Name: "Key"}, - {Typ: plan2.Type{Id: int32(types.T_char)}, Name: "Default"}, - {Typ: plan2.Type{Id: int32(types.T_char)}, Name: "Extra"}, - {Typ: plan2.Type{Id: int32(types.T_char)}, Name: "Privileges"}, - {Typ: plan2.Type{Id: int32(types.T_char)}, Name: "Comment"}, + cols = []*plan.ColDef{ + {Typ: plan.Type{Id: int32(types.T_char)}, Name: "Field"}, + {Typ: plan.Type{Id: int32(types.T_char)}, Name: "Type"}, + {Typ: plan.Type{Id: int32(types.T_char)}, Name: "Collation"}, + {Typ: plan.Type{Id: int32(types.T_char)}, Name: "Null"}, + {Typ: plan.Type{Id: int32(types.T_char)}, Name: "Key"}, + {Typ: plan.Type{Id: int32(types.T_char)}, Name: "Default"}, + {Typ: plan.Type{Id: int32(types.T_char)}, Name: "Extra"}, + {Typ: plan.Type{Id: int32(types.T_char)}, Name: "Privileges"}, + {Typ: plan.Type{Id: int32(types.T_char)}, Name: "Comment"}, } } } @@ -505,7 +504,7 @@ func createCompile( proc *process.Process, originSQL string, stmt tree.Statement, - plan *plan2.Plan, + pl *plan.Plan, fill func(*batch.Batch, *perfcounter.CounterSet) error, isPrepare bool, ) (retCompile *compile.Compile, err error) { @@ -561,14 +560,14 @@ func createCompile( getStatementStartAt(execCtx.reqCtx), ) retCompile.SetIsPrepare(isPrepare) - retCompile.SetBuildPlanFunc(func(ctx context.Context) (*plan2.Plan, error) { + retCompile.SetBuildPlanFunc(func(ctx context.Context) (*plan.Plan, error) { // No permission verification is required when retry execute buildPlan plan, err := buildPlan(ctx, ses, ses.GetTxnCompileCtx(), stmt) if err != nil { return nil, err } if plan.IsPrepare { - _, _, err = 
plan2.ResetPreparePlan(ses.GetTxnCompileCtx(), plan) + _, _, err = planner.ResetPreparePlan(ses.GetTxnCompileCtx(), plan) } return plan, err }) @@ -581,7 +580,7 @@ func createCompile( fill = func(bat *batch.Batch, crs *perfcounter.CounterSet) error { return nil } } - err = retCompile.Compile(execCtx.reqCtx, plan, fill) + err = retCompile.Compile(execCtx.reqCtx, pl, fill) if err != nil { return } diff --git a/pkg/frontend/data_branch.go b/pkg/frontend/data_branch.go index 9f0537943de81..bb1be68528999 100644 --- a/pkg/frontend/data_branch.go +++ b/pkg/frontend/data_branch.go @@ -50,7 +50,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/pb/timestamp" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/util/executor" "github.com/matrixorigin/matrixone/pkg/vectorindex/sqlexec" @@ -2454,7 +2454,7 @@ func diffOnBase( } else if dagInfo.lcaTableId == tblStuff.baseRel.GetTableID(ctx) { tblStuff.lcaRel = tblStuff.baseRel } else { - lcaSnapshot := &plan2.Snapshot{ + lcaSnapshot := &plan.Snapshot{ Tenant: &plan.SnapshotTenant{ TenantID: ses.GetAccountId(), }, @@ -2496,7 +2496,7 @@ func diffOnBase( // lcaSnapshot *plan.Snapshot //) // - //lcaSnapshot = &plan2.Snapshot{ + //lcaSnapshot = &planner.Snapshot{ // Tenant: &plan.SnapshotTenant{ // TenantID: ses.GetAccountId(), // }, @@ -2838,7 +2838,7 @@ func hashDiffIfHasLCA( return } - baseDeleteBatches = plan2.RemoveIf(baseDeleteBatches, func(t batchWithKind) bool { + baseDeleteBatches = planner.RemoveIf(baseDeleteBatches, func(t batchWithKind) bool { if t.batch.RowCount() == 0 { tblStuff.retPool.releaseRetBatch(t.batch, false) return true @@ -2850,7 +2850,7 @@ func hashDiffIfHasLCA( return } - baseUpdateBatches = plan2.RemoveIf(baseUpdateBatches, func(t batchWithKind) bool { + 
baseUpdateBatches = planner.RemoveIf(baseUpdateBatches, func(t batchWithKind) bool { if t.batch.RowCount() == 0 { tblStuff.retPool.releaseRetBatch(t.batch, false) return true @@ -4308,7 +4308,7 @@ func decideCollectRange( // do nothing } else { if lcaRel, err = getRelationById( - ctx, ses, bh, dagInfo.lcaTableId, &plan2.Snapshot{ + ctx, ses, bh, dagInfo.lcaTableId, &plan.Snapshot{ Tenant: &plan.SnapshotTenant{TenantID: ses.GetAccountId()}, TS: &timestamp.Timestamp{PhysicalTime: tarSp.Physical()}, }); err != nil { diff --git a/pkg/frontend/func_mo_test.go b/pkg/frontend/func_mo_test.go index 4c53e2388a3ea..40ff9c7e26d1f 100644 --- a/pkg/frontend/func_mo_test.go +++ b/pkg/frontend/func_mo_test.go @@ -19,7 +19,7 @@ import ( "testing" "github.com/golang/mock/gomock" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/stretchr/testify/require" ) diff --git a/pkg/frontend/mysql_cmd_executor.go b/pkg/frontend/mysql_cmd_executor.go index b1fa025b478c6..c1826f66edf7a 100644 --- a/pkg/frontend/mysql_cmd_executor.go +++ b/pkg/frontend/mysql_cmd_executor.go @@ -61,8 +61,8 @@ import ( "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect/mysql" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/explain" + "github.com/matrixorigin/matrixone/pkg/sql/planner" + "github.com/matrixorigin/matrixone/pkg/sql/planner/explain" "github.com/matrixorigin/matrixone/pkg/txn/client" txnTrace "github.com/matrixorigin/matrixone/pkg/txn/trace" "github.com/matrixorigin/matrixone/pkg/util" @@ -936,7 +936,7 @@ func doShowVariables(ses *Session, execCtx *ExecCtx, sv *tree.ShowVariables) err if err != nil { return err } - binder := plan2.NewDefaultBinder(execCtx.reqCtx, nil, nil, plan2.Type{Id: 
int32(types.T_varchar), Width: types.MaxVarcharLen}, []string{"variable_name", "value"}) + binder := planner.NewDefaultBinder(execCtx.reqCtx, nil, nil, plan.Type{Id: int32(types.T_varchar), Width: types.MaxVarcharLen}, []string{"variable_name", "value"}) planExpr, err := binder.BindExpr(sv.Where.Expr, 0, false) if err != nil { return err @@ -1061,7 +1061,7 @@ func doExplainStmt(reqCtx context.Context, ses *Session, stmt *tree.ExplainStmt) paramVals := ses.GetTxnCompileCtx().tcw.ParamVals() if len(paramVals) > 0 { //replace the param var in the plan by the param value - exPlan, err = plan2.FillValuesOfParamsInPlan(reqCtx, exPlan, paramVals) + exPlan, err = planner.FillValuesOfParamsInPlan(reqCtx, exPlan, paramVals) if err != nil { return err } @@ -1092,7 +1092,7 @@ func doExplainStmt(reqCtx context.Context, ses *Session, stmt *tree.ExplainStmt) } col1 := new(MysqlColumn) col1.SetColumnType(defines.MYSQL_TYPE_VAR_STRING) - col1.SetName(plan2.GetPlanTitle(explainQuery.QueryPlan, txnHaveDDL)) + col1.SetName(planner.GetPlanTitle(explainQuery.QueryPlan, txnHaveDDL)) mrs := ses.GetMysqlResultSet() mrs.AddColumn(col1) @@ -1209,7 +1209,7 @@ func createPrepareStmt( dcPrepare, ok := preparePlan.GetDcl().Control.(*plan.DataControl_Prepare) if ok { - columns := plan2.GetResultColumnsFromPlan(dcPrepare.Prepare.Plan) + columns := planner.GetResultColumnsFromPlan(dcPrepare.Prepare.Plan) if prepareStmt.ColDefData, err = execCtx.resper.MysqlRrWr().MakeColumnDefData(execCtx.reqCtx, columns); err != nil { logutil.Errorf("Error make column def data for prepare statement: %v", err) } @@ -1712,7 +1712,7 @@ func doShowCollation(ses *Session, execCtx *ExecCtx, proc *process.Process, sc * } if sc.Where != nil { - binder := plan2.NewDefaultBinder(execCtx.reqCtx, nil, nil, plan2.Type{Id: int32(types.T_varchar), Width: types.MaxVarcharLen}, []string{"collation", "charset", "id", "default", "compiled", "sortlen", "pad_attribute"}) + binder := planner.NewDefaultBinder(execCtx.reqCtx, nil, nil, 
plan.Type{Id: int32(types.T_varchar), Width: types.MaxVarcharLen}, []string{"collation", "charset", "id", "default", "compiled", "sortlen", "pad_attribute"}) planExpr, err := binder.BindExpr(sc.Where.Expr, 0, false) if err != nil { return err @@ -2005,8 +2005,8 @@ func buildMoExplainPhyPlan(execCtx *ExecCtx, explainColName string, reader *bufi return err } -func buildPlan(reqCtx context.Context, ses FeSession, ctx plan2.CompilerContext, stmt tree.Statement) (*plan2.Plan, error) { - var ret *plan2.Plan +func buildPlan(reqCtx context.Context, ses FeSession, ctx planner.CompilerContext, stmt tree.Statement) (*plan.Plan, error) { + var ret *plan.Plan var err error txnOp := ctx.GetProcess().GetTxnOperator() @@ -2073,7 +2073,7 @@ func buildPlan(reqCtx context.Context, ses FeSession, ctx plan2.CompilerContext, // Handle specific statement types if s, ok := stmt.(*tree.Insert); ok { if _, ok := s.Rows.Select.(*tree.ValuesClause); ok { - ret, err = plan2.BuildPlan(ctx, stmt, isPrepareStmt) + ret, err = planner.BuildPlan(ctx, stmt, isPrepareStmt) if err != nil { return nil, err } @@ -2092,19 +2092,19 @@ func buildPlan(reqCtx context.Context, ses FeSession, ctx plan2.CompilerContext, *tree.ShowDatabases, *tree.ShowTables, *tree.ShowSequences, *tree.ShowColumns, *tree.ShowColumnNumber, *tree.ShowTableNumber, *tree.ShowCreateDatabase, *tree.ShowCreateTable, *tree.ShowIndex, *tree.ExplainStmt, *tree.ExplainAnalyze, *tree.ExplainPhyPlan: - opt := plan2.NewBaseOptimizer(ctx) + opt := planner.NewBaseOptimizer(ctx) optimized, err := opt.Optimize(stmt, isPrepareStmt) if err != nil { return nil, err } - ret = &plan2.Plan{ - Plan: &plan2.Plan_Query{ + ret = &plan.Plan{ + Plan: &plan.Plan_Query{ Query: optimized, }, } default: - ret, err = plan2.BuildPlan(ctx, stmt, isPrepareStmt) + ret, err = planner.BuildPlan(ctx, stmt, isPrepareStmt) } if ret != nil { @@ -2115,7 +2115,7 @@ func buildPlan(reqCtx context.Context, ses FeSession, ctx plan2.CompilerContext, // buildPlanWithAuthorization 
wraps the buildPlan function to perform permission checks // after the plan has been successfully built. -var buildPlanWithAuthorization = func(reqCtx context.Context, ses FeSession, ctx plan2.CompilerContext, stmt tree.Statement) (*plan2.Plan, error) { +var buildPlanWithAuthorization = func(reqCtx context.Context, ses FeSession, ctx planner.CompilerContext, stmt tree.Statement) (*plan.Plan, error) { planContext := ctx.GetContext() stats := statistic.StatsInfoFromContext(planContext) @@ -2137,7 +2137,7 @@ var buildPlanWithAuthorization = func(reqCtx context.Context, ses FeSession, ctx return plan, nil } -func checkModify(plan0 *plan.Plan, resolveFn func(string, string, *plan2.Snapshot) (*plan2.ObjectRef, *plan2.TableDef, error)) (bool, error) { +func checkModify(plan0 *plan.Plan, resolveFn func(string, string, *plan.Snapshot) (*plan.ObjectRef, *plan.TableDef, error)) (bool, error) { if plan0 == nil { return true, nil } @@ -2492,7 +2492,7 @@ func processLoadLocal(ses FeSession, execCtx *ExecCtx, param *tree.ExternParam, //free load local buffer anyway mysqlRwer.FreeLoadLocal() }() - err = plan2.InitInfileParam(param) + err = planner.InitInfileParam(param) if err != nil { return } @@ -3297,11 +3297,11 @@ func doComQuery(ses *Session, execCtx *ExecCtx, input *UserInput) (retErr error) return nil } -func checkNodeCanCache(p *plan2.Plan) bool { +func checkNodeCanCache(p *plan.Plan) bool { if p == nil { return true } - if q, ok := p.Plan.(*plan2.Plan_Query); ok { + if q, ok := p.Plan.(*plan.Plan_Query); ok { for _, node := range q.Query.Nodes { if node.NotCacheable { return false @@ -3673,7 +3673,7 @@ type jsonPlanHandler struct { buffer *bytes.Buffer } -func NewJsonPlanHandler(ctx context.Context, stmt *motrace.StatementInfo, ses FeSession, plan *plan2.Plan, phyPlan *models.PhyPlan, opts ...marshalPlanOptions) *jsonPlanHandler { +func NewJsonPlanHandler(ctx context.Context, stmt *motrace.StatementInfo, ses FeSession, plan *plan.Plan, phyPlan *models.PhyPlan, opts 
...marshalPlanOptions) *jsonPlanHandler { h := NewMarshalPlanHandler(ctx, stmt, plan, phyPlan, opts...) jsonBytes := h.Marshal(ctx) statsBytes, stats := h.Stats(ctx, ses) @@ -3725,7 +3725,7 @@ type marshalPlanHandler struct { marshalPlanConfig } -func NewMarshalPlanHandler(ctx context.Context, stmt *motrace.StatementInfo, plan *plan2.Plan, phyPlan *models.PhyPlan, opts ...marshalPlanOptions) *marshalPlanHandler { +func NewMarshalPlanHandler(ctx context.Context, stmt *motrace.StatementInfo, plan *plan.Plan, phyPlan *models.PhyPlan, opts ...marshalPlanOptions) *marshalPlanHandler { // TODO: need mem improvement uuid := uuid.UUID(stmt.StatementID) stmt.MarkResponseAt() diff --git a/pkg/frontend/mysql_cmd_executor_test.go b/pkg/frontend/mysql_cmd_executor_test.go index 56773ede5f939..e3f4331773d18 100644 --- a/pkg/frontend/mysql_cmd_executor_test.go +++ b/pkg/frontend/mysql_cmd_executor_test.go @@ -25,11 +25,6 @@ import ( "github.com/golang/mock/gomock" "github.com/google/uuid" - "github.com/prashantv/gostub" - "github.com/smartystreets/goconvey/convey" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/matrixorigin/matrixone/pkg/catalog" "github.com/matrixorigin/matrixone/pkg/clusterservice" "github.com/matrixorigin/matrixone/pkg/common/moerr" @@ -44,20 +39,24 @@ import ( "github.com/matrixorigin/matrixone/pkg/frontend/constant" mock_frontend "github.com/matrixorigin/matrixone/pkg/frontend/test" "github.com/matrixorigin/matrixone/pkg/pb/metadata" - plan0 "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/pb/txn" "github.com/matrixorigin/matrixone/pkg/sql/parsers" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect/mysql" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan" - 
"github.com/matrixorigin/matrixone/pkg/sql/plan/explain" + "github.com/matrixorigin/matrixone/pkg/sql/planner" + "github.com/matrixorigin/matrixone/pkg/sql/planner/explain" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/util/fault" "github.com/matrixorigin/matrixone/pkg/util/trace/impl/motrace" "github.com/matrixorigin/matrixone/pkg/util/trace/impl/motrace/statistic" "github.com/matrixorigin/matrixone/pkg/vm/engine" "github.com/matrixorigin/matrixone/pkg/vm/process" + "github.com/prashantv/gostub" + "github.com/smartystreets/goconvey/convey" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func init() { @@ -1013,7 +1012,7 @@ func TestSerializePlanToJson(t *testing.T) { } for _, sql := range sqls { - mock := plan.NewMockOptimizer(false) + mock := planner.NewMockOptimizer(false) plan, err := buildSingleSql(mock, t, sql) if err != nil { t.Fatalf("%+v", err) @@ -1029,14 +1028,14 @@ func TestSerializePlanToJson(t *testing.T) { } } -func buildSingleSql(opt plan.Optimizer, t *testing.T, sql string) (*plan.Plan, error) { +func buildSingleSql(opt planner.Optimizer, t *testing.T, sql string) (*plan.Plan, error) { stmts, err := mysql.Parse(opt.CurrentContext().GetContext(), sql, 1) if err != nil { t.Fatalf("%+v", err) } // this sql always return one stmt ctx := opt.CurrentContext() - return plan.BuildPlan(ctx, stmts[0], false) + return planner.BuildPlan(ctx, stmts[0], false) } func Test_getSqlType(t *testing.T) { @@ -1829,35 +1828,35 @@ func Test_checkModify(t *testing.T) { }, { node: &plan.Node{ - InsertCtx: &plan0.InsertCtx{}, + InsertCtx: &plan.InsertCtx{}, }, expected_flag: true, expected_err: false, }, { node: &plan.Node{ - ReplaceCtx: &plan0.ReplaceCtx{}, + ReplaceCtx: &plan.ReplaceCtx{}, }, expected_flag: true, expected_err: false, }, { node: &plan.Node{ - DeleteCtx: &plan0.DeleteCtx{}, + DeleteCtx: &plan.DeleteCtx{}, }, expected_flag: true, expected_err: false, }, { node: &plan.Node{ - 
PreInsertCtx: &plan0.PreInsertCtx{}, + PreInsertCtx: &plan.PreInsertCtx{}, }, expected_flag: true, expected_err: false, }, { node: &plan.Node{ - OnDuplicateKey: &plan0.OnDuplicateKeyCtx{}, + OnDuplicateKey: &plan.OnDuplicateKeyCtx{}, }, expected_flag: true, expected_err: false, diff --git a/pkg/frontend/mysql_protocol.go b/pkg/frontend/mysql_protocol.go index bc5000bb31b5d..4b8ac724606e6 100644 --- a/pkg/frontend/mysql_protocol.go +++ b/pkg/frontend/mysql_protocol.go @@ -31,9 +31,6 @@ import ( "time" "unicode" - "go.uber.org/zap" - "golang.org/x/exp/slices" - "github.com/matrixorigin/matrixone/pkg/common/moerr" util2 "github.com/matrixorigin/matrixone/pkg/common/util" "github.com/matrixorigin/matrixone/pkg/config" @@ -42,13 +39,15 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/defines" "github.com/matrixorigin/matrixone/pkg/logutil" - planPb "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/pb/proxy" "github.com/matrixorigin/matrixone/pkg/perfcounter" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/sql/util" v2 "github.com/matrixorigin/matrixone/pkg/util/metric/v2" "github.com/matrixorigin/matrixone/pkg/vm/process" + "go.uber.org/zap" + "golang.org/x/exp/slices" ) // DefaultCapability means default capabilities of the server @@ -682,7 +681,7 @@ type response320 struct { } func (mp *MysqlProtocolImpl) SendPrepareResponse(ctx context.Context, stmt *PrepareStmt) error { - dcPrepare, ok := stmt.PreparePlan.GetDcl().Control.(*planPb.DataControl_Prepare) + dcPrepare, ok := stmt.PreparePlan.GetDcl().Control.(*plan.DataControl_Prepare) if !ok { return moerr.NewInternalError(ctx, "can not get Prepare plan in prepareStmt") } @@ -692,7 +691,7 @@ func (mp *MysqlProtocolImpl) SendPrepareResponse(ctx context.Context, stmt *Prep } 
paramTypes := dcPrepare.Prepare.ParamTypes numParams := len(paramTypes) - columns := plan2.GetResultColumnsFromPlan(dcPrepare.Prepare.Plan) + columns := planner.GetResultColumnsFromPlan(dcPrepare.Prepare.Plan) numColumns := len(columns) var data []byte @@ -765,7 +764,7 @@ func (mp *MysqlProtocolImpl) SendPrepareResponse(ctx context.Context, stmt *Prep func (mp *MysqlProtocolImpl) ParseSendLongData(ctx context.Context, proc *process.Process, stmt *PrepareStmt, data []byte, pos int) error { var err error - dcPrepare, ok := stmt.PreparePlan.GetDcl().Control.(*planPb.DataControl_Prepare) + dcPrepare, ok := stmt.PreparePlan.GetDcl().Control.(*plan.DataControl_Prepare) if !ok { return moerr.NewInternalError(ctx, "can not get Prepare plan in prepareStmt") } @@ -801,7 +800,7 @@ func (mp *MysqlProtocolImpl) ParseSendLongData(ctx context.Context, proc *proces func (mp *MysqlProtocolImpl) ParseExecuteData(ctx context.Context, proc *process.Process, stmt *PrepareStmt, data []byte, pos int) error { var err error - dcPrepare, ok := stmt.PreparePlan.GetDcl().Control.(*planPb.DataControl_Prepare) + dcPrepare, ok := stmt.PreparePlan.GetDcl().Control.(*plan.DataControl_Prepare) if !ok { return moerr.NewInternalError(ctx, "can not get Prepare plan in prepareStmt") } @@ -2204,7 +2203,7 @@ func (mp *MysqlProtocolImpl) makeColumnDefinition41Payload(column *MysqlColumn, return data[:pos] } -func (mp *MysqlProtocolImpl) MakeColumnDefData(ctx context.Context, columns []*planPb.ColDef) ([][]byte, error) { +func (mp *MysqlProtocolImpl) MakeColumnDefData(ctx context.Context, columns []*plan.ColDef) ([][]byte, error) { numColumns := len(columns) colDefData := make([][]byte, 0, numColumns) for i := 0; i < numColumns; i++ { diff --git a/pkg/frontend/mysql_protocol_test.go b/pkg/frontend/mysql_protocol_test.go index b81b721395a27..976ca085a9e8f 100644 --- a/pkg/frontend/mysql_protocol_test.go +++ b/pkg/frontend/mysql_protocol_test.go @@ -32,11 +32,6 @@ import ( mysqlDriver 
"github.com/go-sql-driver/mysql" "github.com/golang/mock/gomock" fuzz "github.com/google/gofuzz" - "github.com/prashantv/gostub" - "github.com/smartystreets/goconvey/convey" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/common/mpool" "github.com/matrixorigin/matrixone/pkg/config" @@ -47,18 +42,22 @@ import ( "github.com/matrixorigin/matrixone/pkg/defines" mock_frontend "github.com/matrixorigin/matrixone/pkg/frontend/test" "github.com/matrixorigin/matrixone/pkg/logutil" - planPb "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/pb/txn" "github.com/matrixorigin/matrixone/pkg/perfcounter" "github.com/matrixorigin/matrixone/pkg/sql/parsers" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect/mysql" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/vm/engine" "github.com/matrixorigin/matrixone/pkg/vm/process" + "github.com/prashantv/gostub" + "github.com/smartystreets/goconvey/convey" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func registerConn(clientConn net.Conn) { @@ -2317,7 +2316,7 @@ func TestSendPrepareResponse(t *testing.T) { if err != nil { t.Error(err) } - compCtx := plan.NewEmptyCompilerContext() + compCtx := planner.NewEmptyCompilerContext() preparePlan, err := buildPlan(context.TODO(), nil, compCtx, st) if err != nil { t.Error(err) @@ -2350,7 +2349,7 @@ func TestSendPrepareResponse(t *testing.T) { if err != nil { t.Error(err) } - compCtx := plan.NewEmptyCompilerContext() + compCtx := 
planner.NewEmptyCompilerContext() preparePlan, err := buildPlan(context.TODO(), nil, compCtx, st) if err != nil { t.Error(err) @@ -2388,7 +2387,7 @@ func FuzzParseExecuteData(f *testing.F) { if err != nil { f.Error(err) } - compCtx := plan.NewEmptyCompilerContext() + compCtx := planner.NewEmptyCompilerContext() preparePlan, err := buildPlan(context.TODO(), nil, compCtx, st) if err != nil { f.Error(err) @@ -3216,7 +3215,7 @@ func (fp *testMysqlWriter) Flush() error { return nil } -func (fp *testMysqlWriter) MakeColumnDefData(ctx context.Context, columns []*planPb.ColDef) ([][]byte, error) { +func (fp *testMysqlWriter) MakeColumnDefData(ctx context.Context, columns []*plan.ColDef) ([][]byte, error) { return nil, nil } diff --git a/pkg/frontend/pitr.go b/pkg/frontend/pitr.go index a48a71cdb10f7..2f6c2f942eef7 100644 --- a/pkg/frontend/pitr.go +++ b/pkg/frontend/pitr.go @@ -22,19 +22,18 @@ import ( "time" "github.com/google/uuid" - "go.uber.org/zap" - "github.com/matrixorigin/matrixone/pkg/catalog" "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/defines" - pbplan "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/pb/timestamp" "github.com/matrixorigin/matrixone/pkg/sql/parsers" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect/mysql" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/util/trace/impl/motrace/statistic" + "go.uber.org/zap" ) var ( @@ -1718,12 +1717,12 @@ func restoreViewsWithPitr( err error stmts []tree.Statement sortedViews []string - snapshot *pbplan.Snapshot - oldSnapshot *pbplan.Snapshot + snapshot *plan.Snapshot + oldSnapshot *plan.Snapshot ) - snapshot = &pbplan.Snapshot{ + snapshot = &plan.Snapshot{ TS: 
&timestamp.Timestamp{PhysicalTime: ts}, - Tenant: &pbplan.SnapshotTenant{ + Tenant: &plan.SnapshotTenant{ TenantName: accountName, TenantID: curAccount, }, @@ -1746,10 +1745,10 @@ func restoreViewsWithPitr( compCtx.SetDatabase(viewEntry.dbName) // build create sql to find dependent views - _, err = plan.BuildPlan(compCtx, stmts[0], false) + _, err = planner.BuildPlan(compCtx, stmts[0], false) if err != nil { stmts, _ = parsers.Parse(ctx, dialect.MYSQL, viewEntry.createSql, 0) - _, err = plan.BuildPlan(compCtx, stmts[0], false) + _, err = planner.BuildPlan(compCtx, stmts[0], false) if err != nil { return err } diff --git a/pkg/frontend/plan_cache.go b/pkg/frontend/plan_cache.go index c422a770891cc..a05155a059bd1 100644 --- a/pkg/frontend/plan_cache.go +++ b/pkg/frontend/plan_cache.go @@ -17,8 +17,8 @@ package frontend import ( "container/list" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan" ) type cachedPlan struct { diff --git a/pkg/frontend/result_row_stmt.go b/pkg/frontend/result_row_stmt.go index 3705475c1df72..ebc342c834e16 100644 --- a/pkg/frontend/result_row_stmt.go +++ b/pkg/frontend/result_row_stmt.go @@ -20,20 +20,19 @@ import ( "strings" "time" - "go.uber.org/zap" - "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/explain" + "github.com/matrixorigin/matrixone/pkg/sql/planner" + "github.com/matrixorigin/matrixone/pkg/sql/planner/explain" + "go.uber.org/zap" ) -func GetExplainColumn(ctx context.Context, explainColName string) ([]*plan2.ColDef, 
[]interface{}, error) { + cols := []*plan.ColDef{ { - Typ: plan2.Type{Id: int32(types.T_varchar)}, + Typ: plan.Type{Id: int32(types.T_varchar)}, Name: strings.ToLower(explainColName), OriginName: explainColName, }, @@ -53,7 +52,7 @@ func GetExplainColumn(ctx context.Context, explainColName string) ([]*plan2.ColD // executeResultRowStmt run the statemet that responses result rows func executeResultRowStmt(ses *Session, execCtx *ExecCtx) (err error) { var columns []interface{} - var colDefs []*plan2.ColDef + var colDefs []*plan.ColDef ses.EnterFPrint(FPResultRowStmt) defer ses.ExitFPrint(FPResultRowStmt) switch statement := execCtx.stmt.(type) { @@ -67,7 +66,7 @@ func executeResultRowStmt(ses *Session, execCtx *ExecCtx) (err error) { return } - ses.rs = &plan.ResultColDef{ResultCols: plan2.GetResultColumnsFromPlan(execCtx.cw.Plan())} + ses.rs = &plan.ResultColDef{ResultCols: planner.GetResultColumnsFromPlan(execCtx.cw.Plan())} ses.EnterFPrint(FPResultRowStmtSelect1) defer ses.ExitFPrint(FPResultRowStmtSelect1) @@ -104,9 +103,9 @@ func executeResultRowStmt(ses *Session, execCtx *ExecCtx) (err error) { var explainColName string if _, ok := statement.(*tree.ExplainAnalyze); ok { - explainColName = plan2.GetPlanTitle(query, txnHaveDDL) + explainColName = planner.GetPlanTitle(query, txnHaveDDL) } else { - explainColName = plan2.GetPhyPlanTitle(query, txnHaveDDL) + explainColName = planner.GetPhyPlanTitle(query, txnHaveDDL) } colDefs, columns, err = GetExplainColumn(reqCtx, explainColName) @@ -152,7 +151,7 @@ func executeResultRowStmt(ses *Session, execCtx *ExecCtx) (err error) { return } - ses.rs = &plan.ResultColDef{ResultCols: plan2.GetResultColumnsFromPlan(execCtx.cw.Plan())} + ses.rs = &plan.ResultColDef{ResultCols: planner.GetResultColumnsFromPlan(execCtx.cw.Plan())} ses.EnterFPrint(FPResultRowStmtDefault1) defer ses.ExitFPrint(FPResultRowStmtDefault1) @@ -271,11 +270,11 @@ func (resper *MysqlResp) respStreamResultRow(ses *Session, if ws != nil { txnHaveDDL = 
ws.GetHaveDDL() } - explainColName := plan2.GetPlanTitle(queryPlan.GetQuery(), txnHaveDDL) + explainColName := planner.GetPlanTitle(queryPlan.GetQuery(), txnHaveDDL) //if it is the plan from the EXECUTE, // replace the plan by the plan generated by the PREPARE if len(execCtx.cw.ParamVals()) != 0 { - queryPlan, err = plan2.FillValuesOfParamsInPlan(execCtx.reqCtx, queryPlan, execCtx.cw.ParamVals()) + queryPlan, err = planner.FillValuesOfParamsInPlan(execCtx.reqCtx, queryPlan, execCtx.cw.ParamVals()) if err != nil { return } @@ -313,7 +312,7 @@ func (resper *MysqlResp) respStreamResultRow(ses *Session, if ws != nil { txnHaveDDL = ws.GetHaveDDL() } - explainColName := plan2.GetPlanTitle(queryPlan.GetQuery(), txnHaveDDL) + explainColName := planner.GetPlanTitle(queryPlan.GetQuery(), txnHaveDDL) txnCompileWrapper := execCtx.cw.(*TxnComputationWrapper) reader := bufio.NewReader(txnCompileWrapper.explainBuffer) diff --git a/pkg/frontend/routine_test.go b/pkg/frontend/routine_test.go index 86f1789c7ebd3..3e2cd2a473b66 100644 --- a/pkg/frontend/routine_test.go +++ b/pkg/frontend/routine_test.go @@ -26,17 +26,11 @@ import ( "github.com/golang/mock/gomock" "github.com/google/uuid" - "github.com/prashantv/gostub" - pcg "github.com/prometheus/client_model/go" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "golang.org/x/sync/errgroup" - "github.com/matrixorigin/matrixone/pkg/config" "github.com/matrixorigin/matrixone/pkg/defines" mock_frontend "github.com/matrixorigin/matrixone/pkg/frontend/test" "github.com/matrixorigin/matrixone/pkg/logutil" - plan2 "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/parsers" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" @@ -44,6 +38,11 @@ import ( "github.com/matrixorigin/matrixone/pkg/util/metric" "github.com/matrixorigin/matrixone/pkg/vm/engine" 
"github.com/matrixorigin/matrixone/pkg/vm/process" + "github.com/prashantv/gostub" + pcg "github.com/prometheus/client_model/go" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "golang.org/x/sync/errgroup" ) func Test_inc_dec(t *testing.T) { @@ -146,7 +145,7 @@ var newMockWrapper = func(ctrl *gomock.Controller, ses *Session, mcw.EXPECT().GetLoadTag().Return(false).AnyTimes() mcw.EXPECT().Clear().AnyTimes() mcw.EXPECT().Free().AnyTimes() - mcw.EXPECT().Plan().Return(&plan2.Plan{}).AnyTimes() + mcw.EXPECT().Plan().Return(&plan.Plan{}).AnyTimes() mcw.EXPECT().BinaryExecute().Return(false, "").AnyTimes() return mcw } diff --git a/pkg/frontend/session.go b/pkg/frontend/session.go index 648116145ccf4..a3b8d48f190e6 100644 --- a/pkg/frontend/session.go +++ b/pkg/frontend/session.go @@ -27,9 +27,6 @@ import ( "time" "github.com/google/uuid" - "go.uber.org/zap" - "go.uber.org/zap/zapcore" - "github.com/matrixorigin/matrixone/pkg/bootstrap/versions" "github.com/matrixorigin/matrixone/pkg/common/buffer" "github.com/matrixorigin/matrixone/pkg/common/log" @@ -44,13 +41,15 @@ import ( "github.com/matrixorigin/matrixone/pkg/perfcounter" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect/mysql" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/util" db_holder "github.com/matrixorigin/matrixone/pkg/util/export/etl/db" v2 "github.com/matrixorigin/matrixone/pkg/util/metric/v2" "github.com/matrixorigin/matrixone/pkg/util/trace" "github.com/matrixorigin/matrixone/pkg/util/trace/impl/motrace" "github.com/matrixorigin/matrixone/pkg/vm/process" + "go.uber.org/zap" + "go.uber.org/zap/zapcore" ) var ( @@ -193,7 +192,7 @@ type Session struct { planCache *planCache - statsCache *plan2.StatsCache + statsCache *planner.StatsCache seqCurValues map[uint64]string /* @@ -379,7 +378,7 @@ func (ses 
*Session) GetProc() *process.Process { return ses.proc } -func (ses *Session) GetStatsCache() *plan2.StatsCache { +func (ses *Session) GetStatsCache() *planner.StatsCache { return ses.statsCache } @@ -664,7 +663,7 @@ func NewSession( connType: ConnTypeUnset, timestampMap: map[TS]time.Time{}, - statsCache: plan2.NewStatsCache(), + statsCache: planner.NewStatsCache(), } atomic.StoreInt32(&ses.sqlModeNoAutoValueOnZero, -1) diff --git a/pkg/frontend/session_test.go b/pkg/frontend/session_test.go index 6225fac860b23..ced88703ac00a 100644 --- a/pkg/frontend/session_test.go +++ b/pkg/frontend/session_test.go @@ -36,7 +36,6 @@ import ( "github.com/matrixorigin/matrixone/pkg/pb/timestamp" "github.com/matrixorigin/matrixone/pkg/pb/txn" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect/mysql" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/util/toml" @@ -363,12 +362,12 @@ func TestSession_TxnCompilerContext(t *testing.T) { tcc.execCtx = &ExecCtx{reqCtx: ctx, ses: ses} defDBName := tcc.DefaultDatabase() convey.So(defDBName, convey.ShouldEqual, "") - convey.So(tcc.DatabaseExists("abc", &plan2.Snapshot{TS: ts}), convey.ShouldBeTrue) + convey.So(tcc.DatabaseExists("abc", &plan.Snapshot{TS: ts}), convey.ShouldBeTrue) - _, _, err := tcc.getRelation("abc", "t1", nil, &plan2.Snapshot{TS: ts}) + _, _, err := tcc.getRelation("abc", "t1", nil, &plan.Snapshot{TS: ts}) convey.So(err, convey.ShouldBeNil) - object, tableRef, _ := tcc.Resolve("abc", "t1", &plan2.Snapshot{TS: ts}) + object, tableRef, _ := tcc.Resolve("abc", "t1", &plan.Snapshot{TS: ts}) convey.So(object, convey.ShouldNotBeNil) convey.So(tableRef, convey.ShouldNotBeNil) @@ -378,11 +377,11 @@ func TestSession_TxnCompilerContext(t *testing.T) { TenantId: 0, }, } - object, tableRef, _ = tcc.ResolveIndexTableByRef(ref, "indexTable", &plan2.Snapshot{TS: ts}) + object, tableRef, 
_ = tcc.ResolveIndexTableByRef(ref, "indexTable", &plan.Snapshot{TS: ts}) convey.So(object, convey.ShouldNotBeNil) convey.So(tableRef, convey.ShouldNotBeNil) - stats, err := tcc.Stats(&plan2.ObjectRef{SchemaName: "abc", ObjName: "t1"}, &plan2.Snapshot{TS: ts}) + stats, err := tcc.Stats(&plan.ObjectRef{SchemaName: "abc", ObjName: "t1"}, &plan.Snapshot{TS: ts}) convey.So(err, convey.ShouldBeNil) convey.So(stats, convey.ShouldBeNil) }) @@ -436,7 +435,7 @@ func TestSession_ResolveTempIndexTable(t *testing.T) { // In the current bug state, the code in ResolveIndexTableByRef (after my previous fix) handles nil table but throws NoSuchTable. // To reproduce the original panic (if we reverted) or the "no such table" error: - _, _, err := tcc.ResolveIndexTableByRef(ref, "index_table", &plan2.Snapshot{}) + _, _, err := tcc.ResolveIndexTableByRef(ref, "index_table", &plan.Snapshot{}) // If the index was correctly registered, we would mock it: // ses.AddTempTable("db1", "index_table", "mo_index_table_real") diff --git a/pkg/frontend/show_account.go b/pkg/frontend/show_account.go index 43808fb8271cf..949dcb3b86313 100644 --- a/pkg/frontend/show_account.go +++ b/pkg/frontend/show_account.go @@ -30,8 +30,8 @@ import ( "github.com/matrixorigin/matrixone/pkg/logutil" "github.com/matrixorigin/matrixone/pkg/pb/api" "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/sql/function/ctl" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function/ctl" "github.com/matrixorigin/matrixone/pkg/util/metric/mometric" v2 "github.com/matrixorigin/matrixone/pkg/util/metric/v2" "github.com/matrixorigin/matrixone/pkg/vm/engine" diff --git a/pkg/frontend/snapshot.go b/pkg/frontend/snapshot.go index 9682350adb86a..708b9b18f453f 100644 --- a/pkg/frontend/snapshot.go +++ b/pkg/frontend/snapshot.go @@ -30,13 +30,13 @@ import ( "github.com/matrixorigin/matrixone/pkg/common/moerr" 
"github.com/matrixorigin/matrixone/pkg/common/pubsub" "github.com/matrixorigin/matrixone/pkg/defines" - pbplan "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/pb/timestamp" "github.com/matrixorigin/matrixone/pkg/sql/parsers" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect/mysql" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/util/trace/impl/motrace/statistic" ) @@ -1390,11 +1390,11 @@ func sortedViewInfos( compCtx.SetDatabase(viewEntry.dbName) // build create sql to find dependent views - _, err = plan.BuildPlan(compCtx, stmts[0], false) + _, err = planner.BuildPlan(compCtx, stmts[0], false) if err != nil { getLogger(ses.GetService()).Debug(fmt.Sprintf("try to build view %v failed, try to build it again", viewEntry.tblName)) stmts, _ = parsers.Parse(ctx, dialect.MYSQL, viewEntry.createSql, 0) - _, err = plan.BuildPlan(compCtx, stmts[0], false) + _, err = planner.BuildPlan(compCtx, stmts[0], false) if err != nil { return nil, err } @@ -1608,7 +1608,7 @@ func getSnapshotByName(ctx context.Context, bh BackgroundExec, snapshotName stri } } -func doResolveSnapshotWithSnapshotName(ctx context.Context, ses FeSession, snapshotName string) (snapshot *pbplan.Snapshot, err error) { +func doResolveSnapshotWithSnapshotName(ctx context.Context, ses FeSession, snapshotName string) (snapshot *plan.Snapshot, err error) { bh := ses.GetShareTxnBackgroundExec(ctx, false) defer bh.Close() @@ -1635,13 +1635,13 @@ func doResolveSnapshotWithSnapshotName(ctx context.Context, ses FeSession, snaps } } - return &pbplan.Snapshot{ + return &plan.Snapshot{ TS: ×tamp.Timestamp{PhysicalTime: record.ts}, - Tenant: &pbplan.SnapshotTenant{ + Tenant: &plan.SnapshotTenant{ TenantName: 
record.accountName, TenantID: accountId, }, - ExtraInfo: &pbplan.SnapshotExtraInfo{ + ExtraInfo: &plan.SnapshotExtraInfo{ Level: record.level, ObjId: record.objId, Name: record.snapshotName, @@ -2541,7 +2541,7 @@ func getPastExistsAccounts( return } -func getSnapshotPlanWithSharedBh(ctx context.Context, bh BackgroundExec, fromAccountId uint32, snapshotName string) (snapshot *pbplan.Snapshot, err error) { +func getSnapshotPlanWithSharedBh(ctx context.Context, bh BackgroundExec, fromAccountId uint32, snapshotName string) (snapshot *plan.Snapshot, err error) { var record *snapshotRecord if record, err = getSnapshotByName(ctx, bh, snapshotName); err != nil { return @@ -2552,9 +2552,9 @@ func getSnapshotPlanWithSharedBh(ctx context.Context, bh BackgroundExec, fromAcc return } - return &pbplan.Snapshot{ + return &plan.Snapshot{ TS: ×tamp.Timestamp{PhysicalTime: record.ts}, - Tenant: &pbplan.SnapshotTenant{ + Tenant: &plan.SnapshotTenant{ TenantName: record.accountName, TenantID: fromAccountId, }, diff --git a/pkg/frontend/snapshot_restore_with_ts.go b/pkg/frontend/snapshot_restore_with_ts.go index e591972501fb8..70e7229bb5ccb 100644 --- a/pkg/frontend/snapshot_restore_with_ts.go +++ b/pkg/frontend/snapshot_restore_with_ts.go @@ -21,12 +21,12 @@ import ( "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/defines" - pbplan "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/pb/timestamp" "github.com/matrixorigin/matrixone/pkg/sql/parsers" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" ) func fkTablesTopoSortWithTS(ctx context.Context, bh BackgroundExec, dbName string, tblName string, ts int64, from, to uint32) (sortedTbls []string, err error) { @@ -644,9 +644,9 @@ func 
restoreViewsFromTS( sortedViews []string oldSnapshot *plan.Snapshot ) - snapshot = &pbplan.Snapshot{ + snapshot = &plan.Snapshot{ TS: ×tamp.Timestamp{PhysicalTime: snapshotTs}, - Tenant: &pbplan.SnapshotTenant{ + Tenant: &plan.SnapshotTenant{ TenantName: restoreAccountName, TenantID: restoreAccount, }, @@ -669,11 +669,11 @@ func restoreViewsFromTS( compCtx.SetDatabase(viewEntry.dbName) // build create sql to find dependent views - _, err = plan.BuildPlan(compCtx, stmts[0], false) + _, err = planner.BuildPlan(compCtx, stmts[0], false) if err != nil { getLogger(ses.GetService()).Info(fmt.Sprintf("try to build view %v failed, try to build it again", viewEntry.tblName)) stmts, _ = parsers.Parse(ctx, dialect.MYSQL, viewEntry.createSql, 0) - _, err = plan.BuildPlan(compCtx, stmts[0], false) + _, err = planner.BuildPlan(compCtx, stmts[0], false) if err != nil { return err } diff --git a/pkg/frontend/starlark_interpreter.go b/pkg/frontend/starlark_interpreter.go index c91a414e571d9..0ec3223d930e1 100644 --- a/pkg/frontend/starlark_interpreter.go +++ b/pkg/frontend/starlark_interpreter.go @@ -22,7 +22,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/logutil" "github.com/matrixorigin/matrixone/pkg/monlp/llm" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" ujson "github.com/matrixorigin/matrixone/pkg/util/json" "go.starlark.net/starlark" "go.starlark.net/starlarkstruct" @@ -294,7 +294,7 @@ func (si *starlarkInterpreter) moQuote(thread *starlark.Thread, b *starlark.Buil return starlark.NewList(ret), nil } - ret[0] = starlark.String(plan.EscapeFormat(s)) + ret[0] = starlark.String(planner.EscapeFormat(s)) return starlark.NewList(ret), nil } diff --git a/pkg/frontend/types.go b/pkg/frontend/types.go index 95dfaffdb7334..fc2bb17847803 100644 --- a/pkg/frontend/types.go +++ b/pkg/frontend/types.go @@ -24,9 +24,6 @@ import ( "time" "github.com/google/uuid" - 
"go.uber.org/zap" - "go.uber.org/zap/zapcore" - "github.com/matrixorigin/matrixone/pkg/common/buffer" "github.com/matrixorigin/matrixone/pkg/common/malloc" "github.com/matrixorigin/matrixone/pkg/common/moerr" @@ -42,13 +39,15 @@ import ( "github.com/matrixorigin/matrixone/pkg/sql/models" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect/mysql" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/util" metric "github.com/matrixorigin/matrixone/pkg/util/metric/v2" "github.com/matrixorigin/matrixone/pkg/util/trace/impl/motrace" "github.com/matrixorigin/matrixone/pkg/util/trace/impl/motrace/statistic" "github.com/matrixorigin/matrixone/pkg/vm/process" + "go.uber.org/zap" + "go.uber.org/zap/zapcore" ) const ( @@ -472,7 +471,7 @@ var _ FeSession = &backSession{} type FeSession interface { GetService() string GetTimeZone() *time.Location - GetStatsCache() *plan2.StatsCache + GetStatsCache() *planner.StatsCache GetUserName() string GetSql() string GetAccountId() uint32 diff --git a/pkg/frontend/util.go b/pkg/frontend/util.go index f482aa36d1839..e2a2752a4625b 100644 --- a/pkg/frontend/util.go +++ b/pkg/frontend/util.go @@ -30,9 +30,6 @@ import ( "github.com/BurntSushi/toml" "github.com/google/uuid" - "github.com/petermattis/goid" - "go.uber.org/zap" - "github.com/matrixorigin/matrixone/pkg/cdc" "github.com/matrixorigin/matrixone/pkg/common/log" "github.com/matrixorigin/matrixone/pkg/common/moerr" @@ -52,8 +49,10 @@ import ( "github.com/matrixorigin/matrixone/pkg/sql/parsers" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/vm/engine" + 
"github.com/petermattis/goid" + "go.uber.org/zap" ) type CloseFlag struct { @@ -279,9 +278,9 @@ func getExprValue(e tree.Expr, ses *Session, execCtx *ExecCtx) (interface{}, err var planExpr *plan.Expr oid := resultVec.GetType().Oid if oid == types.T_decimal64 || oid == types.T_decimal128 { - builder := plan2.NewQueryBuilder(plan.Query_SELECT, ses.GetTxnCompileCtx(), false, false) - bindContext := plan2.NewBindContext(builder, nil) - binder := plan2.NewSetVarBinder(builder, bindContext) + builder := planner.NewQueryBuilder(plan.Query_SELECT, ses.GetTxnCompileCtx(), false, false) + bindContext := planner.NewBindContext(builder, nil) + binder := planner.NewSetVarBinder(builder, bindContext) planExpr, err = binder.BindExpr(e, 0, false) if err != nil { return nil, err @@ -298,9 +297,9 @@ func GetSimpleExprValue(ctx context.Context, e tree.Expr, feSes FeSession) (inte // set @a = on, type of a is bool. return v.ColName(), nil default: - builder := plan2.NewQueryBuilder(plan.Query_SELECT, feSes.GetTxnCompileCtx(), false, false) - bindContext := plan2.NewBindContext(builder, nil) - binder := plan2.NewSetVarBinder(builder, bindContext) + builder := planner.NewQueryBuilder(plan.Query_SELECT, feSes.GetTxnCompileCtx(), false, false) + bindContext := planner.NewBindContext(builder, nil) + binder := planner.NewSetVarBinder(builder, bindContext) planExpr, err := binder.BindExpr(e, 0, false) if err != nil { return nil, err @@ -325,7 +324,7 @@ func GetSimpleExprValue(ctx context.Context, e tree.Expr, feSes FeSession) (inte } } -func getValueFromVector(ctx context.Context, vec *vector.Vector, feSes FeSession, expr *plan2.Expr) (interface{}, error) { +func getValueFromVector(ctx context.Context, vec *vector.Vector, feSes FeSession, expr *plan.Expr) (interface{}, error) { if vec.IsConstNull() || vec.GetNulls().Contains(0) { return nil, nil } diff --git a/pkg/frontend/util_test.go b/pkg/frontend/util_test.go index 11256df86ab32..74e3efd1dffd8 100644 --- a/pkg/frontend/util_test.go +++ 
b/pkg/frontend/util_test.go @@ -26,12 +26,6 @@ import ( "time" "github.com/golang/mock/gomock" - "github.com/prashantv/gostub" - "github.com/smartystreets/goconvey/convey" - cvey "github.com/smartystreets/goconvey/convey" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/matrixorigin/matrixone/pkg/catalog" "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/common/mpool" @@ -41,19 +35,23 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/defines" mock_frontend "github.com/matrixorigin/matrixone/pkg/frontend/test" - plan2 "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/pb/timestamp" "github.com/matrixorigin/matrixone/pkg/pb/txn" "github.com/matrixorigin/matrixone/pkg/sql/parsers" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan" "github.com/matrixorigin/matrixone/pkg/sql/util" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/util/toml" "github.com/matrixorigin/matrixone/pkg/vm/engine" "github.com/matrixorigin/matrixone/pkg/vm/engine/memoryengine" "github.com/matrixorigin/matrixone/pkg/vm/process" + "github.com/prashantv/gostub" + "github.com/smartystreets/goconvey/convey" + cvey "github.com/smartystreets/goconvey/convey" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func init() { @@ -1487,7 +1485,7 @@ func Test_BuildTableDefFromMoColumns(t *testing.T) { typBytes, err := typ.Marshal() assert.Nil(t, err) - def := new(plan2.Default) + def := new(plan.Default) defBytes, err := types.Encode(def) assert.Nil(t, err) @@ -1596,7 +1594,7 @@ func Test_extractTableDefColumns(t *testing.T) { typ := new(types.Type) typByte, _ := typ.Marshal() - def := 
new(plan2.Default) + def := new(plan.Default) result := newMrsForPasswordOfUser([][]interface{}{{ def, @@ -1622,7 +1620,7 @@ func Test_extractTableDefColumns(t *testing.T) { var newTestExecResult = func() ExecResult { - def := new(plan2.Default) + def := new(plan.Default) result := newMrsForPasswordOfUser([][]interface{}{{ "id", @@ -1650,7 +1648,7 @@ func Test_extractTableDefColumns(t *testing.T) { typ := new(types.Type) typByte, _ := typ.Marshal() - def := new(plan2.Default) + def := new(plan.Default) result := newMrsForPasswordOfUser([][]interface{}{{ "id", @@ -1678,7 +1676,7 @@ func Test_extractTableDefColumns(t *testing.T) { typ := new(types.Type) typByte, _ := typ.Marshal() - def := new(plan2.Default) + def := new(plan.Default) result := newMrsForPasswordOfUser([][]interface{}{{ "id", diff --git a/pkg/iscp/consumer.go b/pkg/iscp/consumer.go index bdf1a3eeb8a52..4652a688cacff 100644 --- a/pkg/iscp/consumer.go +++ b/pkg/iscp/consumer.go @@ -15,7 +15,7 @@ package iscp import ( - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/vm/engine" ) diff --git a/pkg/partitionprune/filter.go b/pkg/partitionprune/filter.go index a51512afbce29..1a33b734dd113 100644 --- a/pkg/partitionprune/filter.go +++ b/pkg/partitionprune/filter.go @@ -18,15 +18,13 @@ import ( "context" "sort" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" - "github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/pb/partition" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - p "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/vm/process" ) @@ -103,7 +101,7 @@ func 
hashFilterExpr( for i, pt := range metadata.Partitions { // Deep copy partition expressions to avoid modifying the original expressions // when replacing column references with actual filter values - exprs[i] = p.DeepCopyExpr(pt.Expr) + exprs[i] = plan.DeepCopyExpr(pt.Expr) } switch exprImpl := expr.Expr.(type) { case *plan.Expr_F: @@ -221,7 +219,7 @@ func rangeFilterExpr( for i, pt := range metadata.Partitions { // Deep copy partition expressions to avoid modifying the original expressions // when replacing column references with actual filter values - exprs[i] = p.DeepCopyExpr(pt.Expr) + exprs[i] = plan.DeepCopyExpr(pt.Expr) } switch exprImpl := expr.Expr.(type) { case *plan.Expr_F: @@ -542,7 +540,7 @@ func listFilterExpr( metadata partition.PartitionMetadata, ) ([]int, bool, error) { var err error - expr = p.DeepCopyExpr(expr) + expr = plan.DeepCopyExpr(expr) expr, err = ConvertFoldExprToNormal(expr) if err != nil { return nil, false, err diff --git a/pkg/partitionprune/prune.go b/pkg/partitionprune/prune.go index 0e50fa947138a..e0c881e7be32c 100644 --- a/pkg/partitionprune/prune.go +++ b/pkg/partitionprune/prune.go @@ -21,8 +21,8 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/partitionservice" "github.com/matrixorigin/matrixone/pkg/pb/partition" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - p "github.com/matrixorigin/matrixone/pkg/sql/plan" "github.com/matrixorigin/matrixone/pkg/vm/process" ) @@ -112,7 +112,7 @@ func PrunePartitionByExpr( if partitionIndex != -1 { // If partitionIndex is specified, create a deep copy of the expression // and adjust column positions accordingly - expr = p.DeepCopyExpr(expr) + expr = plan.DeepCopyExpr(expr) mustReplaceColPos(expr, partitionIndex) } diff --git a/pkg/sql/plan/deepcopy.go b/pkg/pb/plan/deepcopy.go similarity index 65% rename from pkg/sql/plan/deepcopy.go rename to pkg/pb/plan/deepcopy.go index 
79572ab76c9dd..1cc4f200c9ae9 100644 --- a/pkg/sql/plan/deepcopy.go +++ b/pkg/pb/plan/deepcopy.go @@ -17,8 +17,6 @@ package plan import ( "bytes" "slices" - - "github.com/matrixorigin/matrixone/pkg/pb/plan" ) func DeepCopyExprList(list []*Expr) []*Expr { @@ -32,33 +30,33 @@ func DeepCopyExprList(list []*Expr) []*Expr { return newList } -func DeepCopyOrderBySpec(orderBy *plan.OrderBySpec) *plan.OrderBySpec { +func DeepCopyOrderBySpec(orderBy *OrderBySpec) *OrderBySpec { if orderBy == nil { return nil } - return &plan.OrderBySpec{ + return &OrderBySpec{ Expr: DeepCopyExpr(orderBy.Expr), Collation: orderBy.Collation, Flag: orderBy.Flag, } } -func DeepCopyOrderBySpecList(orderByList []*plan.OrderBySpec) []*plan.OrderBySpec { +func DeepCopyOrderBySpecList(orderByList []*OrderBySpec) []*OrderBySpec { if orderByList == nil { return nil } - newList := make([]*plan.OrderBySpec, len(orderByList)) + newList := make([]*OrderBySpec, len(orderByList)) for idx, orderBy := range orderByList { newList[idx] = DeepCopyOrderBySpec(orderBy) } return newList } -func DeepCopyObjectRef(ref *plan.ObjectRef) *plan.ObjectRef { +func DeepCopyObjectRef(ref *ObjectRef) *ObjectRef { if ref == nil { return nil } - return &plan.ObjectRef{ + return &ObjectRef{ Server: ref.Server, Db: ref.Db, Schema: ref.Schema, @@ -71,10 +69,10 @@ func DeepCopyObjectRef(ref *plan.ObjectRef) *plan.ObjectRef { } } -func DeepCopyUpdateCtxList(updateCtxList []*plan.UpdateCtx) []*plan.UpdateCtx { - result := make([]*plan.UpdateCtx, len(updateCtxList)) +func DeepCopyUpdateCtxList(updateCtxList []*UpdateCtx) []*UpdateCtx { + result := make([]*UpdateCtx, len(updateCtxList)) for i, ctx := range updateCtxList { - result[i] = &plan.UpdateCtx{ + result[i] = &UpdateCtx{ ObjRef: DeepCopyObjectRef(ctx.ObjRef), TableDef: DeepCopyTableDef(ctx.TableDef, true), InsertCols: slices.Clone(ctx.InsertCols), @@ -86,11 +84,11 @@ func DeepCopyUpdateCtxList(updateCtxList []*plan.UpdateCtx) []*plan.UpdateCtx { return result } -func 
DeepCopyOnDuplicateKeyCtx(ctx *plan.OnDuplicateKeyCtx) *plan.OnDuplicateKeyCtx { +func DeepCopyOnDuplicateKeyCtx(ctx *OnDuplicateKeyCtx) *OnDuplicateKeyCtx { if ctx == nil { return nil } - newCtx := &plan.OnDuplicateKeyCtx{ + newCtx := &OnDuplicateKeyCtx{ OnDuplicateIdx: slices.Clone(ctx.OnDuplicateIdx), } @@ -104,11 +102,11 @@ func DeepCopyOnDuplicateKeyCtx(ctx *plan.OnDuplicateKeyCtx) *plan.OnDuplicateKey return newCtx } -func DeepCopyInsertCtx(ctx *plan.InsertCtx) *plan.InsertCtx { +func DeepCopyInsertCtx(ctx *InsertCtx) *InsertCtx { if ctx == nil { return nil } - newCtx := &plan.InsertCtx{ + newCtx := &InsertCtx{ Ref: DeepCopyObjectRef(ctx.Ref), AddAffectedRows: ctx.AddAffectedRows, IsClusterTable: ctx.IsClusterTable, @@ -118,11 +116,11 @@ func DeepCopyInsertCtx(ctx *plan.InsertCtx) *plan.InsertCtx { return newCtx } -func DeepCopyDeleteCtx(ctx *plan.DeleteCtx) *plan.DeleteCtx { +func DeepCopyDeleteCtx(ctx *DeleteCtx) *DeleteCtx { if ctx == nil { return nil } - newCtx := &plan.DeleteCtx{ + newCtx := &DeleteCtx{ CanTruncate: ctx.CanTruncate, AddAffectedRows: ctx.AddAffectedRows, RowIdIdx: ctx.RowIdIdx, @@ -135,11 +133,11 @@ func DeepCopyDeleteCtx(ctx *plan.DeleteCtx) *plan.DeleteCtx { return newCtx } -func DeepCopyPreInsertCtx(ctx *plan.PreInsertCtx) *plan.PreInsertCtx { +func DeepCopyPreInsertCtx(ctx *PreInsertCtx) *PreInsertCtx { if ctx == nil { return nil } - newCtx := &plan.PreInsertCtx{ + newCtx := &PreInsertCtx{ Ref: DeepCopyObjectRef(ctx.Ref), TableDef: DeepCopyTableDef(ctx.TableDef, true), HasAutoCol: ctx.HasAutoCol, @@ -153,11 +151,11 @@ func DeepCopyPreInsertCtx(ctx *plan.PreInsertCtx) *plan.PreInsertCtx { return newCtx } -func DeepCopyPreInsertUkCtx(ctx *plan.PreInsertUkCtx) *plan.PreInsertUkCtx { +func DeepCopyPreInsertUkCtx(ctx *PreInsertUkCtx) *PreInsertUkCtx { if ctx == nil { return nil } - newCtx := &plan.PreInsertUkCtx{ + newCtx := &PreInsertUkCtx{ Columns: slices.Clone(ctx.Columns), PkColumn: ctx.PkColumn, PkType: ctx.PkType, @@ -167,11 +165,11 
@@ func DeepCopyPreInsertUkCtx(ctx *plan.PreInsertUkCtx) *plan.PreInsertUkCtx { return newCtx } -func DeepCopyLockTarget(target *plan.LockTarget) *plan.LockTarget { +func DeepCopyLockTarget(target *LockTarget) *LockTarget { if target == nil { return nil } - return &plan.LockTarget{ + return &LockTarget{ TableId: target.TableId, ObjRef: DeepCopyObjectRef(target.ObjRef), PrimaryColIdxInBat: target.PrimaryColIdxInBat, @@ -185,11 +183,11 @@ func DeepCopyLockTarget(target *plan.LockTarget) *plan.LockTarget { } } -func DeepCopyDedupJoinCtx(ctx *plan.DedupJoinCtx) *plan.DedupJoinCtx { +func DeepCopyDedupJoinCtx(ctx *DedupJoinCtx) *DedupJoinCtx { if ctx == nil { return nil } - newCtx := &plan.DedupJoinCtx{ + newCtx := &DedupJoinCtx{ OldColList: slices.Clone(ctx.OldColList), UpdateColIdxList: slices.Clone(ctx.UpdateColIdxList), UpdateColExprList: DeepCopyExprList(ctx.UpdateColExprList), @@ -198,7 +196,7 @@ func DeepCopyDedupJoinCtx(ctx *plan.DedupJoinCtx) *plan.DedupJoinCtx { return newCtx } -func DeepCopyNode(node *plan.Node) *plan.Node { +func DeepCopyNode(node *Node) *Node { newNode := &Node{ NodeType: node.NodeType, NodeId: node.NodeId, @@ -227,7 +225,7 @@ func DeepCopyNode(node *plan.Node) *plan.Node { PreInsertCtx: DeepCopyPreInsertCtx(node.PreInsertCtx), PreInsertUkCtx: DeepCopyPreInsertUkCtx(node.PreInsertUkCtx), OnDuplicateKey: DeepCopyOnDuplicateKeyCtx(node.OnDuplicateKey), - LockTargets: make([]*plan.LockTarget, len(node.LockTargets)), + LockTargets: make([]*LockTarget, len(node.LockTargets)), AnalyzeInfo: DeepCopyAnalyzeInfo(node.AnalyzeInfo), IsEnd: node.IsEnd, ExternScan: node.ExternScan, @@ -250,7 +248,7 @@ func DeepCopyNode(node *plan.Node) *plan.Node { newNode.ObjRef = DeepCopyObjectRef(node.ObjRef) newNode.ParentObjRef = DeepCopyObjectRef(node.ParentObjRef) - newNode.IndexScanInfo = plan.IndexScanInfo{ + newNode.IndexScanInfo = IndexScanInfo{ IsIndexScan: node.IndexScanInfo.IsIndexScan, IndexName: node.IndexScanInfo.IndexName, BelongToTable: 
node.IndexScanInfo.BelongToTable, @@ -271,8 +269,8 @@ func DeepCopyNode(node *plan.Node) *plan.Node { } if node.RowsetData != nil { - newNode.RowsetData = &plan.RowsetData{ - Cols: make([]*plan.ColData, len(node.RowsetData.Cols)), + newNode.RowsetData = &RowsetData{ + Cols: make([]*ColData, len(node.RowsetData.Cols)), RowCount: node.RowsetData.RowCount, } @@ -284,18 +282,18 @@ func DeepCopyNode(node *plan.Node) *plan.Node { return newNode } -func DeepCopyIndexReaderParam(oldParam *plan.IndexReaderParam) *plan.IndexReaderParam { +func DeepCopyIndexReaderParam(oldParam *IndexReaderParam) *IndexReaderParam { if oldParam == nil { return nil } - ret := &plan.IndexReaderParam{ + ret := &IndexReaderParam{ OrderBy: DeepCopyOrderBySpecList(oldParam.OrderBy), Limit: DeepCopyExpr(oldParam.Limit), } if oldParam.DistRange != nil { - ret.DistRange = &plan.DistRange{ + ret.DistRange = &DistRange{ LowerBoundType: oldParam.DistRange.LowerBoundType, UpperBoundType: oldParam.DistRange.UpperBoundType, LowerBound: DeepCopyExpr(oldParam.DistRange.LowerBound), @@ -306,11 +304,11 @@ func DeepCopyIndexReaderParam(oldParam *plan.IndexReaderParam) *plan.IndexReader return ret } -func DeepCopyReplaceCtx(oldCtx *plan.ReplaceCtx) *plan.ReplaceCtx { +func DeepCopyReplaceCtx(oldCtx *ReplaceCtx) *ReplaceCtx { if oldCtx == nil { return nil } - ctx := &plan.ReplaceCtx{ + ctx := &ReplaceCtx{ Ref: DeepCopyObjectRef(oldCtx.Ref), AddAffectedRows: oldCtx.AddAffectedRows, IsClusterTable: oldCtx.IsClusterTable, @@ -321,22 +319,22 @@ func DeepCopyReplaceCtx(oldCtx *plan.ReplaceCtx) *plan.ReplaceCtx { return ctx } -func DeepCopyDefault(def *plan.Default) *plan.Default { +func DeepCopyDefault(def *Default) *Default { if def == nil { return nil } - return &plan.Default{ + return &Default{ NullAbility: def.NullAbility, Expr: DeepCopyExpr(def.Expr), OriginString: def.OriginString, } } -func DeepCopyType(typ *plan.Type) *plan.Type { +func DeepCopyType(typ *Type) *Type { if typ == nil { return nil } - return 
&plan.Type{ + return &Type{ Id: typ.Id, NotNullable: typ.NotNullable, Width: typ.Width, @@ -346,11 +344,11 @@ func DeepCopyType(typ *plan.Type) *plan.Type { } } -func DeepCopyColDef(col *plan.ColDef) *plan.ColDef { +func DeepCopyColDef(col *ColDef) *ColDef { if col == nil { return nil } - return &plan.ColDef{ + return &ColDef{ ColId: col.ColId, Name: col.Name, OriginName: col.OriginName, @@ -369,22 +367,22 @@ func DeepCopyColDef(col *plan.ColDef) *plan.ColDef { } } -func DeepCopyColDefList(colDefs []*plan.ColDef) []*plan.ColDef { +func DeepCopyColDefList(colDefs []*ColDef) []*ColDef { if colDefs == nil { return nil } - newColDefs := make([]*plan.ColDef, len(colDefs)) + newColDefs := make([]*ColDef, len(colDefs)) for i, col := range colDefs { newColDefs[i] = DeepCopyColDef(col) } return newColDefs } -func DeepCopyPrimaryKeyDef(pkeyDef *plan.PrimaryKeyDef) *plan.PrimaryKeyDef { +func DeepCopyPrimaryKeyDef(pkeyDef *PrimaryKeyDef) *PrimaryKeyDef { if pkeyDef == nil { return nil } - def := &plan.PrimaryKeyDef{ + def := &PrimaryKeyDef{ PkeyColName: pkeyDef.PkeyColName, Names: slices.Clone(pkeyDef.Names), } @@ -395,11 +393,11 @@ func DeepCopyPrimaryKeyDef(pkeyDef *plan.PrimaryKeyDef) *plan.PrimaryKeyDef { return def } -func DeepCopyIndexDef(indexDef *plan.IndexDef) *plan.IndexDef { +func DeepCopyIndexDef(indexDef *IndexDef) *IndexDef { if indexDef == nil { return nil } - newindexDef := &plan.IndexDef{ + newindexDef := &IndexDef{ IdxId: indexDef.IdxId, IndexName: indexDef.IndexName, Unique: indexDef.Unique, @@ -416,53 +414,53 @@ func DeepCopyIndexDef(indexDef *plan.IndexDef) *plan.IndexDef { return newindexDef } -func DeepCopyIndexOption(indexOption *plan.IndexOption) *plan.IndexOption { +func DeepCopyIndexOption(indexOption *IndexOption) *IndexOption { if indexOption == nil { return nil } - newIndexOption := &plan.IndexOption{ + newIndexOption := &IndexOption{ CreateExtraTable: indexOption.CreateExtraTable, } return newIndexOption } -func DeepCopyOnUpdate(old 
*plan.OnUpdate) *plan.OnUpdate { +func DeepCopyOnUpdate(old *OnUpdate) *OnUpdate { if old == nil { return nil } - return &plan.OnUpdate{ + return &OnUpdate{ Expr: DeepCopyExpr(old.Expr), OriginString: old.OriginString, } } -func DeepCopyTableDefList(src []*plan.TableDef) []*plan.TableDef { +func DeepCopyTableDefList(src []*TableDef) []*TableDef { if src == nil { return nil } - ret := make([]*plan.TableDef, len(src)) + ret := make([]*TableDef, len(src)) for i, def := range src { ret[i] = DeepCopyTableDef(def, true) } return ret } -func DeepCopySampleFuncSpec(source *plan.SampleFuncSpec) *plan.SampleFuncSpec { +func DeepCopySampleFuncSpec(source *SampleFuncSpec) *SampleFuncSpec { if source == nil { return nil } - return &plan.SampleFuncSpec{ + return &SampleFuncSpec{ Rows: source.Rows, Percent: source.Percent, } } -func DeepCopyTableDef(table *plan.TableDef, withCols bool) *plan.TableDef { +func DeepCopyTableDef(table *TableDef, withCols bool) *TableDef { if table == nil { return nil } - newTable := &plan.TableDef{ + newTable := &TableDef{ TblId: table.TblId, Name: table.Name, Hidden: table.Hidden, @@ -472,11 +470,11 @@ func DeepCopyTableDef(table *plan.TableDef, withCols bool) *plan.TableDef { Version: table.Version, Pkey: DeepCopyPrimaryKeyDef(table.Pkey), Indexes: make([]*IndexDef, len(table.Indexes)), - Fkeys: make([]*plan.ForeignKeyDef, len(table.Fkeys)), + Fkeys: make([]*ForeignKeyDef, len(table.Fkeys)), RefChildTbls: slices.Clone(table.RefChildTbls), - Checks: make([]*plan.CheckDef, len(table.Checks)), - Props: make([]*plan.PropertyDef, len(table.Props)), - Defs: make([]*plan.TableDef_DefType, len(table.Defs)), + Checks: make([]*CheckDef, len(table.Checks)), + Props: make([]*PropertyDef, len(table.Props)), + Defs: make([]*TableDef_DefType, len(table.Defs)), Name2ColIndex: table.Name2ColIndex, IsLocked: table.IsLocked, TableLockType: table.TableLockType, @@ -496,29 +494,29 @@ func DeepCopyTableDef(table *plan.TableDef, withCols bool) *plan.TableDef { } for idx, 
col := range table.Checks { - newTable.Checks[idx] = &plan.CheckDef{ + newTable.Checks[idx] = &CheckDef{ Name: col.Name, Check: DeepCopyExpr(col.Check), } } for idx, prop := range table.Props { - newTable.Props[idx] = &plan.PropertyDef{ + newTable.Props[idx] = &PropertyDef{ Key: prop.Key, Value: prop.Value, } } if table.TblFunc != nil { - newTable.TblFunc = &plan.TableFunction{ + newTable.TblFunc = &TableFunction{ Name: table.TblFunc.Name, Param: slices.Clone(table.TblFunc.Param), } } if table.ClusterBy != nil { - newTable.ClusterBy = &plan.ClusterByDef{ - //Parts: make([]*plan.Expr, len(table.ClusterBy.Parts)), + newTable.ClusterBy = &ClusterByDef{ + //Parts: make([]*Expr, len(table.ClusterBy.Parts)), Name: table.ClusterBy.Name, CompCbkeyCol: DeepCopyColDef(table.ClusterBy.CompCbkeyCol), } @@ -528,7 +526,7 @@ func DeepCopyTableDef(table *plan.TableDef, withCols bool) *plan.TableDef { } if table.ViewSql != nil { - newTable.ViewSql = &plan.ViewDef{ + newTable.ViewSql = &ViewDef{ View: table.ViewSql.View, } } @@ -540,11 +538,11 @@ func DeepCopyTableDef(table *plan.TableDef, withCols bool) *plan.TableDef { } if table.Partition != nil { - newTable.Partition = &plan.Partition{ - PartitionDefs: make([]*plan.PartitionDef, len(table.Partition.PartitionDefs)), + newTable.Partition = &Partition{ + PartitionDefs: make([]*PartitionDef, len(table.Partition.PartitionDefs)), } for i, def := range table.Partition.PartitionDefs { - newTable.Partition.PartitionDefs[i] = &plan.PartitionDef{ + newTable.Partition.PartitionDefs[i] = &PartitionDef{ Def: DeepCopyExpr(def.Def), } } @@ -552,18 +550,18 @@ func DeepCopyTableDef(table *plan.TableDef, withCols bool) *plan.TableDef { for idx, def := range table.Defs { switch defImpl := def.Def.(type) { - case *plan.TableDef_DefType_Properties: - propDef := &plan.PropertiesDef{ - Properties: make([]*plan.Property, len(defImpl.Properties.Properties)), + case *TableDef_DefType_Properties: + propDef := &PropertiesDef{ + Properties: make([]*Property, 
len(defImpl.Properties.Properties)), } for i, p := range defImpl.Properties.Properties { - propDef.Properties[i] = &plan.Property{ + propDef.Properties[i] = &Property{ Key: p.Key, Value: p.Value, } } - newTable.Defs[idx] = &plan.TableDef_DefType{ - Def: &plan.TableDef_DefType_Properties{ + newTable.Defs[idx] = &TableDef_DefType{ + Def: &TableDef_DefType_Properties{ Properties: propDef, }, } @@ -573,12 +571,12 @@ func DeepCopyTableDef(table *plan.TableDef, withCols bool) *plan.TableDef { return newTable } -func DeepCopyColData(col *plan.ColData) *plan.ColData { - newCol := &plan.ColData{ - Data: make([]*plan.RowsetExpr, len(col.Data)), +func DeepCopyColData(col *ColData) *ColData { + newCol := &ColData{ + Data: make([]*RowsetExpr, len(col.Data)), } for i, e := range col.Data { - newCol.Data[i] = &plan.RowsetExpr{ + newCol.Data[i] = &RowsetExpr{ Expr: DeepCopyExpr(e.Expr), } } @@ -586,11 +584,11 @@ func DeepCopyColData(col *plan.ColData) *plan.ColData { return newCol } -func DeepCopyQuery(qry *plan.Query) *plan.Query { - newQry := &plan.Query{ +func DeepCopyQuery(qry *Query) *Query { + newQry := &Query{ StmtType: qry.StmtType, Steps: qry.Steps, - Nodes: make([]*plan.Node, len(qry.Nodes)), + Nodes: make([]*Node, len(qry.Nodes)), Params: DeepCopyExprList(qry.Params), Headings: qry.Headings, } @@ -604,16 +602,16 @@ func DeepCopyPlan(pl *Plan) *Plan { switch p := pl.Plan.(type) { case *Plan_Query: return &Plan{ - Plan: &plan.Plan_Query{ + Plan: &Plan_Query{ Query: DeepCopyQuery(p.Query), }, IsPrepare: pl.IsPrepare, TryRunTimes: pl.TryRunTimes, } - case *plan.Plan_Ddl: + case *Plan_Ddl: return &Plan{ - Plan: &plan.Plan_Ddl{ + Plan: &Plan_Ddl{ Ddl: DeepCopyDataDefinition(p.Ddl), }, IsPrepare: pl.IsPrepare, @@ -626,8 +624,8 @@ func DeepCopyPlan(pl *Plan) *Plan { } } -func DeepCopyDataDefinition(old *plan.DataDefinition) *plan.DataDefinition { - newDf := &plan.DataDefinition{ +func DeepCopyDataDefinition(old *DataDefinition) *DataDefinition { + newDf := &DataDefinition{ 
DdlType: old.DdlType, } if old.Query != nil { @@ -635,33 +633,33 @@ func DeepCopyDataDefinition(old *plan.DataDefinition) *plan.DataDefinition { } switch df := old.Definition.(type) { - case *plan.DataDefinition_CreateDatabase: - newDf.Definition = &plan.DataDefinition_CreateDatabase{ - CreateDatabase: &plan.CreateDatabase{ + case *DataDefinition_CreateDatabase: + newDf.Definition = &DataDefinition_CreateDatabase{ + CreateDatabase: &CreateDatabase{ IfNotExists: df.CreateDatabase.IfNotExists, Database: df.CreateDatabase.Database, }, } - case *plan.DataDefinition_AlterDatabase: - newDf.Definition = &plan.DataDefinition_AlterDatabase{ - AlterDatabase: &plan.AlterDatabase{ + case *DataDefinition_AlterDatabase: + newDf.Definition = &DataDefinition_AlterDatabase{ + AlterDatabase: &AlterDatabase{ IfExists: df.AlterDatabase.IfExists, Database: df.AlterDatabase.Database, }, } - case *plan.DataDefinition_DropDatabase: - newDf.Definition = &plan.DataDefinition_DropDatabase{ - DropDatabase: &plan.DropDatabase{ + case *DataDefinition_DropDatabase: + newDf.Definition = &DataDefinition_DropDatabase{ + DropDatabase: &DropDatabase{ IfExists: df.DropDatabase.IfExists, Database: df.DropDatabase.Database, DatabaseId: df.DropDatabase.DatabaseId, }, } - case *plan.DataDefinition_CreateTable: - CreateTable := &plan.CreateTable{ + case *DataDefinition_CreateTable: + CreateTable := &CreateTable{ Replace: df.CreateTable.Replace, IfNotExists: df.CreateTable.IfNotExists, Temporary: df.CreateTable.Temporary, @@ -670,17 +668,17 @@ func DeepCopyDataDefinition(old *plan.DataDefinition) *plan.DataDefinition { IndexTables: DeepCopyTableDefList(df.CreateTable.GetIndexTables()), FkDbs: slices.Clone(df.CreateTable.FkDbs), FkTables: slices.Clone(df.CreateTable.FkTables), - FkCols: make([]*plan.FkColName, len(df.CreateTable.FkCols)), + FkCols: make([]*FkColName, len(df.CreateTable.FkCols)), } for i, val := range df.CreateTable.FkCols { - CreateTable.FkCols[i] = &plan.FkColName{Cols: 
slices.Clone(val.Cols)} + CreateTable.FkCols[i] = &FkColName{Cols: slices.Clone(val.Cols)} } - newDf.Definition = &plan.DataDefinition_CreateTable{ + newDf.Definition = &DataDefinition_CreateTable{ CreateTable: CreateTable, } - case *plan.DataDefinition_AlterTable: - AlterTable := &plan.AlterTable{ + case *DataDefinition_AlterTable: + AlterTable := &AlterTable{ Database: df.AlterTable.Database, TableDef: DeepCopyTableDef(df.AlterTable.TableDef, true), CopyTableDef: DeepCopyTableDef(df.AlterTable.CopyTableDef, true), @@ -688,41 +686,41 @@ func DeepCopyDataDefinition(old *plan.DataDefinition) *plan.DataDefinition { AlgorithmType: df.AlterTable.AlgorithmType, CreateTmpTableSql: df.AlterTable.CreateTmpTableSql, InsertTmpDataSql: df.AlterTable.InsertTmpDataSql, - Actions: make([]*plan.AlterTable_Action, len(df.AlterTable.Actions)), + Actions: make([]*AlterTable_Action, len(df.AlterTable.Actions)), } for i, action := range df.AlterTable.Actions { switch act := action.Action.(type) { - case *plan.AlterTable_Action_Drop: - AlterTable.Actions[i] = &plan.AlterTable_Action{ - Action: &plan.AlterTable_Action_Drop{ - Drop: &plan.AlterTableDrop{ + case *AlterTable_Action_Drop: + AlterTable.Actions[i] = &AlterTable_Action{ + Action: &AlterTable_Action_Drop{ + Drop: &AlterTableDrop{ Typ: act.Drop.Typ, Name: act.Drop.Name, }, }, } - case *plan.AlterTable_Action_AddFk: - AddFk := &plan.AlterTable_Action_AddFk{ - AddFk: &plan.AlterTableAddFk{ + case *AlterTable_Action_AddFk: + AddFk := &AlterTable_Action_AddFk{ + AddFk: &AlterTableAddFk{ DbName: act.AddFk.DbName, TableName: act.AddFk.TableName, Cols: slices.Clone(act.AddFk.Cols), Fkey: DeepCopyFkey(act.AddFk.Fkey), }, } - AlterTable.Actions[i] = &plan.AlterTable_Action{ + AlterTable.Actions[i] = &AlterTable_Action{ Action: AddFk, } } } - newDf.Definition = &plan.DataDefinition_AlterTable{ + newDf.Definition = &DataDefinition_AlterTable{ AlterTable: AlterTable, } - case *plan.DataDefinition_DropTable: - newDf.Definition = 
&plan.DataDefinition_DropTable{ - DropTable: &plan.DropTable{ + case *DataDefinition_DropTable: + newDf.Definition = &DataDefinition_DropTable{ + DropTable: &DropTable{ IfExists: df.DropTable.IfExists, Database: df.DropTable.Database, Table: df.DropTable.Table, @@ -735,12 +733,12 @@ func DeepCopyDataDefinition(old *plan.DataDefinition) *plan.DataDefinition { }, } - case *plan.DataDefinition_CreateIndex: - newDf.Definition = &plan.DataDefinition_CreateIndex{ - CreateIndex: &plan.CreateIndex{ + case *DataDefinition_CreateIndex: + newDf.Definition = &DataDefinition_CreateIndex{ + CreateIndex: &CreateIndex{ Database: df.CreateIndex.Database, Table: df.CreateIndex.Table, - Index: &plan.CreateTable{ + Index: &CreateTable{ IfNotExists: df.CreateIndex.Index.IfNotExists, Temporary: df.CreateIndex.Index.Temporary, Database: df.CreateIndex.Index.Database, @@ -750,58 +748,58 @@ func DeepCopyDataDefinition(old *plan.DataDefinition) *plan.DataDefinition { }, } - case *plan.DataDefinition_AlterIndex: - newDf.Definition = &plan.DataDefinition_AlterIndex{ - AlterIndex: &plan.AlterIndex{ + case *DataDefinition_AlterIndex: + newDf.Definition = &DataDefinition_AlterIndex{ + AlterIndex: &AlterIndex{ Index: df.AlterIndex.Index, }, } - case *plan.DataDefinition_DropIndex: - newDf.Definition = &plan.DataDefinition_DropIndex{ - DropIndex: &plan.DropIndex{ + case *DataDefinition_DropIndex: + newDf.Definition = &DataDefinition_DropIndex{ + DropIndex: &DropIndex{ Database: df.DropIndex.Database, Table: df.DropIndex.Table, IndexName: df.DropIndex.IndexName, }, } - case *plan.DataDefinition_TruncateTable: - truncateTable := &plan.TruncateTable{ + case *DataDefinition_TruncateTable: + truncateTable := &TruncateTable{ Database: df.TruncateTable.Database, Table: df.TruncateTable.Table, ClusterTable: DeepCopyClusterTable(df.TruncateTable.GetClusterTable()), IndexTableNames: slices.Clone(df.TruncateTable.IndexTableNames), } - newDf.Definition = &plan.DataDefinition_TruncateTable{ + newDf.Definition 
= &DataDefinition_TruncateTable{ TruncateTable: truncateTable, } - case *plan.DataDefinition_ShowVariables: - showVariables := &plan.ShowVariables{ + case *DataDefinition_ShowVariables: + showVariables := &ShowVariables{ Global: df.ShowVariables.Global, Where: DeepCopyExprList(df.ShowVariables.Where), } - newDf.Definition = &plan.DataDefinition_ShowVariables{ + newDf.Definition = &DataDefinition_ShowVariables{ ShowVariables: showVariables, } - case *plan.DataDefinition_LockTables: - newDf.Definition = &plan.DataDefinition_LockTables{ - LockTables: &plan.LockTables{ + case *DataDefinition_LockTables: + newDf.Definition = &DataDefinition_LockTables{ + LockTables: &LockTables{ TableLocks: df.LockTables.TableLocks, }, } - case *plan.DataDefinition_UnlockTables: - newDf.Definition = &plan.DataDefinition_UnlockTables{ - UnlockTables: &plan.UnLockTables{}, + case *DataDefinition_UnlockTables: + newDf.Definition = &DataDefinition_UnlockTables{ + UnlockTables: &UnLockTables{}, } - case *plan.DataDefinition_AlterSequence: - newDf.Definition = &plan.DataDefinition_AlterSequence{ - AlterSequence: &plan.AlterSequence{ + case *DataDefinition_AlterSequence: + newDf.Definition = &DataDefinition_AlterSequence{ + AlterSequence: &AlterSequence{ IfExists: df.AlterSequence.IfExists, Database: df.AlterSequence.Database, TableDef: df.AlterSequence.TableDef, @@ -825,11 +823,11 @@ func DeepCopyFkey(fkey *ForeignKeyDef) *ForeignKeyDef { return def } -func DeepCopyRuntimeFilterSpec(rf *plan.RuntimeFilterSpec) *plan.RuntimeFilterSpec { +func DeepCopyRuntimeFilterSpec(rf *RuntimeFilterSpec) *RuntimeFilterSpec { if rf == nil { return nil } - return &plan.RuntimeFilterSpec{ + return &RuntimeFilterSpec{ Tag: rf.Tag, MatchPrefix: rf.MatchPrefix, UpperLimit: rf.UpperLimit, @@ -849,118 +847,118 @@ func DeepCopyExpr(expr *Expr) *Expr { } switch item := expr.Expr.(type) { - case *plan.Expr_Lit: - pc := &plan.Literal{ + case *Expr_Lit: + pc := &Literal{ Isnull: item.Lit.GetIsnull(), Src: item.Lit.Src, 
} switch c := item.Lit.Value.(type) { - case *plan.Literal_I8Val: - pc.Value = &plan.Literal_I8Val{I8Val: c.I8Val} - case *plan.Literal_I16Val: - pc.Value = &plan.Literal_I16Val{I16Val: c.I16Val} - case *plan.Literal_I32Val: - pc.Value = &plan.Literal_I32Val{I32Val: c.I32Val} - case *plan.Literal_I64Val: - pc.Value = &plan.Literal_I64Val{I64Val: c.I64Val} - case *plan.Literal_Dval: - pc.Value = &plan.Literal_Dval{Dval: c.Dval} - case *plan.Literal_Sval: - pc.Value = &plan.Literal_Sval{Sval: c.Sval} - case *plan.Literal_Bval: - pc.Value = &plan.Literal_Bval{Bval: c.Bval} - case *plan.Literal_U8Val: - pc.Value = &plan.Literal_U8Val{U8Val: c.U8Val} - case *plan.Literal_U16Val: - pc.Value = &plan.Literal_U16Val{U16Val: c.U16Val} - case *plan.Literal_U32Val: - pc.Value = &plan.Literal_U32Val{U32Val: c.U32Val} - case *plan.Literal_U64Val: - pc.Value = &plan.Literal_U64Val{U64Val: c.U64Val} - case *plan.Literal_Fval: - pc.Value = &plan.Literal_Fval{Fval: c.Fval} - case *plan.Literal_Dateval: - pc.Value = &plan.Literal_Dateval{Dateval: c.Dateval} - case *plan.Literal_Timeval: - pc.Value = &plan.Literal_Timeval{Timeval: c.Timeval} - case *plan.Literal_Datetimeval: - pc.Value = &plan.Literal_Datetimeval{Datetimeval: c.Datetimeval} - case *plan.Literal_Decimal64Val: - pc.Value = &plan.Literal_Decimal64Val{Decimal64Val: &plan.Decimal64{A: c.Decimal64Val.A}} - case *plan.Literal_Decimal128Val: - pc.Value = &plan.Literal_Decimal128Val{Decimal128Val: &plan.Decimal128{A: c.Decimal128Val.A, B: c.Decimal128Val.B}} - case *plan.Literal_Timestampval: - pc.Value = &plan.Literal_Timestampval{Timestampval: c.Timestampval} - case *plan.Literal_Jsonval: - pc.Value = &plan.Literal_Jsonval{Jsonval: c.Jsonval} - case *plan.Literal_Defaultval: - pc.Value = &plan.Literal_Defaultval{Defaultval: c.Defaultval} - case *plan.Literal_UpdateVal: - pc.Value = &plan.Literal_UpdateVal{UpdateVal: c.UpdateVal} - case *plan.Literal_EnumVal: - pc.Value = &plan.Literal_EnumVal{EnumVal: c.EnumVal} - case 
*plan.Literal_VecVal: - pc.Value = &plan.Literal_VecVal{VecVal: c.VecVal} + case *Literal_I8Val: + pc.Value = &Literal_I8Val{I8Val: c.I8Val} + case *Literal_I16Val: + pc.Value = &Literal_I16Val{I16Val: c.I16Val} + case *Literal_I32Val: + pc.Value = &Literal_I32Val{I32Val: c.I32Val} + case *Literal_I64Val: + pc.Value = &Literal_I64Val{I64Val: c.I64Val} + case *Literal_Dval: + pc.Value = &Literal_Dval{Dval: c.Dval} + case *Literal_Sval: + pc.Value = &Literal_Sval{Sval: c.Sval} + case *Literal_Bval: + pc.Value = &Literal_Bval{Bval: c.Bval} + case *Literal_U8Val: + pc.Value = &Literal_U8Val{U8Val: c.U8Val} + case *Literal_U16Val: + pc.Value = &Literal_U16Val{U16Val: c.U16Val} + case *Literal_U32Val: + pc.Value = &Literal_U32Val{U32Val: c.U32Val} + case *Literal_U64Val: + pc.Value = &Literal_U64Val{U64Val: c.U64Val} + case *Literal_Fval: + pc.Value = &Literal_Fval{Fval: c.Fval} + case *Literal_Dateval: + pc.Value = &Literal_Dateval{Dateval: c.Dateval} + case *Literal_Timeval: + pc.Value = &Literal_Timeval{Timeval: c.Timeval} + case *Literal_Datetimeval: + pc.Value = &Literal_Datetimeval{Datetimeval: c.Datetimeval} + case *Literal_Decimal64Val: + pc.Value = &Literal_Decimal64Val{Decimal64Val: &Decimal64{A: c.Decimal64Val.A}} + case *Literal_Decimal128Val: + pc.Value = &Literal_Decimal128Val{Decimal128Val: &Decimal128{A: c.Decimal128Val.A, B: c.Decimal128Val.B}} + case *Literal_Timestampval: + pc.Value = &Literal_Timestampval{Timestampval: c.Timestampval} + case *Literal_Jsonval: + pc.Value = &Literal_Jsonval{Jsonval: c.Jsonval} + case *Literal_Defaultval: + pc.Value = &Literal_Defaultval{Defaultval: c.Defaultval} + case *Literal_UpdateVal: + pc.Value = &Literal_UpdateVal{UpdateVal: c.UpdateVal} + case *Literal_EnumVal: + pc.Value = &Literal_EnumVal{EnumVal: c.EnumVal} + case *Literal_VecVal: + pc.Value = &Literal_VecVal{VecVal: c.VecVal} } - newExpr.Expr = &plan.Expr_Lit{ + newExpr.Expr = &Expr_Lit{ Lit: pc, } - case *plan.Expr_P: - newExpr.Expr = &plan.Expr_P{ - P: 
&plan.ParamRef{ + case *Expr_P: + newExpr.Expr = &Expr_P{ + P: &ParamRef{ Pos: item.P.GetPos(), }, } - case *plan.Expr_V: - newExpr.Expr = &plan.Expr_V{ - V: &plan.VarRef{ + case *Expr_V: + newExpr.Expr = &Expr_V{ + V: &VarRef{ Name: item.V.GetName(), Global: item.V.GetGlobal(), System: item.V.GetSystem(), }, } - case *plan.Expr_Col: - newExpr.Expr = &plan.Expr_Col{ - Col: &plan.ColRef{ + case *Expr_Col: + newExpr.Expr = &Expr_Col{ + Col: &ColRef{ RelPos: item.Col.GetRelPos(), ColPos: item.Col.GetColPos(), Name: item.Col.GetName(), }, } - case *plan.Expr_F: + case *Expr_F: newArgs := make([]*Expr, len(item.F.Args)) for idx, arg := range item.F.Args { newArgs[idx] = DeepCopyExpr(arg) } - newExpr.Expr = &plan.Expr_F{ - F: &plan.Function{ + newExpr.Expr = &Expr_F{ + F: &Function{ Func: DeepCopyObjectRef(item.F.Func), Args: newArgs, }, } - case *plan.Expr_W: + case *Expr_W: f := item.W.Frame - newExpr.Expr = &plan.Expr_W{ - W: &plan.WindowSpec{ + newExpr.Expr = &Expr_W{ + W: &WindowSpec{ WindowFunc: DeepCopyExpr(item.W.WindowFunc), PartitionBy: DeepCopyExprList(item.W.PartitionBy), OrderBy: DeepCopyOrderBySpecList(item.W.OrderBy), Name: item.W.Name, - Frame: &plan.FrameClause{ + Frame: &FrameClause{ Type: f.Type, - Start: &plan.FrameBound{ + Start: &FrameBound{ Type: f.Start.Type, UnBounded: f.Start.UnBounded, Val: DeepCopyExpr(f.Start.Val), }, - End: &plan.FrameBound{ + End: &FrameBound{ Type: f.End.Type, UnBounded: f.End.UnBounded, Val: DeepCopyExpr(f.End.Val), @@ -969,9 +967,9 @@ func DeepCopyExpr(expr *Expr) *Expr { }, } - case *plan.Expr_Sub: - newExpr.Expr = &plan.Expr_Sub{ - Sub: &plan.SubqueryRef{ + case *Expr_Sub: + newExpr.Expr = &Expr_Sub{ + Sub: &SubqueryRef{ NodeId: item.Sub.GetNodeId(), Typ: item.Sub.Typ, Op: item.Sub.Op, @@ -980,45 +978,45 @@ func DeepCopyExpr(expr *Expr) *Expr { }, } - case *plan.Expr_Corr: - newExpr.Expr = &plan.Expr_Corr{ - Corr: &plan.CorrColRef{ + case *Expr_Corr: + newExpr.Expr = &Expr_Corr{ + Corr: &CorrColRef{ ColPos: 
item.Corr.GetColPos(), RelPos: item.Corr.GetRelPos(), Depth: item.Corr.GetDepth(), }, } - case *plan.Expr_T: - newExpr.Expr = &plan.Expr_T{ - T: &plan.TargetType{}, + case *Expr_T: + newExpr.Expr = &Expr_T{ + T: &TargetType{}, } - case *plan.Expr_Max: - newExpr.Expr = &plan.Expr_Max{ - Max: &plan.MaxValue{ + case *Expr_Max: + newExpr.Expr = &Expr_Max{ + Max: &MaxValue{ Value: item.Max.GetValue(), }, } - case *plan.Expr_List: - newExpr.Expr = &plan.Expr_List{ - List: &plan.ExprList{ + case *Expr_List: + newExpr.Expr = &Expr_List{ + List: &ExprList{ List: DeepCopyExprList(item.List.List), }, } - case *plan.Expr_Vec: - newExpr.Expr = &plan.Expr_Vec{ - Vec: &plan.LiteralVec{ + case *Expr_Vec: + newExpr.Expr = &Expr_Vec{ + Vec: &LiteralVec{ Len: item.Vec.Len, Data: bytes.Clone(item.Vec.Data), }, } - case *plan.Expr_Fold: - newExpr.Expr = &plan.Expr_Fold{ - Fold: &plan.FoldVal{ + case *Expr_Fold: + newExpr.Expr = &Expr_Fold{ + Fold: &FoldVal{ Id: item.Fold.Id, IsConst: item.Fold.IsConst, Data: bytes.Clone(item.Fold.Data), @@ -1029,12 +1027,12 @@ func DeepCopyExpr(expr *Expr) *Expr { return newExpr } -func DeepCopyClusterTable(cluster *plan.ClusterTable) *plan.ClusterTable { +func DeepCopyClusterTable(cluster *ClusterTable) *ClusterTable { if cluster == nil { return nil } - newClusterTable := &plan.ClusterTable{ + newClusterTable := &ClusterTable{ IsClusterTable: cluster.GetIsClusterTable(), AccountIDs: slices.Clone(cluster.GetAccountIDs()), ColumnIndexOfAccountId: cluster.GetColumnIndexOfAccountId(), @@ -1042,7 +1040,7 @@ func DeepCopyClusterTable(cluster *plan.ClusterTable) *plan.ClusterTable { return newClusterTable } -func DeepCopyAnalyzeInfo(analyzeinfo *plan.AnalyzeInfo) *plan.AnalyzeInfo { +func DeepCopyAnalyzeInfo(analyzeinfo *AnalyzeInfo) *AnalyzeInfo { if analyzeinfo == nil { return nil } @@ -1057,3 +1055,32 @@ func DeepCopyAnalyzeInfo(analyzeinfo *plan.AnalyzeInfo) *plan.AnalyzeInfo { return &copyAnalyzeInfo } + +func DeepCopyStats(stats *Stats) *Stats { + if stats
== nil { + return nil + } + var hashmapStats *HashMapStats + if stats.HashmapStats != nil { + hashmapStats = &HashMapStats{ + HashmapSize: stats.HashmapStats.HashmapSize, + HashOnPK: stats.HashmapStats.HashOnPK, + Shuffle: stats.HashmapStats.Shuffle, + ShuffleColIdx: stats.HashmapStats.ShuffleColIdx, + ShuffleType: stats.HashmapStats.ShuffleType, + ShuffleColMin: stats.HashmapStats.ShuffleColMin, + ShuffleColMax: stats.HashmapStats.ShuffleColMax, + ShuffleMethod: stats.HashmapStats.ShuffleMethod, + } + } + return &Stats{ + BlockNum: stats.BlockNum, + Rowsize: stats.Rowsize, + Cost: stats.Cost, + Outcnt: stats.Outcnt, + TableCnt: stats.TableCnt, + Selectivity: stats.Selectivity, + HashmapStats: hashmapStats, + ForceOneCN: stats.ForceOneCN, + } +} diff --git a/pkg/proxy/server_conn_test.go b/pkg/proxy/server_conn_test.go index b7a7645df95e8..eb2900bf12569 100644 --- a/pkg/proxy/server_conn_test.go +++ b/pkg/proxy/server_conn_test.go @@ -29,13 +29,12 @@ import ( "github.com/fagongzi/goetty/v2" "github.com/lni/goutils/leaktest" - "github.com/stretchr/testify/require" - "github.com/matrixorigin/matrixone/pkg/config" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/frontend" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/pb/proxy" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/stretchr/testify/require" ) var testSlat = []byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0} diff --git a/pkg/sql/colexec/deletion/deletion_test.go b/pkg/sql/colexec/deletion/deletion_test.go index 10ec800ab742f..ce5fc0407cdf6 100644 --- a/pkg/sql/colexec/deletion/deletion_test.go +++ b/pkg/sql/colexec/deletion/deletion_test.go @@ -21,19 +21,18 @@ import ( "time" "github.com/golang/mock/gomock" - "github.com/stretchr/testify/require" - "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/common/mpool" 
"github.com/matrixorigin/matrixone/pkg/container/batch" mock_frontend "github.com/matrixorigin/matrixone/pkg/frontend/test" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/pb/txn" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - "github.com/matrixorigin/matrixone/pkg/sql/plan" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/engine" "github.com/matrixorigin/matrixone/pkg/vm/process" + "github.com/stretchr/testify/require" ) func TestString(t *testing.T) { diff --git a/pkg/sql/colexec/deletion/types.go b/pkg/sql/colexec/deletion/types.go index edd9150aaf03a..253e6ad6f92ee 100644 --- a/pkg/sql/colexec/deletion/types.go +++ b/pkg/sql/colexec/deletion/types.go @@ -25,9 +25,9 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/objectio" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/perfcounter" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - "github.com/matrixorigin/matrixone/pkg/sql/plan" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/engine" "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/options" diff --git a/pkg/sql/colexec/dispatch/sendfunc.go b/pkg/sql/colexec/dispatch/sendfunc.go index 233070aba8744..3ee34b3eb039b 100644 --- a/pkg/sql/colexec/dispatch/sendfunc.go +++ b/pkg/sql/colexec/dispatch/sendfunc.go @@ -18,15 +18,13 @@ import ( "context" "fmt" - "github.com/matrixorigin/matrixone/pkg/common/moerr" - "github.com/matrixorigin/matrixone/pkg/container/pSpool" - - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" - "github.com/matrixorigin/matrixone/pkg/cnservice/cnclient" + "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/container/batch" + 
"github.com/matrixorigin/matrixone/pkg/container/pSpool" "github.com/matrixorigin/matrixone/pkg/logutil" "github.com/matrixorigin/matrixone/pkg/pb/pipeline" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/vm/process" "go.uber.org/zap" ) @@ -231,9 +229,9 @@ func shuffleToAllFunc(bat *batch.Batch, ap *Dispatch, proc *process.Process) (bo ap.ctr.batchCnt[bat.ShuffleIDX]++ ap.ctr.rowCnt[bat.ShuffleIDX] += bat.RowCount() - if ap.ShuffleType == plan2.ShuffleToRegIndex { + if ap.ShuffleType == planner.ShuffleToRegIndex { return false, sendBatToIndex(ap, proc, bat, uint32(bat.ShuffleIDX)) - } else if ap.ShuffleType == plan2.ShuffleToLocalMatchedReg { + } else if ap.ShuffleType == planner.ShuffleToLocalMatchedReg { return false, sendBatToLocalMatchedReg(ap, proc, bat, uint32(bat.ShuffleIDX)) } else { return false, sendBatToMultiMatchedReg(ap, proc, bat, uint32(bat.ShuffleIDX)) diff --git a/pkg/sql/colexec/evalExpression.go b/pkg/sql/colexec/evalExpression.go index 751722fea27ec..f9d6e914c5b62 100644 --- a/pkg/sql/colexec/evalExpression.go +++ b/pkg/sql/colexec/evalExpression.go @@ -29,7 +29,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/datalink" "github.com/matrixorigin/matrixone/pkg/objectio" "github.com/matrixorigin/matrixone/pkg/pb/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/sql/util" "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/index" "github.com/matrixorigin/matrixone/pkg/vm/process" diff --git a/pkg/sql/colexec/evalExpressionReset.go b/pkg/sql/colexec/evalExpressionReset.go index 7b89ffa75affe..1233d49a48f79 100644 --- a/pkg/sql/colexec/evalExpressionReset.go +++ b/pkg/sql/colexec/evalExpressionReset.go @@ -19,7 +19,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/common/mpool" "github.com/matrixorigin/matrixone/pkg/container/vector" - 
"github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/vm/process" ) diff --git a/pkg/sql/colexec/evalExpression_test.go b/pkg/sql/colexec/evalExpression_test.go index c751c8e87bd64..50572ca9e3cf1 100644 --- a/pkg/sql/colexec/evalExpression_test.go +++ b/pkg/sql/colexec/evalExpression_test.go @@ -23,7 +23,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/pb/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/vm/process" "github.com/stretchr/testify/require" diff --git a/pkg/sql/colexec/external/external.go b/pkg/sql/colexec/external/external.go index e5f166277f76f..309740a5e59f9 100644 --- a/pkg/sql/colexec/external/external.go +++ b/pkg/sql/colexec/external/external.go @@ -46,7 +46,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/sql/colexec" "github.com/matrixorigin/matrixone/pkg/sql/crt" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/sql/util/csvparser" "github.com/matrixorigin/matrixone/pkg/util/errutil" v2 "github.com/matrixorigin/matrixone/pkg/util/metric/v2" @@ -99,7 +99,7 @@ func (external *External) Prepare(proc *process.Process) error { if err := json.Unmarshal([]byte(param.CreateSql), param.Extern); err != nil { return err } - if err := plan2.InitS3Param(param.Extern); err != nil { + if err := planner.InitS3Param(param.Extern); err != nil { return err } param.Extern.FileService = proc.Base.FileService @@ -133,8 +133,8 @@ func (external *External) Prepare(proc *process.Process) error { param.tableDef = &plan.TableDef{ 
Name2ColIndex: name2ColIndex, } - param.Filter.columnMap, _, _, _ = plan2.GetColumnsByExpr(param.Filter.FilterExpr, param.tableDef) - param.Filter.zonemappable = plan2.ExprIsZonemappable(proc.Ctx, param.Filter.FilterExpr) + param.Filter.columnMap, _, _, _ = planner.GetColumnsByExpr(param.Filter.FilterExpr, param.tableDef) + param.Filter.zonemappable = planner.ExprIsZonemappable(proc.Ctx, param.Filter.FilterExpr) if external.ProjectList != nil { err := external.PrepareProjection(proc) if err != nil { @@ -363,7 +363,7 @@ func ReadFileOffset(param *tree.ExternParam, mcpu int, fileSize int64, visibleCo } arr := make([]int64, 0) - fs, readPath, err := plan2.GetForETLWithType(param, param.Filepath) + fs, readPath, err := planner.GetForETLWithType(param, param.Filepath) if err != nil { return nil, err } @@ -922,8 +922,8 @@ func needRead(ctx context.Context, param *ExternalParam, proc *process.Process) vecs []*vector.Vector ) - if isMonoExpr := plan2.ExprIsZonemappable(proc.Ctx, expr); isMonoExpr { - cnt := plan2.AssignAuxIdForExpr(expr, 0) + if isMonoExpr := planner.ExprIsZonemappable(proc.Ctx, expr); isMonoExpr { + cnt := planner.AssignAuxIdForExpr(expr, 0) zms = make([]objectio.ZoneMap, cnt) vecs = make([]*vector.Vector, cnt) } diff --git a/pkg/sql/colexec/external/external_test.go b/pkg/sql/colexec/external/external_test.go index bdf9f3997c070..6a2e53cf25fba 100644 --- a/pkg/sql/colexec/external/external_test.go +++ b/pkg/sql/colexec/external/external_test.go @@ -30,9 +30,9 @@ import ( "github.com/matrixorigin/matrixone/pkg/fileservice" "github.com/matrixorigin/matrixone/pkg/pb/pipeline" "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/testutil" 
"github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" @@ -540,7 +540,7 @@ func TestReadDirSymlink(t *testing.T) { t.Logf("WARNING: os.Stat failed for %s: %v", fooPathInB, statErr) } - files, _, err = plan2.ReadDir(&tree.ExternParam{ + files, _, err = planner.ReadDir(&tree.ExternParam{ ExParamConst: tree.ExParamConst{ Filepath: fooPathInB, }, @@ -571,7 +571,7 @@ func TestReadDirSymlink(t *testing.T) { var files1 []string var maxRetries2 = 3 for i := 0; i < maxRetries2; i++ { - files1, _, err = plan2.ReadDir(&tree.ExternParam{ + files1, _, err = planner.ReadDir(&tree.ExternParam{ ExParamConst: tree.ExParamConst{ Filepath: path1, }, diff --git a/pkg/sql/colexec/external/parquet.go b/pkg/sql/colexec/external/parquet.go index c1b86cb32286f..445465d858b14 100644 --- a/pkg/sql/colexec/external/parquet.go +++ b/pkg/sql/colexec/external/parquet.go @@ -35,7 +35,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/fileservice" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/util/trace" "github.com/matrixorigin/matrixone/pkg/vm/process" "github.com/parquet-go/parquet-go" @@ -122,7 +122,7 @@ func (h *ParquetHandler) openFile(param *ExternalParam) error { case param.Extern.Local: return moerr.NewNYI(param.Ctx, "load parquet local") default: - fs, readPath, err := plan2.GetForETLWithType(param.Extern, param.Fileparam.Filepath) + fs, readPath, err := planner.GetForETLWithType(param.Extern, param.Fileparam.Filepath) if err != nil { return err } diff --git a/pkg/sql/colexec/external/parquet_nested.go b/pkg/sql/colexec/external/parquet_nested.go index 0ff312b72220a..86670b03c6a7c 100644 --- a/pkg/sql/colexec/external/parquet_nested.go +++ b/pkg/sql/colexec/external/parquet_nested.go @@ -25,7 +25,7 @@ import ( 
"github.com/matrixorigin/matrixone/pkg/container/bytejson" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/vm/process" "github.com/parquet-go/parquet-go" ) diff --git a/pkg/sql/colexec/external/parquet_string_to_datetime_test.go b/pkg/sql/colexec/external/parquet_string_to_datetime_test.go index 283225e4ac84b..c3c9d27af1cae 100644 --- a/pkg/sql/colexec/external/parquet_string_to_datetime_test.go +++ b/pkg/sql/colexec/external/parquet_string_to_datetime_test.go @@ -21,7 +21,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/parquet-go/parquet-go" "github.com/stretchr/testify/require" diff --git a/pkg/sql/colexec/external/parquet_string_to_decimal_test.go b/pkg/sql/colexec/external/parquet_string_to_decimal_test.go index 124c412564336..dee9c43ea0d7c 100644 --- a/pkg/sql/colexec/external/parquet_string_to_decimal_test.go +++ b/pkg/sql/colexec/external/parquet_string_to_decimal_test.go @@ -20,7 +20,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/parquet-go/parquet-go" "github.com/stretchr/testify/require" diff --git a/pkg/sql/colexec/external/types.go b/pkg/sql/colexec/external/types.go index e62c91df4b558..c08956f4aa4b9 100644 --- a/pkg/sql/colexec/external/types.go +++ b/pkg/sql/colexec/external/types.go @@ -19,20 +19,19 @@ import ( "context" "io" - "github.com/parquet-go/parquet-go" - 
"github.com/matrixorigin/matrixone/pkg/common/reuse" "github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/objectio" "github.com/matrixorigin/matrixone/pkg/objectio/ioutil" "github.com/matrixorigin/matrixone/pkg/pb/pipeline" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan" "github.com/matrixorigin/matrixone/pkg/sql/util/csvparser" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" + "github.com/parquet-go/parquet-go" ) var _ vm.Operator = new(External) diff --git a/pkg/sql/colexec/fill/fill_test.go b/pkg/sql/colexec/fill/fill_test.go index 350e934e03c78..8d5051fb3d50a 100644 --- a/pkg/sql/colexec/fill/fill_test.go +++ b/pkg/sql/colexec/fill/fill_test.go @@ -18,14 +18,13 @@ import ( "bytes" "testing" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" - "github.com/matrixorigin/matrixone/pkg/common/mpool" "github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" diff --git a/pkg/sql/colexec/filter/filter_test.go b/pkg/sql/colexec/filter/filter_test.go index 72743d5523313..182fd96281f06 100644 --- a/pkg/sql/colexec/filter/filter_test.go +++ b/pkg/sql/colexec/filter/filter_test.go @@ -24,8 +24,8 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - 
plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" @@ -55,7 +55,7 @@ func makeTestCases(t *testing.T) []filterTestCase { arg: &Filter{ FilterExprs: []*plan.Expr{ { - Typ: plan2.MakePlan2Type(&boolType), + Typ: planner.MakePlan2Type(&boolType), Expr: &plan.Expr_F{ F: &plan.Function{ Func: &plan.ObjectRef{ @@ -65,7 +65,7 @@ func makeTestCases(t *testing.T) []filterTestCase { Args: []*plan.Expr{ { - Typ: plan2.MakePlan2Type(&int32Type), + Typ: planner.MakePlan2Type(&int32Type), Expr: &plan.Expr_Col{ Col: &plan.ColRef{ RelPos: 0, @@ -96,7 +96,7 @@ func makeTestCases(t *testing.T) []filterTestCase { arg: &Filter{ FilterExprs: []*plan.Expr{ { - Typ: plan2.MakePlan2Type(&boolType), + Typ: planner.MakePlan2Type(&boolType), Expr: &plan.Expr_F{ F: &plan.Function{ Func: &plan.ObjectRef{ @@ -106,7 +106,7 @@ func makeTestCases(t *testing.T) []filterTestCase { Args: []*plan.Expr{ { - Typ: plan2.MakePlan2Type(&int32Type), + Typ: planner.MakePlan2Type(&int32Type), Expr: &plan.Expr_Col{ Col: &plan.ColRef{ RelPos: 0, @@ -121,7 +121,7 @@ func makeTestCases(t *testing.T) []filterTestCase { }, }, { - Typ: plan2.MakePlan2Type(&boolType), + Typ: planner.MakePlan2Type(&boolType), Expr: &plan.Expr_F{ F: &plan.Function{ Func: &plan.ObjectRef{ @@ -131,7 +131,7 @@ func makeTestCases(t *testing.T) []filterTestCase { Args: []*plan.Expr{ { - Typ: plan2.MakePlan2Type(&int32Type), + Typ: planner.MakePlan2Type(&int32Type), Expr: &plan.Expr_Col{ Col: &plan.ColRef{ RelPos: 0, @@ -390,7 +390,7 @@ func BenchmarkPlanConstandFold1(b *testing.B) { expr := generateFoldCase1() b.ResetTimer() for i := 0; i < b.N; i++ { - filterExpr, err := plan2.ConstantFold(batch.EmptyForConstFoldBatch, 
plan2.DeepCopyExpr(expr), proc, true, true) + filterExpr, err := planner.ConstantFold(batch.EmptyForConstFoldBatch, plan.DeepCopyExpr(expr), proc, true, true) require.NoError(b, err) executor, err := colexec.NewExpressionExecutorsFromPlanExpressions(proc, colexec.SplitAndExprs([]*plan.Expr{filterExpr})) require.NoError(b, err) @@ -715,17 +715,17 @@ func TestConstantTranspose(t *testing.T) { } makeAddExpr := func(left, right *plan.Expr) *plan.Expr { - expr, _ := plan2.BindFuncExprImplByPlanExpr(proc.Ctx, "+", []*plan.Expr{left, right}) + expr, _ := planner.BindFuncExprImplByPlanExpr(proc.Ctx, "+", []*plan.Expr{left, right}) return expr } makeSubExpr := func(left, right *plan.Expr) *plan.Expr { - expr, _ := plan2.BindFuncExprImplByPlanExpr(proc.Ctx, "-", []*plan.Expr{left, right}) + expr, _ := planner.BindFuncExprImplByPlanExpr(proc.Ctx, "-", []*plan.Expr{left, right}) return expr } colExpr := &plan.Expr{ - Typ: plan2.MakePlan2Type(&int32Type), + Typ: planner.MakePlan2Type(&int32Type), Expr: &plan.Expr_Col{ Col: &plan.ColRef{RelPos: 0, ColPos: 0}, }, @@ -739,7 +739,7 @@ func TestConstantTranspose(t *testing.T) { { name: "simple-const-right", input: &plan.Expr{ - Typ: plan2.MakePlan2Type(&boolType), + Typ: planner.MakePlan2Type(&boolType), Expr: &plan.Expr_F{ F: &plan.Function{ Func: &plan.ObjectRef{ObjName: "=", Obj: fid}, @@ -755,7 +755,7 @@ func TestConstantTranspose(t *testing.T) { { name: "complex-expr", input: &plan.Expr{ - Typ: plan2.MakePlan2Type(&boolType), + Typ: planner.MakePlan2Type(&boolType), Expr: &plan.Expr_F{ F: &plan.Function{ Func: &plan.ObjectRef{ObjName: "=", Obj: fid}, @@ -770,7 +770,7 @@ func TestConstantTranspose(t *testing.T) { }, }, expect: &plan.Expr{ - Typ: plan2.MakePlan2Type(&boolType), + Typ: planner.MakePlan2Type(&boolType), Expr: &plan.Expr_F{ F: &plan.Function{ Func: &plan.ObjectRef{ObjName: "=", Obj: fid}, @@ -788,7 +788,7 @@ func TestConstantTranspose(t *testing.T) { { name: "only-swap-already-simple", input: &plan.Expr{ - Typ: 
plan2.MakePlan2Type(&boolType), + Typ: planner.MakePlan2Type(&boolType), Expr: &plan.Expr_F{ F: &plan.Function{ Func: &plan.ObjectRef{ObjName: "=", Obj: fid}, @@ -800,7 +800,7 @@ func TestConstantTranspose(t *testing.T) { }, }, expect: &plan.Expr{ - Typ: plan2.MakePlan2Type(&boolType), + Typ: planner.MakePlan2Type(&boolType), Expr: &plan.Expr_F{ F: &plan.Function{ Func: &plan.ObjectRef{ObjName: "=", Obj: fid}, @@ -815,7 +815,7 @@ func TestConstantTranspose(t *testing.T) { { name: "complex-expression-with-multiple-ops", input: &plan.Expr{ - Typ: plan2.MakePlan2Type(&boolType), + Typ: planner.MakePlan2Type(&boolType), Expr: &plan.Expr_F{ F: &plan.Function{ Func: &plan.ObjectRef{ObjName: "=", Obj: fid}, @@ -839,7 +839,7 @@ func TestConstantTranspose(t *testing.T) { }, }, expect: &plan.Expr{ - Typ: plan2.MakePlan2Type(&boolType), + Typ: planner.MakePlan2Type(&boolType), Expr: &plan.Expr_F{ F: &plan.Function{ Func: &plan.ObjectRef{ObjName: "=", Obj: fid}, @@ -866,7 +866,7 @@ func TestConstantTranspose(t *testing.T) { { name: "multiple-constants-in-both-sides", input: &plan.Expr{ - Typ: plan2.MakePlan2Type(&boolType), + Typ: planner.MakePlan2Type(&boolType), Expr: &plan.Expr_F{ F: &plan.Function{ Func: &plan.ObjectRef{ObjName: "=", Obj: fid}, @@ -884,7 +884,7 @@ func TestConstantTranspose(t *testing.T) { }, }, expect: &plan.Expr{ - Typ: plan2.MakePlan2Type(&boolType), + Typ: planner.MakePlan2Type(&boolType), Expr: &plan.Expr_F{ F: &plan.Function{ Func: &plan.ObjectRef{ObjName: "=", Obj: fid}, @@ -905,7 +905,7 @@ func TestConstantTranspose(t *testing.T) { { name: "nested-expressions", input: &plan.Expr{ - Typ: plan2.MakePlan2Type(&boolType), + Typ: planner.MakePlan2Type(&boolType), Expr: &plan.Expr_F{ F: &plan.Function{ Func: &plan.ObjectRef{ObjName: "=", Obj: fid}, @@ -923,7 +923,7 @@ func TestConstantTranspose(t *testing.T) { }, }, expect: &plan.Expr{ - Typ: plan2.MakePlan2Type(&boolType), + Typ: planner.MakePlan2Type(&boolType), Expr: &plan.Expr_F{ F: &plan.Function{ 
Func: &plan.ObjectRef{ObjName: "=", Obj: fid}, @@ -944,7 +944,7 @@ func TestConstantTranspose(t *testing.T) { { name: "unsupported-expression", input: &plan.Expr{ - Typ: plan2.MakePlan2Type(&boolType), + Typ: planner.MakePlan2Type(&boolType), Expr: &plan.Expr_F{ F: &plan.Function{ Func: &plan.ObjectRef{ObjName: "*", Obj: fid}, @@ -960,7 +960,7 @@ func TestConstantTranspose(t *testing.T) { { name: "more-complex-expression-with-multiple-operations", input: &plan.Expr{ - Typ: plan2.MakePlan2Type(&boolType), + Typ: planner.MakePlan2Type(&boolType), Expr: &plan.Expr_F{ F: &plan.Function{ Func: &plan.ObjectRef{ObjName: "=", Obj: fid}, @@ -993,7 +993,7 @@ func TestConstantTranspose(t *testing.T) { }, }, expect: &plan.Expr{ - Typ: plan2.MakePlan2Type(&boolType), + Typ: planner.MakePlan2Type(&boolType), Expr: &plan.Expr_F{ F: &plan.Function{ Func: &plan.ObjectRef{ObjName: "=", Obj: fid}, @@ -1030,9 +1030,9 @@ func TestConstantTranspose(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - input := plan2.DeepCopyExpr(tt.input) + input := plan.DeepCopyExpr(tt.input) - result, err := plan2.ConstantTranspose(input, proc) + result, err := planner.ConstantTranspose(input, proc) require.NoError(t, err) if tt.expect == nil { diff --git a/pkg/sql/colexec/fuzzyfilter/filter.go b/pkg/sql/colexec/fuzzyfilter/filter.go index 3c6faf3f16fe1..8d994ae551c83 100644 --- a/pkg/sql/colexec/fuzzyfilter/filter.go +++ b/pkg/sql/colexec/fuzzyfilter/filter.go @@ -21,7 +21,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/message" "github.com/matrixorigin/matrixone/pkg/vm/process" @@ -360,8 +360,8 @@ func (fuzzyFilter *FuzzyFilter) appendCollisionKey(proc 
*process.Process, idx in func (fuzzyFilter *FuzzyFilter) generate() error { ctr := &fuzzyFilter.ctr rbat := batch.NewWithSize(1) - rbat.SetVector(0, vector.NewVec(plan.MakeTypeByPlan2Type(fuzzyFilter.PkTyp))) - ctr.pass2RuntimeFilter = vector.NewVec(plan.MakeTypeByPlan2Type(fuzzyFilter.PkTyp)) + rbat.SetVector(0, vector.NewVec(planner.MakeTypeByPlan2Type(fuzzyFilter.PkTyp))) + ctr.pass2RuntimeFilter = vector.NewVec(planner.MakeTypeByPlan2Type(fuzzyFilter.PkTyp)) ctr.rbat = rbat return nil } diff --git a/pkg/sql/colexec/fuzzyfilter/filter_test.go b/pkg/sql/colexec/fuzzyfilter/filter_test.go index f51a54386c65c..1af1cb4ec759c 100644 --- a/pkg/sql/colexec/fuzzyfilter/filter_test.go +++ b/pkg/sql/colexec/fuzzyfilter/filter_test.go @@ -21,7 +21,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" @@ -92,7 +92,7 @@ func makeTestCases(t *testing.T) []fuzzyTestCase { func newArgument(typ types.Type) *FuzzyFilter { arg := new(FuzzyFilter) - arg.PkTyp = plan.MakePlan2Type(&typ) + arg.PkTyp = planner.MakePlan2Type(&typ) arg.Callback = func(bat *batch.Batch) error { if bat == nil || bat.IsEmpty() { return nil diff --git a/pkg/sql/colexec/group/exec2.go b/pkg/sql/colexec/group/exec.go similarity index 100% rename from pkg/sql/colexec/group/exec2.go rename to pkg/sql/colexec/group/exec.go diff --git a/pkg/sql/colexec/group/types2.go b/pkg/sql/colexec/group/types.go similarity index 99% rename from pkg/sql/colexec/group/types2.go rename to pkg/sql/colexec/group/types.go index 5250792ba4a55..01580e4be2ac1 100644 --- a/pkg/sql/colexec/group/types2.go +++ b/pkg/sql/colexec/group/types.go @@ -29,7 +29,7 @@ import ( 
"github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" "github.com/matrixorigin/matrixone/pkg/sql/colexec/aggexec" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/util/list" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" diff --git a/pkg/sql/colexec/hashmap_util/hashmap_util.go b/pkg/sql/colexec/hashbuild/hashmap.go similarity index 99% rename from pkg/sql/colexec/hashmap_util/hashmap_util.go rename to pkg/sql/colexec/hashbuild/hashmap.go index 638be2df8ef1a..01c02143af871 100644 --- a/pkg/sql/colexec/hashmap_util/hashmap_util.go +++ b/pkg/sql/colexec/hashbuild/hashmap.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package hashmap_util +package hashbuild import ( "runtime" diff --git a/pkg/sql/colexec/hashmap_util/hashmap_util_test.go b/pkg/sql/colexec/hashbuild/hashmap_test.go similarity index 98% rename from pkg/sql/colexec/hashmap_util/hashmap_util_test.go rename to pkg/sql/colexec/hashbuild/hashmap_test.go index 776810736d37e..24c14eb4147ad 100644 --- a/pkg/sql/colexec/hashmap_util/hashmap_util_test.go +++ b/pkg/sql/colexec/hashbuild/hashmap_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package hashmap_util +package hashbuild import ( "reflect" @@ -33,21 +33,6 @@ import ( "github.com/stretchr/testify/require" ) -func newExpr(pos int32, typ types.Type) *plan.Expr { - return &plan.Expr{ - Typ: plan.Type{ - Id: int32(typ.Oid), - Width: typ.Width, - Scale: typ.Scale, - }, - Expr: &plan.Expr_Col{ - Col: &plan.ColRef{ - ColPos: pos, - }, - }, - } -} - func TestBuildHashMap(t *testing.T) { var hb HashmapBuilder proc := testutil.NewProcessWithMPool(t, "", mpool.MustNewZero()) diff --git a/pkg/sql/colexec/hashbuild/types.go b/pkg/sql/colexec/hashbuild/types.go index 9e34dc715681d..20826be1dd723 100644 --- a/pkg/sql/colexec/hashbuild/types.go +++ b/pkg/sql/colexec/hashbuild/types.go @@ -18,7 +18,6 @@ import ( "github.com/matrixorigin/matrixone/pkg/common/reuse" "github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/pb/plan" - "github.com/matrixorigin/matrixone/pkg/sql/colexec/hashmap_util" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/message" "github.com/matrixorigin/matrixone/pkg/vm/process" @@ -36,7 +35,7 @@ const ( type container struct { state int runtimeFilterIn bool - hashmapBuilder hashmap_util.HashmapBuilder + hashmapBuilder HashmapBuilder } type HashBuild struct { diff --git a/pkg/sql/colexec/hashjoin/join_test.go b/pkg/sql/colexec/hashjoin/join_test.go index eb06d0fc5d284..498d1c67df3f2 100644 --- a/pkg/sql/colexec/hashjoin/join_test.go +++ b/pkg/sql/colexec/hashjoin/join_test.go @@ -25,7 +25,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" "github.com/matrixorigin/matrixone/pkg/sql/colexec/hashbuild" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/message" diff --git a/pkg/sql/colexec/index_metadata.go 
b/pkg/sql/colexec/index_metadata.go index b42878e30422c..3e3515c781d8c 100644 --- a/pkg/sql/colexec/index_metadata.go +++ b/pkg/sql/colexec/index_metadata.go @@ -19,15 +19,14 @@ import ( "strconv" "time" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" - "github.com/matrixorigin/matrixone/pkg/txn/client" - "github.com/matrixorigin/matrixone/pkg/catalog" "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/sql/function" + "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/vm/engine" "github.com/matrixorigin/matrixone/pkg/vm/process" ) diff --git a/pkg/sql/colexec/limit/limit_test.go b/pkg/sql/colexec/limit/limit_test.go index 7ae363c3f6c3d..6ffcc7278338d 100644 --- a/pkg/sql/colexec/limit/limit_test.go +++ b/pkg/sql/colexec/limit/limit_test.go @@ -23,7 +23,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/common/mpool" "github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" @@ -46,7 +46,7 @@ func makeTestCases(t *testing.T) []limitTestCase { { proc: testutil.NewProcessWithMPool(t, "", mpool.MustNewZero()), arg: &Limit{ - LimitExpr: plan2.MakePlan2Uint64ConstExprWithType(0), + LimitExpr: planner.MakePlan2Uint64ConstExprWithType(0), OperatorBase: vm.OperatorBase{ OperatorInfo: vm.OperatorInfo{ Idx: 0, @@ -60,7 +60,7 @@ func makeTestCases(t *testing.T) []limitTestCase { { proc: testutil.NewProcessWithMPool(t, "", mpool.MustNewZero()), arg: &Limit{ - LimitExpr: 
plan2.MakePlan2Uint64ConstExprWithType(1), + LimitExpr: planner.MakePlan2Uint64ConstExprWithType(1), OperatorBase: vm.OperatorBase{ OperatorInfo: vm.OperatorInfo{ Idx: 0, @@ -77,7 +77,7 @@ func makeTestCases(t *testing.T) []limitTestCase { ctr: container{ seen: 0, }, - LimitExpr: plan2.MakePlan2Uint64ConstExprWithType(5), + LimitExpr: planner.MakePlan2Uint64ConstExprWithType(5), OperatorBase: vm.OperatorBase{ OperatorInfo: vm.OperatorInfo{ Idx: 0, @@ -141,7 +141,7 @@ func BenchmarkLimit(b *testing.B) { { proc: testutil.NewProcessWithMPool(b, "", mpool.MustNewZero()), arg: &Limit{ - LimitExpr: plan2.MakePlan2Uint64ConstExprWithType(8), + LimitExpr: planner.MakePlan2Uint64ConstExprWithType(8), }, }, } diff --git a/pkg/sql/colexec/limit/types.go b/pkg/sql/colexec/limit/types.go index a721753813b13..4f5bb55a948b5 100644 --- a/pkg/sql/colexec/limit/types.go +++ b/pkg/sql/colexec/limit/types.go @@ -17,8 +17,8 @@ package limit import ( "github.com/matrixorigin/matrixone/pkg/common/reuse" "github.com/matrixorigin/matrixone/pkg/container/batch" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - "github.com/matrixorigin/matrixone/pkg/sql/plan" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" ) diff --git a/pkg/sql/colexec/lockop/lock_op.go b/pkg/sql/colexec/lockop/lock_op.go index 44e3019efa5d6..43ab251352b09 100644 --- a/pkg/sql/colexec/lockop/lock_op.go +++ b/pkg/sql/colexec/lockop/lock_op.go @@ -33,10 +33,11 @@ import ( "github.com/matrixorigin/matrixone/pkg/objectio" "github.com/matrixorigin/matrixone/pkg/pb/lock" "github.com/matrixorigin/matrixone/pkg/pb/pipeline" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/pb/timestamp" "github.com/matrixorigin/matrixone/pkg/perfcounter" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" 
"github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/txn/trace" "github.com/matrixorigin/matrixone/pkg/util/trace/impl/motrace/statistic" @@ -933,7 +934,7 @@ func (lockOp *LockOp) CopyToPipelineTarget() []*pipeline.LockTarget { targets[i] = &pipeline.LockTarget{ TableId: target.tableID, PrimaryColIdxInBat: target.primaryColumnIndexInBatch, - PrimaryColTyp: plan.MakePlan2Type(&target.primaryColumnType), + PrimaryColTyp: planner.MakePlan2Type(&target.primaryColumnType), RefreshTsIdxInBat: target.refreshTimestampIndexInBatch, FilterColIdxInBat: target.filterColIndexInBatch, LockTable: target.lockTable, diff --git a/pkg/sql/colexec/lockop/types.go b/pkg/sql/colexec/lockop/types.go index 41183d1b9ad97..061e6f2cd40ed 100644 --- a/pkg/sql/colexec/lockop/types.go +++ b/pkg/sql/colexec/lockop/types.go @@ -21,8 +21,8 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/pb/lock" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/pb/timestamp" - "github.com/matrixorigin/matrixone/pkg/sql/plan" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/engine" "github.com/matrixorigin/matrixone/pkg/vm/process" diff --git a/pkg/sql/colexec/loopjoin/join_test.go b/pkg/sql/colexec/loopjoin/join_test.go index 81dde26ff993d..d3abaab1c7eb8 100644 --- a/pkg/sql/colexec/loopjoin/join_test.go +++ b/pkg/sql/colexec/loopjoin/join_test.go @@ -27,7 +27,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" "github.com/matrixorigin/matrixone/pkg/sql/colexec/hashbuild" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/message" diff 
--git a/pkg/sql/colexec/mergeblock/types.go b/pkg/sql/colexec/mergeblock/types.go index 92314d9d83f2c..9d890a8a35d2a 100644 --- a/pkg/sql/colexec/mergeblock/types.go +++ b/pkg/sql/colexec/mergeblock/types.go @@ -14,8 +14,6 @@ package mergeblock import ( - "go.uber.org/zap" - "github.com/matrixorigin/matrixone/pkg/catalog" "github.com/matrixorigin/matrixone/pkg/common/reuse" "github.com/matrixorigin/matrixone/pkg/container/batch" @@ -25,12 +23,13 @@ import ( "github.com/matrixorigin/matrixone/pkg/fileservice" "github.com/matrixorigin/matrixone/pkg/logutil" "github.com/matrixorigin/matrixone/pkg/objectio" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/perfcounter" - "github.com/matrixorigin/matrixone/pkg/sql/plan" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/engine" "github.com/matrixorigin/matrixone/pkg/vm/engine/disttae" "github.com/matrixorigin/matrixone/pkg/vm/process" + "go.uber.org/zap" ) var _ vm.Operator = new(MergeBlock) diff --git a/pkg/sql/colexec/mergeorder/order.go b/pkg/sql/colexec/mergeorder/order.go index a59ab780b5fae..86e09f47b32c6 100644 --- a/pkg/sql/colexec/mergeorder/order.go +++ b/pkg/sql/colexec/mergeorder/order.go @@ -21,9 +21,8 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" - plan2 "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - "github.com/matrixorigin/matrixone/pkg/sql/plan" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" ) @@ -63,10 +62,10 @@ func (ctr *container) generateCompares(fs []*plan.OrderBySpec) { var desc, nullsLast bool ctr.compares = make([]compare.Compare, len(fs)) for i := range ctr.compares { - desc = fs[i].Flag&plan2.OrderBySpec_DESC != 0 - if 
fs[i].Flag&plan2.OrderBySpec_NULLS_FIRST != 0 { + desc = fs[i].Flag&plan.OrderBySpec_DESC != 0 + if fs[i].Flag&plan.OrderBySpec_NULLS_FIRST != 0 { nullsLast = false - } else if fs[i].Flag&plan2.OrderBySpec_NULLS_LAST != 0 { + } else if fs[i].Flag&plan.OrderBySpec_NULLS_LAST != 0 { nullsLast = true } else { nullsLast = desc diff --git a/pkg/sql/colexec/mergeorder/types.go b/pkg/sql/colexec/mergeorder/types.go index c64bdf859bbff..7d3392d8ec8bc 100644 --- a/pkg/sql/colexec/mergeorder/types.go +++ b/pkg/sql/colexec/mergeorder/types.go @@ -20,8 +20,8 @@ import ( "github.com/matrixorigin/matrixone/pkg/compare" "github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/container/vector" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - "github.com/matrixorigin/matrixone/pkg/sql/plan" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" ) diff --git a/pkg/sql/colexec/mergetop/top_test.go b/pkg/sql/colexec/mergetop/top_test.go index 20ce28c560299..40f67cf50085c 100644 --- a/pkg/sql/colexec/mergetop/top_test.go +++ b/pkg/sql/colexec/mergetop/top_test.go @@ -26,7 +26,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" @@ -169,7 +169,7 @@ func newTestCase(t testing.TB, ds []bool, ts []types.Type, limit int64, fs []*pl proc: proc, arg: &MergeTop{ Fs: fs, - Limit: plan2.MakePlan2Uint64ConstExprWithType(uint64(limit)), + Limit: planner.MakePlan2Uint64ConstExprWithType(uint64(limit)), OperatorBase: vm.OperatorBase{ OperatorInfo: vm.OperatorInfo{ Idx: 0, diff --git 
a/pkg/sql/colexec/multi_update/delete_test.go b/pkg/sql/colexec/multi_update/delete_test.go index cc5e232335236..99ff26a5df715 100644 --- a/pkg/sql/colexec/multi_update/delete_test.go +++ b/pkg/sql/colexec/multi_update/delete_test.go @@ -22,8 +22,8 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - "github.com/matrixorigin/matrixone/pkg/sql/plan" "github.com/matrixorigin/matrixone/pkg/sql/util" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/vm/process" diff --git a/pkg/sql/colexec/multi_update/insert.go b/pkg/sql/colexec/multi_update/insert.go index 28b0e66f861c2..87f62fcc0a218 100644 --- a/pkg/sql/colexec/multi_update/insert.go +++ b/pkg/sql/colexec/multi_update/insert.go @@ -23,7 +23,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/perfcounter" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/vm/process" ) @@ -45,7 +45,7 @@ func (update *MultiUpdate) insert_main_table( if col.Name == catalog.Row_ID { continue } - bat.Vecs[len(attrs)] = vector.NewVec(plan.MakeTypeByPlan2Type(col.Typ)) + bat.Vecs[len(attrs)] = vector.NewVec(planner.MakeTypeByPlan2Type(col.Typ)) attrs = append(attrs, col.GetOriginCaseName()) } bat.SetAttributes(attrs) diff --git a/pkg/sql/colexec/multi_update/insert_test.go b/pkg/sql/colexec/multi_update/insert_test.go index dfcfb218f4644..da8ab49ccd413 100644 --- a/pkg/sql/colexec/multi_update/insert_test.go +++ b/pkg/sql/colexec/multi_update/insert_test.go @@ -22,8 +22,8 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/batch" 
"github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - "github.com/matrixorigin/matrixone/pkg/sql/plan" "github.com/matrixorigin/matrixone/pkg/sql/util" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/vm/process" diff --git a/pkg/sql/colexec/multi_update/s3writer_delegate.go b/pkg/sql/colexec/multi_update/s3writer_delegate.go index ddf60e445d6fb..3b057d9386626 100644 --- a/pkg/sql/colexec/multi_update/s3writer_delegate.go +++ b/pkg/sql/colexec/multi_update/s3writer_delegate.go @@ -37,7 +37,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/perfcounter" "github.com/matrixorigin/matrixone/pkg/sql/colexec" "github.com/matrixorigin/matrixone/pkg/sql/colexec/deletion" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/vm/engine/disttae" "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/containers" "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/options" @@ -498,9 +498,9 @@ func (writer *s3WriterDelegate) sortAndSyncOneTable( } if isTombstone { - pkCol := plan2.PkColByTableDef(tblDef) + pkCol := planner.PkColByTableDef(tblDef) s3Writer = colexec.NewCNS3TombstoneWriter( - proc.Mp(), fs, plan2.ExprType2Type(&pkCol.Typ), -1, opts..., + proc.Mp(), fs, planner.ExprType2Type(&pkCol.Typ), -1, opts..., ) } else { s3Writer = colexec.NewCNS3DataWriter(proc.Mp(), fs, tblDef, -1, false, opts...) 
diff --git a/pkg/sql/colexec/multi_update/types.go b/pkg/sql/colexec/multi_update/types.go index 615c0b08495e7..782c1b58893f1 100644 --- a/pkg/sql/colexec/multi_update/types.go +++ b/pkg/sql/colexec/multi_update/types.go @@ -17,7 +17,7 @@ package multi_update import ( "github.com/matrixorigin/matrixone/pkg/common/reuse" "github.com/matrixorigin/matrixone/pkg/container/batch" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/engine" "github.com/matrixorigin/matrixone/pkg/vm/process" diff --git a/pkg/sql/colexec/multi_update/update_test.go b/pkg/sql/colexec/multi_update/update_test.go index 6451b1954dd9f..a51ebd1486f6b 100644 --- a/pkg/sql/colexec/multi_update/update_test.go +++ b/pkg/sql/colexec/multi_update/update_test.go @@ -24,8 +24,8 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - "github.com/matrixorigin/matrixone/pkg/sql/plan" "github.com/matrixorigin/matrixone/pkg/sql/util" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/vm" diff --git a/pkg/sql/colexec/multi_update/util_for_test.go b/pkg/sql/colexec/multi_update/util_for_test.go index 536420bae0d5b..ac2a50559fb41 100644 --- a/pkg/sql/colexec/multi_update/util_for_test.go +++ b/pkg/sql/colexec/multi_update/util_for_test.go @@ -32,10 +32,9 @@ import ( "github.com/matrixorigin/matrixone/pkg/defines" "github.com/matrixorigin/matrixone/pkg/fileservice" mock_frontend "github.com/matrixorigin/matrixone/pkg/frontend/test" - pbPlan "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/perfcounter" 
"github.com/matrixorigin/matrixone/pkg/sql/colexec" - "github.com/matrixorigin/matrixone/pkg/sql/plan" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/util/toml" "github.com/matrixorigin/matrixone/pkg/vm" @@ -223,7 +222,7 @@ func getTestMainTable() (*plan.ObjectRef, *plan.TableDef) { Name: "t1", Hidden: false, Cols: []*plan.ColDef{ - {ColId: 0, Name: "a", Typ: i64typ, NotNull: true, Primary: true, Default: &pbPlan.Default{ + {ColId: 0, Name: "a", Typ: i64typ, NotNull: true, Primary: true, Default: &plan.Default{ NullAbility: false, }}, {ColId: 1, Name: "b", Typ: varcharTyp, NotNull: true}, @@ -257,10 +256,10 @@ func getTestUniqueIndexTable(uniqueTblName string) (*plan.ObjectRef, *plan.Table Name: uniqueTblName, Hidden: true, Cols: []*plan.ColDef{ - {ColId: 0, Name: catalog.IndexTableIndexColName, Typ: varcharTyp, NotNull: true, Primary: true, Default: &pbPlan.Default{ + {ColId: 0, Name: catalog.IndexTableIndexColName, Typ: varcharTyp, NotNull: true, Primary: true, Default: &plan.Default{ NullAbility: false, }}, - {ColId: 1, Name: catalog.IndexTablePrimaryColName, Typ: i64typ, NotNull: true, Default: &pbPlan.Default{ + {ColId: 1, Name: catalog.IndexTablePrimaryColName, Typ: i64typ, NotNull: true, Default: &plan.Default{ NullAbility: false, }}, {ColId: 2, Name: catalog.Row_ID, Typ: rowIdTyp}, @@ -289,7 +288,7 @@ func getTestSecondaryIndexTable(secondaryIdxTblName string) (*plan.ObjectRef, *p Name: secondaryIdxTblName, Hidden: true, Cols: []*plan.ColDef{ - {ColId: 0, Name: catalog.IndexTableIndexColName, Typ: varcharTyp, NotNull: true, Primary: true, Default: &pbPlan.Default{ + {ColId: 0, Name: catalog.IndexTableIndexColName, Typ: varcharTyp, NotNull: true, Primary: true, Default: &plan.Default{ NullAbility: false, }}, {ColId: 1, Name: catalog.IndexTablePrimaryColName, Typ: i64typ, NotNull: true}, diff --git a/pkg/sql/colexec/offset/offset_test.go b/pkg/sql/colexec/offset/offset_test.go index 4c13d023f9d7a..f4debfb7fada8 100644 
--- a/pkg/sql/colexec/offset/offset_test.go +++ b/pkg/sql/colexec/offset/offset_test.go @@ -24,7 +24,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" @@ -50,7 +50,7 @@ func makeTestCases(t *testing.T) []offsetTestCase { types.T_int8.ToType(), }, arg: &Offset{ - OffsetExpr: plan2.MakePlan2Uint64ConstExprWithType(8), + OffsetExpr: planner.MakePlan2Uint64ConstExprWithType(8), OperatorBase: vm.OperatorBase{ OperatorInfo: vm.OperatorInfo{ Idx: 1, @@ -66,7 +66,7 @@ func makeTestCases(t *testing.T) []offsetTestCase { types.T_int8.ToType(), }, arg: &Offset{ - OffsetExpr: plan2.MakePlan2Uint64ConstExprWithType(10), + OffsetExpr: planner.MakePlan2Uint64ConstExprWithType(10), OperatorBase: vm.OperatorBase{ OperatorInfo: vm.OperatorInfo{ Idx: 1, @@ -82,7 +82,7 @@ func makeTestCases(t *testing.T) []offsetTestCase { types.T_int8.ToType(), }, arg: &Offset{ - OffsetExpr: plan2.MakePlan2Uint64ConstExprWithType(12), + OffsetExpr: planner.MakePlan2Uint64ConstExprWithType(12), OperatorBase: vm.OperatorBase{ OperatorInfo: vm.OperatorInfo{ Idx: 1, @@ -152,7 +152,7 @@ func BenchmarkOffset(b *testing.B) { ctr: container{ seen: 0, }, - OffsetExpr: plan2.MakePlan2Uint64ConstExprWithType(8), + OffsetExpr: planner.MakePlan2Uint64ConstExprWithType(8), OperatorBase: vm.OperatorBase{ OperatorInfo: vm.OperatorInfo{ Idx: 1, diff --git a/pkg/sql/colexec/offset/types.go b/pkg/sql/colexec/offset/types.go index c6f3ef638aafc..51db32f72942f 100644 --- a/pkg/sql/colexec/offset/types.go +++ b/pkg/sql/colexec/offset/types.go @@ -17,8 +17,8 @@ package offset import ( "github.com/matrixorigin/matrixone/pkg/common/reuse" 
"github.com/matrixorigin/matrixone/pkg/container/batch" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - "github.com/matrixorigin/matrixone/pkg/sql/plan" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" ) diff --git a/pkg/sql/colexec/onduplicatekey/on_duplicate_key.go b/pkg/sql/colexec/onduplicatekey/on_duplicate_key.go index e70d94deef693..8600d1519efea 100644 --- a/pkg/sql/colexec/onduplicatekey/on_duplicate_key.go +++ b/pkg/sql/colexec/onduplicatekey/on_duplicate_key.go @@ -24,7 +24,6 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" ) @@ -293,7 +292,7 @@ func updateOldBatch(evalBatch *batch.Batch, updateExpr map[string]*plan.Expr, pr if i < columnCount { // update insert cols if expr, exists := updateExpr[attr]; exists { - runExpr := plan2.DeepCopyExpr(expr) + runExpr := plan.DeepCopyExpr(expr) resetColPos(runExpr, columnCount) newVec, err := colexec.GetWritableResultFromExpression(proc, runExpr, []*batch.Batch{evalBatch}) if err != nil { diff --git a/pkg/sql/colexec/onduplicatekey/on_duplicate_key_test.go b/pkg/sql/colexec/onduplicatekey/on_duplicate_key_test.go index 56758f03b86b3..15e937058bff5 100644 --- a/pkg/sql/colexec/onduplicatekey/on_duplicate_key_test.go +++ b/pkg/sql/colexec/onduplicatekey/on_duplicate_key_test.go @@ -27,7 +27,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/objectio" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/vm" 
"github.com/matrixorigin/matrixone/pkg/vm/process" @@ -108,7 +108,7 @@ func newTestCase(t *testing.T) onDupTestCase { proc := testutil.NewProcessWithMPool(t, "", mpool.MustNewZero()) pkType := types.T_int64.ToType() leftExpr := &plan.Expr{ - Typ: plan2.MakePlan2Type(&pkType), + Typ: planner.MakePlan2Type(&pkType), Expr: &plan.Expr_Col{ Col: &plan.ColRef{ RelPos: 0, @@ -117,7 +117,7 @@ func newTestCase(t *testing.T) onDupTestCase { }, } rightExpr := &plan.Expr{ - Typ: plan2.MakePlan2Type(&pkType), + Typ: planner.MakePlan2Type(&pkType), Expr: &plan.Expr_Col{ Col: &plan.ColRef{ RelPos: 1, @@ -125,10 +125,10 @@ func newTestCase(t *testing.T) onDupTestCase { }, }, } - eqExpr, _ := plan2.BindFuncExprImplByPlanExpr(context.TODO(), "=", []*plan.Expr{leftExpr, rightExpr}) + eqExpr, _ := planner.BindFuncExprImplByPlanExpr(context.TODO(), "=", []*plan.Expr{leftExpr, rightExpr}) onDupMap := make(map[string]*plan.Expr) - onDupMap["b"] = plan2.MakePlan2Int64ConstExprWithType(10) + onDupMap["b"] = planner.MakePlan2Int64ConstExprWithType(10) return onDupTestCase{ proc: proc, diff --git a/pkg/sql/colexec/onduplicatekey/types.go b/pkg/sql/colexec/onduplicatekey/types.go index 8689494bb016c..90dcb6b399564 100644 --- a/pkg/sql/colexec/onduplicatekey/types.go +++ b/pkg/sql/colexec/onduplicatekey/types.go @@ -17,8 +17,8 @@ package onduplicatekey import ( "github.com/matrixorigin/matrixone/pkg/common/reuse" "github.com/matrixorigin/matrixone/pkg/container/batch" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - "github.com/matrixorigin/matrixone/pkg/sql/plan" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" ) diff --git a/pkg/sql/colexec/order/types.go b/pkg/sql/colexec/order/types.go index 6055dab28a94c..becb4ec226ad0 100644 --- a/pkg/sql/colexec/order/types.go +++ b/pkg/sql/colexec/order/types.go @@ -19,8 +19,8 @@ import ( "github.com/matrixorigin/matrixone/pkg/common/reuse" 
"github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/container/vector" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - "github.com/matrixorigin/matrixone/pkg/sql/plan" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" ) diff --git a/pkg/sql/colexec/partition/partition.go b/pkg/sql/colexec/partition/partition.go index 4bb738acc3a21..b73b753d7d391 100644 --- a/pkg/sql/colexec/partition/partition.go +++ b/pkg/sql/colexec/partition/partition.go @@ -21,9 +21,8 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" - plan2 "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - "github.com/matrixorigin/matrixone/pkg/sql/plan" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" ) @@ -159,10 +158,10 @@ func (ctr *container) generateCompares(fs []*plan.OrderBySpec) { ctr.compares = make([]compare.Compare, len(fs)) for i := range ctr.compares { - desc = fs[i].Flag&plan2.OrderBySpec_DESC != 0 - if fs[i].Flag&plan2.OrderBySpec_NULLS_FIRST != 0 { + desc = fs[i].Flag&plan.OrderBySpec_DESC != 0 + if fs[i].Flag&plan.OrderBySpec_NULLS_FIRST != 0 { nullsLast = false - } else if fs[i].Flag&plan2.OrderBySpec_NULLS_LAST != 0 { + } else if fs[i].Flag&plan.OrderBySpec_NULLS_LAST != 0 { nullsLast = true } else { nullsLast = desc diff --git a/pkg/sql/colexec/postdml/postdml_test.go b/pkg/sql/colexec/postdml/postdml_test.go index 4597d783d9ea9..865ba8ee954c5 100644 --- a/pkg/sql/colexec/postdml/postdml_test.go +++ b/pkg/sql/colexec/postdml/postdml_test.go @@ -24,9 +24,9 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" 
mock_frontend "github.com/matrixorigin/matrixone/pkg/frontend/test" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/pb/txn" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - "github.com/matrixorigin/matrixone/pkg/sql/plan" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/engine" diff --git a/pkg/sql/colexec/postdml/types.go b/pkg/sql/colexec/postdml/types.go index 6598378e1ba64..644cfc438813c 100644 --- a/pkg/sql/colexec/postdml/types.go +++ b/pkg/sql/colexec/postdml/types.go @@ -17,7 +17,7 @@ package postdml import ( "github.com/matrixorigin/matrixone/pkg/common/reuse" "github.com/matrixorigin/matrixone/pkg/container/batch" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" ) diff --git a/pkg/sql/colexec/s3util.go b/pkg/sql/colexec/s3util.go index 79459bb041eb6..7b068e01de8dd 100644 --- a/pkg/sql/colexec/s3util.go +++ b/pkg/sql/colexec/s3util.go @@ -132,7 +132,7 @@ func GetSequmsAttrsSortKeyIdxFromTableDef( // the condition of sortIdx == -1 may be unnecessary. if sortKeyIdx == -1 && tableDef.ClusterBy != nil { // the rowId column has been excluded from the TableDef of the target table for the insert statements(insert,load). - // link: pkg/sql/plan/build_constraint_util.go --> func setTableExprToDmlTableInfo, + // link: pkg/sql/planner/build_constraint_util.go --> func setTableExprToDmlTableInfo, // and the sortKeyIdx position can be directly obtained by using a name that matches the sorting key. 
for idx, colDef := range tableDef.Cols { if colDef.Name == tableDef.ClusterBy.Name { diff --git a/pkg/sql/colexec/sample/types.go b/pkg/sql/colexec/sample/types.go index e10009533afee..ec72f5ac55b0e 100644 --- a/pkg/sql/colexec/sample/types.go +++ b/pkg/sql/colexec/sample/types.go @@ -20,9 +20,8 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/pb/pipeline" - planpb "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - "github.com/matrixorigin/matrixone/pkg/sql/plan" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" ) @@ -123,7 +122,7 @@ func NewMergeSample(rowSampleArg *Sample, outputRowCount bool) *Sample { newSampleExpr := make([]*plan.Expr, len(rowSampleArg.SampleExprs)) for i, expr := range rowSampleArg.GroupExprs { newGroupExpr[i] = &plan.Expr{ - Expr: &planpb.Expr_Col{ + Expr: &plan.Expr_Col{ Col: &plan.ColRef{ RelPos: 0, ColPos: int32(i), @@ -134,7 +133,7 @@ func NewMergeSample(rowSampleArg *Sample, outputRowCount bool) *Sample { } for i, expr := range rowSampleArg.SampleExprs { newSampleExpr[i] = &plan.Expr{ - Expr: &planpb.Expr_Col{ + Expr: &plan.Expr_Col{ Col: &plan.ColRef{ RelPos: 0, ColPos: int32(i + len(rowSampleArg.GroupExprs)), diff --git a/pkg/sql/colexec/shuffle/shuffle.go b/pkg/sql/colexec/shuffle/shuffle.go index 14f122d6e7ad4..4e1a9d5ba235c 100644 --- a/pkg/sql/colexec/shuffle/shuffle.go +++ b/pkg/sql/colexec/shuffle/shuffle.go @@ -22,7 +22,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/pb/plan" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/vm" 
"github.com/matrixorigin/matrixone/pkg/vm/message" "github.com/matrixorigin/matrixone/pkg/vm/process" @@ -180,25 +180,25 @@ func shuffleConstVectorByHash(ap *Shuffle, bat *batch.Batch) uint64 { switch groupByVec.GetType().Oid { case types.T_int64: groupByCol := vector.MustFixedColNoTypeCheck[int64](groupByVec) - return plan2.SimpleInt64HashToRange(uint64(groupByCol[0]), lenRegs) + return planner.SimpleInt64HashToRange(uint64(groupByCol[0]), lenRegs) case types.T_int32: groupByCol := vector.MustFixedColNoTypeCheck[int32](groupByVec) - return plan2.SimpleInt64HashToRange(uint64(groupByCol[0]), lenRegs) + return planner.SimpleInt64HashToRange(uint64(groupByCol[0]), lenRegs) case types.T_int16: groupByCol := vector.MustFixedColNoTypeCheck[int16](groupByVec) - return plan2.SimpleInt64HashToRange(uint64(groupByCol[0]), lenRegs) + return planner.SimpleInt64HashToRange(uint64(groupByCol[0]), lenRegs) case types.T_uint64: groupByCol := vector.MustFixedColNoTypeCheck[uint64](groupByVec) - return plan2.SimpleInt64HashToRange(uint64(groupByCol[0]), lenRegs) + return planner.SimpleInt64HashToRange(uint64(groupByCol[0]), lenRegs) case types.T_uint32: groupByCol := vector.MustFixedColNoTypeCheck[uint32](groupByVec) - return plan2.SimpleInt64HashToRange(uint64(groupByCol[0]), lenRegs) + return planner.SimpleInt64HashToRange(uint64(groupByCol[0]), lenRegs) case types.T_uint16: groupByCol := vector.MustFixedColNoTypeCheck[uint16](groupByVec) - return plan2.SimpleInt64HashToRange(uint64(groupByCol[0]), lenRegs) + return planner.SimpleInt64HashToRange(uint64(groupByCol[0]), lenRegs) case types.T_char, types.T_varchar, types.T_text: groupByCol, area := vector.MustVarlenaRawData(groupByVec) - return plan2.SimpleCharHashToRange(groupByCol[0].GetByteSlice(area), lenRegs) + return planner.SimpleCharHashToRange(groupByCol[0].GetByteSlice(area), lenRegs) default: panic("unsupported shuffle type, wrong plan!") //something got wrong here! 
} @@ -214,7 +214,7 @@ func getShuffledSelsByHashWithNull(ap *Shuffle, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.SimpleInt64HashToRange(uint64(v), lenRegs) + regIndex = planner.SimpleInt64HashToRange(uint64(v), lenRegs) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -223,7 +223,7 @@ func getShuffledSelsByHashWithNull(ap *Shuffle, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.SimpleInt64HashToRange(uint64(v), lenRegs) + regIndex = planner.SimpleInt64HashToRange(uint64(v), lenRegs) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -232,7 +232,7 @@ func getShuffledSelsByHashWithNull(ap *Shuffle, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.SimpleInt64HashToRange(uint64(v), lenRegs) + regIndex = planner.SimpleInt64HashToRange(uint64(v), lenRegs) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -241,7 +241,7 @@ func getShuffledSelsByHashWithNull(ap *Shuffle, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.SimpleInt64HashToRange(v, lenRegs) + regIndex = planner.SimpleInt64HashToRange(v, lenRegs) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -250,7 +250,7 @@ func getShuffledSelsByHashWithNull(ap *Shuffle, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.SimpleInt64HashToRange(uint64(v), lenRegs) + regIndex = planner.SimpleInt64HashToRange(uint64(v), lenRegs) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -259,7 +259,7 @@ func getShuffledSelsByHashWithNull(ap *Shuffle, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex 
uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.SimpleInt64HashToRange(uint64(v), lenRegs) + regIndex = planner.SimpleInt64HashToRange(uint64(v), lenRegs) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -268,7 +268,7 @@ func getShuffledSelsByHashWithNull(ap *Shuffle, bat *batch.Batch) [][]int32 { for row := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.SimpleCharHashToRange(groupByCol[row].GetByteSlice(area), lenRegs) + regIndex = planner.SimpleCharHashToRange(groupByCol[row].GetByteSlice(area), lenRegs) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -286,55 +286,55 @@ func getShuffledSelsByHashWithoutNull(ap *Shuffle, bat *batch.Batch) [][]int32 { case types.T_int64: groupByCol := vector.MustFixedColNoTypeCheck[int64](groupByVec) for row, v := range groupByCol { - regIndex := plan2.SimpleInt64HashToRange(uint64(v), bucketNum) + regIndex := planner.SimpleInt64HashToRange(uint64(v), bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } case types.T_int32: groupByCol := vector.MustFixedColNoTypeCheck[int32](groupByVec) for row, v := range groupByCol { - regIndex := plan2.SimpleInt64HashToRange(uint64(v), bucketNum) + regIndex := planner.SimpleInt64HashToRange(uint64(v), bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } case types.T_int16: groupByCol := vector.MustFixedColNoTypeCheck[int16](groupByVec) for row, v := range groupByCol { - regIndex := plan2.SimpleInt64HashToRange(uint64(v), bucketNum) + regIndex := planner.SimpleInt64HashToRange(uint64(v), bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } case types.T_uint64: groupByCol := vector.MustFixedColNoTypeCheck[uint64](groupByVec) for row, v := range groupByCol { - regIndex := plan2.SimpleInt64HashToRange(v, bucketNum) + regIndex := planner.SimpleInt64HashToRange(v, bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } case types.T_uint32: groupByCol := 
vector.MustFixedColNoTypeCheck[uint32](groupByVec) for row, v := range groupByCol { - regIndex := plan2.SimpleInt64HashToRange(uint64(v), bucketNum) + regIndex := planner.SimpleInt64HashToRange(uint64(v), bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } case types.T_uint16: groupByCol := vector.MustFixedColNoTypeCheck[uint16](groupByVec) for row, v := range groupByCol { - regIndex := plan2.SimpleInt64HashToRange(uint64(v), bucketNum) + regIndex := planner.SimpleInt64HashToRange(uint64(v), bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } case types.T_decimal64: groupByCol := vector.MustFixedColNoTypeCheck[types.Decimal64](groupByVec) for row, v := range groupByCol { - regIndex := plan2.SimpleInt64HashToRange(uint64(v), bucketNum) + regIndex := planner.SimpleInt64HashToRange(uint64(v), bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } case types.T_decimal128: groupByCol := vector.MustFixedColNoTypeCheck[types.Decimal128](groupByVec) for row, v := range groupByCol { - regIndex := plan2.SimpleInt64HashToRange(uint64(v.B0_63^v.B64_127), bucketNum) + regIndex := planner.SimpleInt64HashToRange(uint64(v.B0_63^v.B64_127), bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } case types.T_char, types.T_varchar, types.T_text: groupByCol, area := vector.MustVarlenaRawData(groupByVec) for row := range groupByCol { - regIndex := plan2.SimpleCharHashToRange(groupByCol[row].GetByteSlice(area), bucketNum) + regIndex := planner.SimpleCharHashToRange(groupByCol[row].GetByteSlice(area), bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } default: @@ -432,11 +432,11 @@ func allBatchInOneRange(ap *Shuffle, bat *batch.Batch) (bool, uint64) { } case types.T_char, types.T_varchar, types.T_text: groupByCol, area := vector.MustVarlenaRawData(groupByVec) - firstValueUnsigned = plan2.VarlenaToUint64(&groupByCol[0], area) + firstValueUnsigned = planner.VarlenaToUint64(&groupByCol[0], area) if groupByVec.IsConst() { 
lastValueUnsigned = firstValueUnsigned } else { - lastValueUnsigned = plan2.VarlenaToUint64(&groupByCol[groupByVec.Length()-1], area) + lastValueUnsigned = planner.VarlenaToUint64(&groupByCol[groupByVec.Length()-1], area) } default: panic("unsupported shuffle type, wrong plan!") //something got wrong here! @@ -444,17 +444,17 @@ func allBatchInOneRange(ap *Shuffle, bat *batch.Batch) (bool, uint64) { var regIndexFirst, regIndexLast uint64 if ap.ShuffleRangeInt64 != nil { - regIndexFirst = plan2.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, firstValueSigned) - regIndexLast = plan2.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, lastValueSigned) + regIndexFirst = planner.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, firstValueSigned) + regIndexLast = planner.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, lastValueSigned) } else if ap.ShuffleRangeUint64 != nil { - regIndexFirst = plan2.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, firstValueUnsigned) - regIndexLast = plan2.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, lastValueUnsigned) + regIndexFirst = planner.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, firstValueUnsigned) + regIndexLast = planner.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, lastValueUnsigned) } else if signed { - regIndexFirst = plan2.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, firstValueSigned, bucketNum) - regIndexLast = plan2.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, lastValueSigned, bucketNum) + regIndexFirst = planner.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, firstValueSigned, bucketNum) + regIndexLast = planner.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, lastValueSigned, bucketNum) } else { - regIndexFirst = plan2.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), firstValueUnsigned, bucketNum) - regIndexLast = 
plan2.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), lastValueUnsigned, bucketNum) + regIndexFirst = planner.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), firstValueUnsigned, bucketNum) + regIndexLast = planner.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), lastValueUnsigned, bucketNum) } if regIndexFirst == regIndexLast { @@ -473,12 +473,12 @@ func getShuffledSelsByRangeWithoutNull(ap *Shuffle, bat *batch.Batch) [][]int32 groupByCol := vector.MustFixedColNoTypeCheck[int64](groupByVec) if ap.ShuffleRangeInt64 != nil { for row, v := range groupByCol { - regIndex := plan2.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, v) + regIndex := planner.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, v) sels[regIndex] = append(sels[regIndex], int32(row)) } } else { for row, v := range groupByCol { - regIndex := plan2.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, v, bucketNum) + regIndex := planner.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, v, bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } } @@ -486,12 +486,12 @@ func getShuffledSelsByRangeWithoutNull(ap *Shuffle, bat *batch.Batch) [][]int32 groupByCol := vector.MustFixedColNoTypeCheck[int32](groupByVec) if ap.ShuffleRangeInt64 != nil { for row, v := range groupByCol { - regIndex := plan2.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, int64(v)) + regIndex := planner.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, int64(v)) sels[regIndex] = append(sels[regIndex], int32(row)) } } else { for row, v := range groupByCol { - regIndex := plan2.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, int64(v), bucketNum) + regIndex := planner.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, int64(v), bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } } @@ -499,12 +499,12 @@ func 
getShuffledSelsByRangeWithoutNull(ap *Shuffle, bat *batch.Batch) [][]int32 groupByCol := vector.MustFixedColNoTypeCheck[int16](groupByVec) if ap.ShuffleRangeInt64 != nil { for row, v := range groupByCol { - regIndex := plan2.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, int64(v)) + regIndex := planner.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, int64(v)) sels[regIndex] = append(sels[regIndex], int32(row)) } } else { for row, v := range groupByCol { - regIndex := plan2.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, int64(v), bucketNum) + regIndex := planner.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, int64(v), bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } } @@ -512,12 +512,12 @@ func getShuffledSelsByRangeWithoutNull(ap *Shuffle, bat *batch.Batch) [][]int32 groupByCol := vector.MustFixedColNoTypeCheck[uint64](groupByVec) if ap.ShuffleRangeUint64 != nil { for row, v := range groupByCol { - regIndex := plan2.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, v) + regIndex := planner.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, v) sels[regIndex] = append(sels[regIndex], int32(row)) } } else { for row, v := range groupByCol { - regIndex := plan2.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), v, bucketNum) + regIndex := planner.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), v, bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } } @@ -525,12 +525,12 @@ func getShuffledSelsByRangeWithoutNull(ap *Shuffle, bat *batch.Batch) [][]int32 groupByCol := vector.MustFixedColNoTypeCheck[uint32](groupByVec) if ap.ShuffleRangeUint64 != nil { for row, v := range groupByCol { - regIndex := plan2.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, uint64(v)) + regIndex := planner.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, uint64(v)) sels[regIndex] = append(sels[regIndex], 
int32(row)) } } else { for row, v := range groupByCol { - regIndex := plan2.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), uint64(v), bucketNum) + regIndex := planner.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), uint64(v), bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } } @@ -538,12 +538,12 @@ func getShuffledSelsByRangeWithoutNull(ap *Shuffle, bat *batch.Batch) [][]int32 groupByCol := vector.MustFixedColNoTypeCheck[uint16](groupByVec) if ap.ShuffleRangeUint64 != nil { for row, v := range groupByCol { - regIndex := plan2.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, uint64(v)) + regIndex := planner.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, uint64(v)) sels[regIndex] = append(sels[regIndex], int32(row)) } } else { for row, v := range groupByCol { - regIndex := plan2.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), uint64(v), bucketNum) + regIndex := planner.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), uint64(v), bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } } @@ -552,28 +552,28 @@ func getShuffledSelsByRangeWithoutNull(ap *Shuffle, bat *batch.Batch) [][]int32 if area == nil { if ap.ShuffleRangeUint64 != nil { for row := range groupByCol { - v := plan2.VarlenaToUint64Inline(&groupByCol[row]) - regIndex := plan2.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, v) + v := planner.VarlenaToUint64Inline(&groupByCol[row]) + regIndex := planner.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, v) sels[regIndex] = append(sels[regIndex], int32(row)) } } else { for row := range groupByCol { - v := plan2.VarlenaToUint64Inline(&groupByCol[row]) - regIndex := plan2.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), v, bucketNum) + v := planner.VarlenaToUint64Inline(&groupByCol[row]) + regIndex := 
planner.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), v, bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } } } else { if ap.ShuffleRangeUint64 != nil { for row := range groupByCol { - v := plan2.VarlenaToUint64(&groupByCol[row], area) - regIndex := plan2.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, v) + v := planner.VarlenaToUint64(&groupByCol[row], area) + regIndex := planner.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, v) sels[regIndex] = append(sels[regIndex], int32(row)) } } else { for row := range groupByCol { - v := plan2.VarlenaToUint64(&groupByCol[row], area) - regIndex := plan2.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), v, bucketNum) + v := planner.VarlenaToUint64(&groupByCol[row], area) + regIndex := planner.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), v, bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } } @@ -595,7 +595,7 @@ func getShuffledSelsByRangeWithNull(ap *Shuffle, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, v) + regIndex = planner.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, v) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -603,7 +603,7 @@ func getShuffledSelsByRangeWithNull(ap *Shuffle, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, v, bucketNum) + regIndex = planner.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, v, bucketNum) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -614,7 +614,7 @@ func getShuffledSelsByRangeWithNull(ap *Shuffle, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { 
var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, int64(v)) + regIndex = planner.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, int64(v)) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -622,7 +622,7 @@ func getShuffledSelsByRangeWithNull(ap *Shuffle, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, int64(v), bucketNum) + regIndex = planner.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, int64(v), bucketNum) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -633,7 +633,7 @@ func getShuffledSelsByRangeWithNull(ap *Shuffle, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, int64(v)) + regIndex = planner.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, int64(v)) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -641,7 +641,7 @@ func getShuffledSelsByRangeWithNull(ap *Shuffle, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, int64(v), bucketNum) + regIndex = planner.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, int64(v), bucketNum) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -652,7 +652,7 @@ func getShuffledSelsByRangeWithNull(ap *Shuffle, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, v) + regIndex = planner.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, v) } 
sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -660,7 +660,7 @@ func getShuffledSelsByRangeWithNull(ap *Shuffle, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, int64(v), bucketNum) + regIndex = planner.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, int64(v), bucketNum) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -671,7 +671,7 @@ func getShuffledSelsByRangeWithNull(ap *Shuffle, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, uint64(v)) + regIndex = planner.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, uint64(v)) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -679,7 +679,7 @@ func getShuffledSelsByRangeWithNull(ap *Shuffle, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, int64(v), bucketNum) + regIndex = planner.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, int64(v), bucketNum) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -690,7 +690,7 @@ func getShuffledSelsByRangeWithNull(ap *Shuffle, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, uint64(v)) + regIndex = planner.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, uint64(v)) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -698,7 +698,7 @@ func getShuffledSelsByRangeWithNull(ap *Shuffle, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 
if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, int64(v), bucketNum) + regIndex = planner.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, int64(v), bucketNum) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -710,8 +710,8 @@ func getShuffledSelsByRangeWithNull(ap *Shuffle, bat *batch.Batch) [][]int32 { for row := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - v := plan2.VarlenaToUint64Inline(&groupByCol[row]) - regIndex = plan2.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, v) + v := planner.VarlenaToUint64Inline(&groupByCol[row]) + regIndex = planner.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, v) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -719,8 +719,8 @@ func getShuffledSelsByRangeWithNull(ap *Shuffle, bat *batch.Batch) [][]int32 { for row := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - v := plan2.VarlenaToUint64Inline(&groupByCol[row]) - regIndex = plan2.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), v, bucketNum) + v := planner.VarlenaToUint64Inline(&groupByCol[row]) + regIndex = planner.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), v, bucketNum) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -730,8 +730,8 @@ func getShuffledSelsByRangeWithNull(ap *Shuffle, bat *batch.Batch) [][]int32 { for row := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - v := plan2.VarlenaToUint64(&groupByCol[row], area) - regIndex = plan2.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, v) + v := planner.VarlenaToUint64(&groupByCol[row], area) + regIndex = planner.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, v) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -739,8 +739,8 @@ func getShuffledSelsByRangeWithNull(ap 
*Shuffle, bat *batch.Batch) [][]int32 { for row := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - v := plan2.VarlenaToUint64(&groupByCol[row], area) - regIndex = plan2.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), v, bucketNum) + v := planner.VarlenaToUint64(&groupByCol[row], area) + regIndex = planner.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), v, bucketNum) } sels[regIndex] = append(sels[regIndex], int32(row)) } diff --git a/pkg/sql/colexec/shuffleV2/shuffle.go b/pkg/sql/colexec/shuffleV2/shuffle.go index 14ce10a6414da..92e26b0e1939a 100644 --- a/pkg/sql/colexec/shuffleV2/shuffle.go +++ b/pkg/sql/colexec/shuffleV2/shuffle.go @@ -22,7 +22,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/pb/plan" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" ) @@ -141,25 +141,25 @@ func shuffleConstVectorByHash(ap *ShuffleV2, bat *batch.Batch) uint64 { switch groupByVec.GetType().Oid { case types.T_int64: groupByCol := vector.MustFixedColNoTypeCheck[int64](groupByVec) - return plan2.SimpleInt64HashToRange(uint64(groupByCol[0]), lenRegs) + return planner.SimpleInt64HashToRange(uint64(groupByCol[0]), lenRegs) case types.T_int32: groupByCol := vector.MustFixedColNoTypeCheck[int32](groupByVec) - return plan2.SimpleInt64HashToRange(uint64(groupByCol[0]), lenRegs) + return planner.SimpleInt64HashToRange(uint64(groupByCol[0]), lenRegs) case types.T_int16: groupByCol := vector.MustFixedColNoTypeCheck[int16](groupByVec) - return plan2.SimpleInt64HashToRange(uint64(groupByCol[0]), lenRegs) + return planner.SimpleInt64HashToRange(uint64(groupByCol[0]), lenRegs) case types.T_uint64: groupByCol := 
vector.MustFixedColNoTypeCheck[uint64](groupByVec) - return plan2.SimpleInt64HashToRange(uint64(groupByCol[0]), lenRegs) + return planner.SimpleInt64HashToRange(uint64(groupByCol[0]), lenRegs) case types.T_uint32: groupByCol := vector.MustFixedColNoTypeCheck[uint32](groupByVec) - return plan2.SimpleInt64HashToRange(uint64(groupByCol[0]), lenRegs) + return planner.SimpleInt64HashToRange(uint64(groupByCol[0]), lenRegs) case types.T_uint16: groupByCol := vector.MustFixedColNoTypeCheck[uint16](groupByVec) - return plan2.SimpleInt64HashToRange(uint64(groupByCol[0]), lenRegs) + return planner.SimpleInt64HashToRange(uint64(groupByCol[0]), lenRegs) case types.T_char, types.T_varchar, types.T_text: groupByCol, area := vector.MustVarlenaRawData(groupByVec) - return plan2.SimpleCharHashToRange(groupByCol[0].GetByteSlice(area), lenRegs) + return planner.SimpleCharHashToRange(groupByCol[0].GetByteSlice(area), lenRegs) default: panic("unsupported shuffle type, wrong plan!") //something got wrong here! 
} @@ -175,7 +175,7 @@ func getShuffledSelsByHashWithNull(ap *ShuffleV2, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.SimpleInt64HashToRange(uint64(v), lenRegs) + regIndex = planner.SimpleInt64HashToRange(uint64(v), lenRegs) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -184,7 +184,7 @@ func getShuffledSelsByHashWithNull(ap *ShuffleV2, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.SimpleInt64HashToRange(uint64(v), lenRegs) + regIndex = planner.SimpleInt64HashToRange(uint64(v), lenRegs) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -193,7 +193,7 @@ func getShuffledSelsByHashWithNull(ap *ShuffleV2, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.SimpleInt64HashToRange(uint64(v), lenRegs) + regIndex = planner.SimpleInt64HashToRange(uint64(v), lenRegs) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -202,7 +202,7 @@ func getShuffledSelsByHashWithNull(ap *ShuffleV2, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.SimpleInt64HashToRange(v, lenRegs) + regIndex = planner.SimpleInt64HashToRange(v, lenRegs) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -211,7 +211,7 @@ func getShuffledSelsByHashWithNull(ap *ShuffleV2, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.SimpleInt64HashToRange(uint64(v), lenRegs) + regIndex = planner.SimpleInt64HashToRange(uint64(v), lenRegs) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -220,7 +220,7 @@ func getShuffledSelsByHashWithNull(ap *ShuffleV2, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { 
var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.SimpleInt64HashToRange(uint64(v), lenRegs) + regIndex = planner.SimpleInt64HashToRange(uint64(v), lenRegs) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -229,7 +229,7 @@ func getShuffledSelsByHashWithNull(ap *ShuffleV2, bat *batch.Batch) [][]int32 { for row := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.SimpleCharHashToRange(groupByCol[row].GetByteSlice(area), lenRegs) + regIndex = planner.SimpleCharHashToRange(groupByCol[row].GetByteSlice(area), lenRegs) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -247,55 +247,55 @@ func getShuffledSelsByHashWithoutNull(ap *ShuffleV2, bat *batch.Batch) [][]int32 case types.T_int64: groupByCol := vector.MustFixedColNoTypeCheck[int64](groupByVec) for row, v := range groupByCol { - regIndex := plan2.SimpleInt64HashToRange(uint64(v), bucketNum) + regIndex := planner.SimpleInt64HashToRange(uint64(v), bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } case types.T_int32: groupByCol := vector.MustFixedColNoTypeCheck[int32](groupByVec) for row, v := range groupByCol { - regIndex := plan2.SimpleInt64HashToRange(uint64(v), bucketNum) + regIndex := planner.SimpleInt64HashToRange(uint64(v), bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } case types.T_int16: groupByCol := vector.MustFixedColNoTypeCheck[int16](groupByVec) for row, v := range groupByCol { - regIndex := plan2.SimpleInt64HashToRange(uint64(v), bucketNum) + regIndex := planner.SimpleInt64HashToRange(uint64(v), bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } case types.T_uint64: groupByCol := vector.MustFixedColNoTypeCheck[uint64](groupByVec) for row, v := range groupByCol { - regIndex := plan2.SimpleInt64HashToRange(v, bucketNum) + regIndex := planner.SimpleInt64HashToRange(v, bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } case types.T_uint32: groupByCol := 
vector.MustFixedColNoTypeCheck[uint32](groupByVec) for row, v := range groupByCol { - regIndex := plan2.SimpleInt64HashToRange(uint64(v), bucketNum) + regIndex := planner.SimpleInt64HashToRange(uint64(v), bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } case types.T_uint16: groupByCol := vector.MustFixedColNoTypeCheck[uint16](groupByVec) for row, v := range groupByCol { - regIndex := plan2.SimpleInt64HashToRange(uint64(v), bucketNum) + regIndex := planner.SimpleInt64HashToRange(uint64(v), bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } case types.T_decimal64: groupByCol := vector.MustFixedColNoTypeCheck[types.Decimal64](groupByVec) for row, v := range groupByCol { - regIndex := plan2.SimpleInt64HashToRange(uint64(v), bucketNum) + regIndex := planner.SimpleInt64HashToRange(uint64(v), bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } case types.T_decimal128: groupByCol := vector.MustFixedColNoTypeCheck[types.Decimal128](groupByVec) for row, v := range groupByCol { - regIndex := plan2.SimpleInt64HashToRange(uint64(v.B0_63^v.B64_127), bucketNum) + regIndex := planner.SimpleInt64HashToRange(uint64(v.B0_63^v.B64_127), bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } case types.T_char, types.T_varchar, types.T_text: groupByCol, area := vector.MustVarlenaRawData(groupByVec) for row := range groupByCol { - regIndex := plan2.SimpleCharHashToRange(groupByCol[row].GetByteSlice(area), bucketNum) + regIndex := planner.SimpleCharHashToRange(groupByCol[row].GetByteSlice(area), bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } default: @@ -394,11 +394,11 @@ func allBatchInOneRange(ap *ShuffleV2, bat *batch.Batch) (bool, uint64) { } case types.T_char, types.T_varchar, types.T_text: groupByCol, area := vector.MustVarlenaRawData(groupByVec) - firstValueUnsigned = plan2.VarlenaToUint64(&groupByCol[0], area) + firstValueUnsigned = planner.VarlenaToUint64(&groupByCol[0], area) if groupByVec.IsConst() { 
lastValueUnsigned = firstValueUnsigned } else { - lastValueUnsigned = plan2.VarlenaToUint64(&groupByCol[groupByVec.Length()-1], area) + lastValueUnsigned = planner.VarlenaToUint64(&groupByCol[groupByVec.Length()-1], area) } default: panic("unsupported shuffle type, wrong plan!") //something got wrong here! @@ -406,17 +406,17 @@ func allBatchInOneRange(ap *ShuffleV2, bat *batch.Batch) (bool, uint64) { var regIndexFirst, regIndexLast uint64 if ap.ShuffleRangeInt64 != nil { - regIndexFirst = plan2.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, firstValueSigned) - regIndexLast = plan2.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, lastValueSigned) + regIndexFirst = planner.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, firstValueSigned) + regIndexLast = planner.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, lastValueSigned) } else if ap.ShuffleRangeUint64 != nil { - regIndexFirst = plan2.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, firstValueUnsigned) - regIndexLast = plan2.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, lastValueUnsigned) + regIndexFirst = planner.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, firstValueUnsigned) + regIndexLast = planner.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, lastValueUnsigned) } else if signed { - regIndexFirst = plan2.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, firstValueSigned, bucketNum) - regIndexLast = plan2.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, lastValueSigned, bucketNum) + regIndexFirst = planner.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, firstValueSigned, bucketNum) + regIndexLast = planner.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, lastValueSigned, bucketNum) } else { - regIndexFirst = plan2.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), firstValueUnsigned, bucketNum) - regIndexLast = 
plan2.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), lastValueUnsigned, bucketNum) + regIndexFirst = planner.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), firstValueUnsigned, bucketNum) + regIndexLast = planner.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), lastValueUnsigned, bucketNum) } if regIndexFirst == regIndexLast { @@ -435,12 +435,12 @@ func getShuffledSelsByRangeWithoutNull(ap *ShuffleV2, bat *batch.Batch) [][]int3 groupByCol := vector.MustFixedColNoTypeCheck[int64](groupByVec) if ap.ShuffleRangeInt64 != nil { for row, v := range groupByCol { - regIndex := plan2.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, v) + regIndex := planner.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, v) sels[regIndex] = append(sels[regIndex], int32(row)) } } else { for row, v := range groupByCol { - regIndex := plan2.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, v, bucketNum) + regIndex := planner.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, v, bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } } @@ -448,12 +448,12 @@ func getShuffledSelsByRangeWithoutNull(ap *ShuffleV2, bat *batch.Batch) [][]int3 groupByCol := vector.MustFixedColNoTypeCheck[int32](groupByVec) if ap.ShuffleRangeInt64 != nil { for row, v := range groupByCol { - regIndex := plan2.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, int64(v)) + regIndex := planner.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, int64(v)) sels[regIndex] = append(sels[regIndex], int32(row)) } } else { for row, v := range groupByCol { - regIndex := plan2.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, int64(v), bucketNum) + regIndex := planner.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, int64(v), bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } } @@ -461,12 +461,12 @@ func 
getShuffledSelsByRangeWithoutNull(ap *ShuffleV2, bat *batch.Batch) [][]int3 groupByCol := vector.MustFixedColNoTypeCheck[int16](groupByVec) if ap.ShuffleRangeInt64 != nil { for row, v := range groupByCol { - regIndex := plan2.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, int64(v)) + regIndex := planner.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, int64(v)) sels[regIndex] = append(sels[regIndex], int32(row)) } } else { for row, v := range groupByCol { - regIndex := plan2.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, int64(v), bucketNum) + regIndex := planner.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, int64(v), bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } } @@ -474,12 +474,12 @@ func getShuffledSelsByRangeWithoutNull(ap *ShuffleV2, bat *batch.Batch) [][]int3 groupByCol := vector.MustFixedColNoTypeCheck[uint64](groupByVec) if ap.ShuffleRangeUint64 != nil { for row, v := range groupByCol { - regIndex := plan2.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, v) + regIndex := planner.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, v) sels[regIndex] = append(sels[regIndex], int32(row)) } } else { for row, v := range groupByCol { - regIndex := plan2.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), v, bucketNum) + regIndex := planner.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), v, bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } } @@ -487,12 +487,12 @@ func getShuffledSelsByRangeWithoutNull(ap *ShuffleV2, bat *batch.Batch) [][]int3 groupByCol := vector.MustFixedColNoTypeCheck[uint32](groupByVec) if ap.ShuffleRangeUint64 != nil { for row, v := range groupByCol { - regIndex := plan2.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, uint64(v)) + regIndex := planner.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, uint64(v)) sels[regIndex] = append(sels[regIndex], 
int32(row)) } } else { for row, v := range groupByCol { - regIndex := plan2.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), uint64(v), bucketNum) + regIndex := planner.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), uint64(v), bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } } @@ -500,12 +500,12 @@ func getShuffledSelsByRangeWithoutNull(ap *ShuffleV2, bat *batch.Batch) [][]int3 groupByCol := vector.MustFixedColNoTypeCheck[uint16](groupByVec) if ap.ShuffleRangeUint64 != nil { for row, v := range groupByCol { - regIndex := plan2.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, uint64(v)) + regIndex := planner.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, uint64(v)) sels[regIndex] = append(sels[regIndex], int32(row)) } } else { for row, v := range groupByCol { - regIndex := plan2.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), uint64(v), bucketNum) + regIndex := planner.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), uint64(v), bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } } @@ -514,28 +514,28 @@ func getShuffledSelsByRangeWithoutNull(ap *ShuffleV2, bat *batch.Batch) [][]int3 if area == nil { if ap.ShuffleRangeUint64 != nil { for row := range groupByCol { - v := plan2.VarlenaToUint64Inline(&groupByCol[row]) - regIndex := plan2.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, v) + v := planner.VarlenaToUint64Inline(&groupByCol[row]) + regIndex := planner.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, v) sels[regIndex] = append(sels[regIndex], int32(row)) } } else { for row := range groupByCol { - v := plan2.VarlenaToUint64Inline(&groupByCol[row]) - regIndex := plan2.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), v, bucketNum) + v := planner.VarlenaToUint64Inline(&groupByCol[row]) + regIndex := 
planner.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), v, bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } } } else { if ap.ShuffleRangeUint64 != nil { for row := range groupByCol { - v := plan2.VarlenaToUint64(&groupByCol[row], area) - regIndex := plan2.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, v) + v := planner.VarlenaToUint64(&groupByCol[row], area) + regIndex := planner.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, v) sels[regIndex] = append(sels[regIndex], int32(row)) } } else { for row := range groupByCol { - v := plan2.VarlenaToUint64(&groupByCol[row], area) - regIndex := plan2.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), v, bucketNum) + v := planner.VarlenaToUint64(&groupByCol[row], area) + regIndex := planner.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), v, bucketNum) sels[regIndex] = append(sels[regIndex], int32(row)) } } @@ -557,7 +557,7 @@ func getShuffledSelsByRangeWithNull(ap *ShuffleV2, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, v) + regIndex = planner.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, v) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -565,7 +565,7 @@ func getShuffledSelsByRangeWithNull(ap *ShuffleV2, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, v, bucketNum) + regIndex = planner.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, v, bucketNum) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -576,7 +576,7 @@ func getShuffledSelsByRangeWithNull(ap *ShuffleV2, bat *batch.Batch) [][]int32 { for row, v := range 
groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, int64(v)) + regIndex = planner.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, int64(v)) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -584,7 +584,7 @@ func getShuffledSelsByRangeWithNull(ap *ShuffleV2, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, int64(v), bucketNum) + regIndex = planner.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, int64(v), bucketNum) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -595,7 +595,7 @@ func getShuffledSelsByRangeWithNull(ap *ShuffleV2, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, int64(v)) + regIndex = planner.GetRangeShuffleIndexSignedSlice(ap.ShuffleRangeInt64, int64(v)) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -603,7 +603,7 @@ func getShuffledSelsByRangeWithNull(ap *ShuffleV2, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, int64(v), bucketNum) + regIndex = planner.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, int64(v), bucketNum) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -614,7 +614,7 @@ func getShuffledSelsByRangeWithNull(ap *ShuffleV2, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, v) + regIndex = 
planner.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, v) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -622,7 +622,7 @@ func getShuffledSelsByRangeWithNull(ap *ShuffleV2, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, int64(v), bucketNum) + regIndex = planner.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, int64(v), bucketNum) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -633,7 +633,7 @@ func getShuffledSelsByRangeWithNull(ap *ShuffleV2, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, uint64(v)) + regIndex = planner.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, uint64(v)) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -641,7 +641,7 @@ func getShuffledSelsByRangeWithNull(ap *ShuffleV2, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, int64(v), bucketNum) + regIndex = planner.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, int64(v), bucketNum) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -652,7 +652,7 @@ func getShuffledSelsByRangeWithNull(ap *ShuffleV2, bat *batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, uint64(v)) + regIndex = planner.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, uint64(v)) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -660,7 +660,7 @@ func getShuffledSelsByRangeWithNull(ap *ShuffleV2, bat 
*batch.Batch) [][]int32 { for row, v := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - regIndex = plan2.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, int64(v), bucketNum) + regIndex = planner.GetRangeShuffleIndexSignedMinMax(ap.ShuffleColMin, ap.ShuffleColMax, int64(v), bucketNum) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -672,8 +672,8 @@ func getShuffledSelsByRangeWithNull(ap *ShuffleV2, bat *batch.Batch) [][]int32 { for row := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - v := plan2.VarlenaToUint64Inline(&groupByCol[row]) - regIndex = plan2.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, v) + v := planner.VarlenaToUint64Inline(&groupByCol[row]) + regIndex = planner.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, v) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -681,8 +681,8 @@ func getShuffledSelsByRangeWithNull(ap *ShuffleV2, bat *batch.Batch) [][]int32 { for row := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - v := plan2.VarlenaToUint64Inline(&groupByCol[row]) - regIndex = plan2.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), v, bucketNum) + v := planner.VarlenaToUint64Inline(&groupByCol[row]) + regIndex = planner.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), v, bucketNum) } sels[regIndex] = append(sels[regIndex], int32(row)) } @@ -692,8 +692,8 @@ func getShuffledSelsByRangeWithNull(ap *ShuffleV2, bat *batch.Batch) [][]int32 { for row := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - v := plan2.VarlenaToUint64(&groupByCol[row], area) - regIndex = plan2.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, v) + v := planner.VarlenaToUint64(&groupByCol[row], area) + regIndex = planner.GetRangeShuffleIndexUnsignedSlice(ap.ShuffleRangeUint64, v) } sels[regIndex] = 
append(sels[regIndex], int32(row)) } @@ -701,8 +701,8 @@ func getShuffledSelsByRangeWithNull(ap *ShuffleV2, bat *batch.Batch) [][]int32 { for row := range groupByCol { var regIndex uint64 = 0 if !groupByVec.IsNull(uint64(row)) { - v := plan2.VarlenaToUint64(&groupByCol[row], area) - regIndex = plan2.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), v, bucketNum) + v := planner.VarlenaToUint64(&groupByCol[row], area) + regIndex = planner.GetRangeShuffleIndexUnsignedMinMax(uint64(ap.ShuffleColMin), uint64(ap.ShuffleColMax), v, bucketNum) } sels[regIndex] = append(sels[regIndex], int32(row)) } diff --git a/pkg/sql/colexec/table_function/generate_series.go b/pkg/sql/colexec/table_function/generate_series.go index f529cd8d16f64..e38e2848cc543 100644 --- a/pkg/sql/colexec/table_function/generate_series.go +++ b/pkg/sql/colexec/table_function/generate_series.go @@ -23,7 +23,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" ) @@ -243,7 +243,7 @@ func (g *generateSeriesArg) start(tf *TableFunction, proc *process.Process, nthR // reset schema typ := types.T_datetime.ToType() typ.Scale = g.dtState.scale - tf.Rets[0].Typ = plan2.MakePlan2Type(&typ) + tf.Rets[0].Typ = planner.MakePlan2Type(&typ) tf.ctr.retSchema[0] = typ default: return moerr.NewNotSupportedf(proc.Ctx, "generate_series not support type %s", resTyp.Oid.String()) diff --git a/pkg/sql/colexec/table_function/hnsw_create_f64_test.go b/pkg/sql/colexec/table_function/hnsw_create_f64_test.go index a7c53ada6a4e9..65426e73d4401 100644 --- a/pkg/sql/colexec/table_function/hnsw_create_f64_test.go +++ b/pkg/sql/colexec/table_function/hnsw_create_f64_test.go @@ 
-24,7 +24,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/pb/plan" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" @@ -222,7 +222,7 @@ func makeConstInputExprsHnswCreateF64() []*plan.Expr { }, }, - plan2.MakePlan2Vecf64ConstExprWithType("[0,1,2]", 3), + planner.MakePlan2Vecf64ConstExprWithType("[0,1,2]", 3), } return ret @@ -282,7 +282,7 @@ func makeBatchHnswCreateFailF64(proc *process.Process) []failBatch { }, }, - plan2.MakePlan2Vecf64ConstExprWithType("[0,1,2]", 3), + planner.MakePlan2Vecf64ConstExprWithType("[0,1,2]", 3), } bat := batch.NewWithSize(3) @@ -332,7 +332,7 @@ func makeBatchHnswCreateFailF64(proc *process.Process) []failBatch { }, }, - plan2.MakePlan2Vecf64ConstExprWithType("[0,1,2]", 3), + planner.MakePlan2Vecf64ConstExprWithType("[0,1,2]", 3), } bat := batch.NewWithSize(3) @@ -382,7 +382,7 @@ func makeBatchHnswCreateFailF64(proc *process.Process) []failBatch { }, }, - plan2.MakePlan2Vecf64ConstExprWithType("[0,1,2]", 3), + planner.MakePlan2Vecf64ConstExprWithType("[0,1,2]", 3), } bat := batch.NewWithSize(3) @@ -492,7 +492,7 @@ func makeBatchHnswCreateFailF64(proc *process.Process) []failBatch { }, }, - plan2.MakePlan2Vecf64ConstExprWithType("[0,1]", 3), + planner.MakePlan2Vecf64ConstExprWithType("[0,1]", 3), } bat := batch.NewWithSize(3) diff --git a/pkg/sql/colexec/table_function/hnsw_create_test.go b/pkg/sql/colexec/table_function/hnsw_create_test.go index 2d3467605c8cb..4fcacccb79ea5 100644 --- a/pkg/sql/colexec/table_function/hnsw_create_test.go +++ b/pkg/sql/colexec/table_function/hnsw_create_test.go @@ -24,7 +24,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" 
"github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/pb/plan" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/util/executor" "github.com/matrixorigin/matrixone/pkg/vectorindex/sqlexec" @@ -256,7 +256,7 @@ func makeConstInputExprsHnswCreate() []*plan.Expr { }, }, - plan2.MakePlan2Vecf32ConstExprWithType("[0,1,2]", 3), + planner.MakePlan2Vecf32ConstExprWithType("[0,1,2]", 3), } return ret @@ -321,7 +321,7 @@ func makeBatchHnswCreateFail(proc *process.Process) []failBatch { }, }, - plan2.MakePlan2Vecf32ConstExprWithType("[0,1,2]", 3), + planner.MakePlan2Vecf32ConstExprWithType("[0,1,2]", 3), } bat := batch.NewWithSize(3) @@ -371,7 +371,7 @@ func makeBatchHnswCreateFail(proc *process.Process) []failBatch { }, }, - plan2.MakePlan2Vecf32ConstExprWithType("[0,1,2]", 3), + planner.MakePlan2Vecf32ConstExprWithType("[0,1,2]", 3), } bat := batch.NewWithSize(3) @@ -421,7 +421,7 @@ func makeBatchHnswCreateFail(proc *process.Process) []failBatch { }, }, - plan2.MakePlan2Vecf32ConstExprWithType("[0,1,2]", 3), + planner.MakePlan2Vecf32ConstExprWithType("[0,1,2]", 3), } bat := batch.NewWithSize(3) @@ -531,7 +531,7 @@ func makeBatchHnswCreateFail(proc *process.Process) []failBatch { }, }, - plan2.MakePlan2Vecf32ConstExprWithType("[0,1]", 3), + planner.MakePlan2Vecf32ConstExprWithType("[0,1]", 3), } bat := batch.NewWithSize(3) diff --git a/pkg/sql/colexec/table_function/hnsw_search_f64_test.go b/pkg/sql/colexec/table_function/hnsw_search_f64_test.go index 81064e6690c9b..b18e708b6eba5 100644 --- a/pkg/sql/colexec/table_function/hnsw_search_f64_test.go +++ b/pkg/sql/colexec/table_function/hnsw_search_f64_test.go @@ -24,7 +24,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" 
"github.com/matrixorigin/matrixone/pkg/pb/plan" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" @@ -209,7 +209,7 @@ func makeConstInputExprsHnswSearchF64() []*plan.Expr { }, }, - plan2.MakePlan2Vecf64ConstExprWithType("[0,1,2]", 3), + planner.MakePlan2Vecf64ConstExprWithType("[0,1,2]", 3), } return ret @@ -253,7 +253,7 @@ func makeBatchHnswSearchFailF64(proc *process.Process) []failBatch { }, }, - plan2.MakePlan2Vecf64ConstExprWithType("[0,1,2]", 3), + planner.MakePlan2Vecf64ConstExprWithType("[0,1,2]", 3), } bat := batch.NewWithSize(2) @@ -288,7 +288,7 @@ func makeBatchHnswSearchFailF64(proc *process.Process) []failBatch { }, }, - plan2.MakePlan2Vecf64ConstExprWithType("[0,1,2]", 3), + planner.MakePlan2Vecf64ConstExprWithType("[0,1,2]", 3), } bat := batch.NewWithSize(2) diff --git a/pkg/sql/colexec/table_function/hnsw_search_test.go b/pkg/sql/colexec/table_function/hnsw_search_test.go index fba55b4cbba79..0468429fcfecb 100644 --- a/pkg/sql/colexec/table_function/hnsw_search_test.go +++ b/pkg/sql/colexec/table_function/hnsw_search_test.go @@ -24,7 +24,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/pb/plan" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/vectorindex" "github.com/matrixorigin/matrixone/pkg/vectorindex/cache" @@ -276,7 +276,7 @@ func makeConstInputExprsHnswSearch() []*plan.Expr { }, }, - plan2.MakePlan2Vecf32ConstExprWithType("[0,1,2]", 3), + planner.MakePlan2Vecf32ConstExprWithType("[0,1,2]", 3), } return ret @@ -320,7 +320,7 @@ func makeBatchHnswSearchFail(proc *process.Process) 
[]failBatch { }, }, - plan2.MakePlan2Vecf32ConstExprWithType("[0,1,2]", 3), + planner.MakePlan2Vecf32ConstExprWithType("[0,1,2]", 3), } bat := batch.NewWithSize(2) @@ -355,7 +355,7 @@ func makeBatchHnswSearchFail(proc *process.Process) []failBatch { }, }, - plan2.MakePlan2Vecf32ConstExprWithType("[0,1,2]", 3), + planner.MakePlan2Vecf32ConstExprWithType("[0,1,2]", 3), } bat := batch.NewWithSize(2) @@ -436,7 +436,7 @@ func makeBatchHnswSearchFail(proc *process.Process) []failBatch { }, }, - plan2.MakePlan2Vecf32ConstExprWithType("[0,1]", 3), + planner.MakePlan2Vecf32ConstExprWithType("[0,1]", 3), } bat := batch.NewWithSize(2) diff --git a/pkg/sql/colexec/table_function/ivf_search_test.go b/pkg/sql/colexec/table_function/ivf_search_test.go index 265fcd2da8ba9..d6f14b63cbb1d 100644 --- a/pkg/sql/colexec/table_function/ivf_search_test.go +++ b/pkg/sql/colexec/table_function/ivf_search_test.go @@ -24,7 +24,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/pb/plan" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/util/executor" "github.com/matrixorigin/matrixone/pkg/vectorindex" @@ -286,7 +286,7 @@ func makeConstInputExprsIvfSearch() []*plan.Expr { }, }, - plan2.MakePlan2Vecf32ConstExprWithType("[0,1,2]", 3), + planner.MakePlan2Vecf32ConstExprWithType("[0,1,2]", 3), } return ret @@ -330,7 +330,7 @@ func makeBatchIvfSearchFail(proc *process.Process) []failBatch { }, }, - plan2.MakePlan2Vecf32ConstExprWithType("[0,1,2]", 3), + planner.MakePlan2Vecf32ConstExprWithType("[0,1,2]", 3), } bat := batch.NewWithSize(2) @@ -365,7 +365,7 @@ func makeBatchIvfSearchFail(proc *process.Process) []failBatch { }, }, - plan2.MakePlan2Vecf32ConstExprWithType("[0,1,2]", 3), + planner.MakePlan2Vecf32ConstExprWithType("[0,1,2]", 3), 
} bat := batch.NewWithSize(2) diff --git a/pkg/sql/colexec/table_function/parsejsonl.go b/pkg/sql/colexec/table_function/parsejsonl.go index 0a95ae835ac42..f9b3a5c6d9851 100644 --- a/pkg/sql/colexec/table_function/parsejsonl.go +++ b/pkg/sql/colexec/table_function/parsejsonl.go @@ -26,7 +26,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/sql/colexec" "github.com/matrixorigin/matrixone/pkg/sql/crt" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" json2 "github.com/segmentio/encoding/json" @@ -38,7 +38,7 @@ type parseJsonlState struct { scanner *bufio.Scanner batch *batch.Batch fromData bool - opts plan.ParseJsonlOptions + opts planner.ParseJsonlOptions appender []func(proc *process.Process, vec *vector.Vector, v any) error } @@ -61,19 +61,19 @@ func parseJsonlPrepare(fromData bool, proc *process.Process, tblArg *TableFuncti st.appender = make([]func(proc *process.Process, vec *vector.Vector, v any) error, len(st.opts.Cols)) for i, col := range st.opts.Cols { switch col.Type { - case plan.ParseJsonlTypeBool: + case planner.ParseJsonlTypeBool: st.appender[i] = typedAppend_bool - case plan.ParseJsonlTypeInt32: + case planner.ParseJsonlTypeInt32: st.appender[i] = typedAppend_int32 - case plan.ParseJsonlTypeInt64: + case planner.ParseJsonlTypeInt64: st.appender[i] = typedAppend_int64 - case plan.ParseJsonlTypeFloat32: + case planner.ParseJsonlTypeFloat32: st.appender[i] = typedAppend_float32 - case plan.ParseJsonlTypeFloat64: + case planner.ParseJsonlTypeFloat64: st.appender[i] = typedAppend_float64 - case plan.ParseJsonlTypeString: + case planner.ParseJsonlTypeString: st.appender[i] = typedAppend_string - case plan.ParseJsonlTypeTimestamp: + case planner.ParseJsonlTypeTimestamp: st.appender[i] = typedAppend_timestamp default: // Should never reach here. 
@@ -123,7 +123,7 @@ func (st *parseJsonlState) start(tf *TableFunction, proc *process.Process, nthRo } } - if st.opts.Format == plan.ParseJsonlFormatLine { + if st.opts.Format == planner.ParseJsonlFormatLine { st.scanner = bufio.NewScanner(st.reader) } else { st.dec = json2.NewDecoder(st.reader) @@ -172,7 +172,7 @@ func (st *parseJsonlState) call(tf *TableFunction, proc *process.Process) (vm.Ca var cnt int switch st.opts.Format { - case plan.ParseJsonlFormatLine: + case planner.ParseJsonlFormatLine: for st.scanner.Scan() { line := st.scanner.Text() vector.AppendBytes(st.batch.Vecs[0], []byte(line), false, proc.Mp()) @@ -181,7 +181,7 @@ func (st *parseJsonlState) call(tf *TableFunction, proc *process.Process) (vm.Ca break } } - case plan.ParseJsonlFormatArray: + case planner.ParseJsonlFormatArray: var vv [][]any for { var v []any @@ -225,7 +225,7 @@ func (st *parseJsonlState) call(tf *TableFunction, proc *process.Process) (vm.Ca } } - case plan.ParseJsonlFormatObject: + case planner.ParseJsonlFormatObject: var objs []map[string]any for { var v map[string]any diff --git a/pkg/sql/colexec/table_function/system_view.go b/pkg/sql/colexec/table_function/system_view.go index 17a7386db03d1..6ada0ed9497a6 100644 --- a/pkg/sql/colexec/table_function/system_view.go +++ b/pkg/sql/colexec/table_function/system_view.go @@ -34,7 +34,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/pb/query" "github.com/matrixorigin/matrixone/pkg/queryservice" qclient "github.com/matrixorigin/matrixone/pkg/queryservice/client" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/vm/engine/disttae" "github.com/matrixorigin/matrixone/pkg/vm/process" ) @@ -87,11 +87,11 @@ func moLocksPrepare(proc *process.Process, tf *TableFunction) (tvfState, error) tf.ctr.retSchema = make([]types.Type, len(tf.Attrs)) for i, col := range tf.Attrs { col = strings.ToLower(col) - idx, ok := 
plan2.MoLocksColName2Index[col] + idx, ok := planner.MoLocksColName2Index[col] if !ok { return nil, moerr.NewInternalErrorf(proc.Ctx, "invalid column name %s", col) } - tf.ctr.retSchema[i] = plan2.MoLocksColTypes[idx] + tf.ctr.retSchema[i] = planner.MoLocksColTypes[idx] } } if len(tf.ctr.retSchema) != len(tf.Attrs) { @@ -152,17 +152,17 @@ func (s *moLocksState) start(tf *TableFunction, proc *process.Process, nthRow in wList := lock.GetWaiters() wLen := len(wList) - record := make([][]byte, len(plan2.MoLocksColNames)) - record[plan2.MoLocksColTypeCnId] = []byte(cnId) - //record[plan2.MoLocksColTypeSessionId] = []byte(sessionId) - record[plan2.MoLocksColTypeTxnId] = []byte(txnId) - record[plan2.MoLocksColTypeTableId] = []byte(tableId) - //record[plan2.MoLocksColTypeTableName] = []byte(tableName) - record[plan2.MoLocksColTypeLockKey] = []byte(lockKey) - record[plan2.MoLocksColTypeLockContent] = []byte(lockContent) - record[plan2.MoLocksColTypeLockMode] = []byte(lockMode) - record[plan2.MoLocksColTypeLockStatus] = []byte(lockStatus) - record[plan2.MoLocksColTypeLockWait] = []byte(lockWait) + record := make([][]byte, len(planner.MoLocksColNames)) + record[planner.MoLocksColTypeCnId] = []byte(cnId) + //record[planner.MoLocksColTypeSessionId] = []byte(sessionId) + record[planner.MoLocksColTypeTxnId] = []byte(txnId) + record[planner.MoLocksColTypeTableId] = []byte(tableId) + //record[planner.MoLocksColTypeTableName] = []byte(tableName) + record[planner.MoLocksColTypeLockKey] = []byte(lockKey) + record[planner.MoLocksColTypeLockContent] = []byte(lockContent) + record[planner.MoLocksColTypeLockMode] = []byte(lockMode) + record[planner.MoLocksColTypeLockStatus] = []byte(lockStatus) + record[planner.MoLocksColTypeLockWait] = []byte(lockWait) if hLen == 0 && wLen == 0 { //one record if err := fillLockRecord(proc, tf.Attrs, bat, record); err != nil { @@ -171,7 +171,7 @@ func (s *moLocksState) start(tf *TableFunction, proc *process.Process, nthRow in } else if hLen == 0 && wLen 
!= 0 { //wLen records for j := 0; j < wLen; j++ { - record[plan2.MoLocksColTypeLockWait] = []byte(hex.EncodeToString(wList[j].GetTxnID())) + record[planner.MoLocksColTypeLockWait] = []byte(hex.EncodeToString(wList[j].GetTxnID())) if err := fillLockRecord(proc, tf.Attrs, bat, record); err != nil { return err } @@ -179,7 +179,7 @@ func (s *moLocksState) start(tf *TableFunction, proc *process.Process, nthRow in } else if hLen != 0 && wLen == 0 { //hLen records for j := 0; j < hLen; j++ { - record[plan2.MoLocksColTypeTxnId] = []byte(hex.EncodeToString(hList[j].GetTxnID())) + record[planner.MoLocksColTypeTxnId] = []byte(hex.EncodeToString(hList[j].GetTxnID())) if err := fillLockRecord(proc, tf.Attrs, bat, record); err != nil { return err } @@ -188,8 +188,8 @@ func (s *moLocksState) start(tf *TableFunction, proc *process.Process, nthRow in //hLen * wLen records for j := 0; j < hLen; j++ { for k := 0; k < wLen; k++ { - record[plan2.MoLocksColTypeTxnId] = []byte(hex.EncodeToString(hList[j].GetTxnID())) - record[plan2.MoLocksColTypeLockWait] = []byte(hex.EncodeToString(wList[k].GetTxnID())) + record[planner.MoLocksColTypeTxnId] = []byte(hex.EncodeToString(hList[j].GetTxnID())) + record[planner.MoLocksColTypeLockWait] = []byte(hex.EncodeToString(wList[k].GetTxnID())) if err := fillLockRecord(proc, tf.Attrs, bat, record); err != nil { return err } @@ -205,7 +205,7 @@ func (s *moLocksState) start(tf *TableFunction, proc *process.Process, nthRow in func fillLockRecord(proc *process.Process, attrs []string, bat *batch.Batch, record [][]byte) error { for colIdx, attr := range attrs { - realColIdx := plan2.MoLocksColName2Index[strings.ToLower(attr)] + realColIdx := planner.MoLocksColName2Index[strings.ToLower(attr)] if err := vector.AppendBytes(bat.Vecs[colIdx], record[realColIdx], false, proc.GetMPool()); err != nil { return err } @@ -255,11 +255,11 @@ func moConfigurationsPrepare(proc *process.Process, tf *TableFunction) (tvfState tf.ctr.retSchema = make([]types.Type, 
len(tf.Attrs)) for i, col := range tf.Attrs { col = strings.ToLower(col) - idx, ok := plan2.MoConfigColName2Index[col] + idx, ok := planner.MoConfigColName2Index[col] if !ok { return nil, moerr.NewInternalErrorf(proc.Ctx, "invalid column name %s", col) } - tf.ctr.retSchema[i] = plan2.MoConfigColTypes[idx] + tf.ctr.retSchema[i] = planner.MoConfigColTypes[idx] } } if len(tf.ctr.retSchema) != len(tf.Attrs) { @@ -338,28 +338,28 @@ func fillMapToBatch(nodeType, nodeId string, attrs []string, kvs map[string]*log for _, value := range kvs { for i, col := range attrs { col = strings.ToLower(col) - switch plan2.MoConfigColType(plan2.MoConfigColName2Index[col]) { - case plan2.MoConfigColTypeNodeType: + switch planner.MoConfigColType(planner.MoConfigColName2Index[col]) { + case planner.MoConfigColTypeNodeType: if err = vector.AppendBytes(bat.Vecs[i], []byte(nodeType), false, mp); err != nil { return err } - case plan2.MoConfigColTypeNodeId: + case planner.MoConfigColTypeNodeId: if err = vector.AppendBytes(bat.Vecs[i], []byte(nodeId), false, mp); err != nil { return err } - case plan2.MoConfigColTypeName: + case planner.MoConfigColTypeName: if err = vector.AppendBytes(bat.Vecs[i], []byte(value.GetName()), false, mp); err != nil { return err } - case plan2.MoConfigColTypeCurrentValue: + case planner.MoConfigColTypeCurrentValue: if err = vector.AppendBytes(bat.Vecs[i], []byte(value.GetCurrentValue()), false, mp); err != nil { return err } - case plan2.MoConfigColTypeDefaultValue: + case planner.MoConfigColTypeDefaultValue: if err = vector.AppendBytes(bat.Vecs[i], []byte(value.GetDefaultValue()), false, mp); err != nil { return err } - case plan2.MoConfigColTypeInternal: + case planner.MoConfigColTypeInternal: if err = vector.AppendBytes(bat.Vecs[i], []byte(value.GetInternal()), false, mp); err != nil { return err } @@ -378,11 +378,11 @@ func moTransactionsPrepare(proc *process.Process, tf *TableFunction) (tvfState, tf.ctr.retSchema = make([]types.Type, len(tf.Attrs)) for i, col 
:= range tf.Attrs { col = strings.ToLower(col) - idx, ok := plan2.MoTransactionsColName2Index[col] + idx, ok := planner.MoTransactionsColName2Index[col] if !ok { return nil, moerr.NewInternalErrorf(proc.Ctx, "invalid column name %s", col) } - tf.ctr.retSchema[i] = plan2.MoTransactionsColTypes[idx] + tf.ctr.retSchema[i] = planner.MoTransactionsColTypes[idx] } } if len(tf.ctr.retSchema) != len(tf.Attrs) { @@ -465,24 +465,24 @@ func (s *moTransactionsState) start(tf *TableFunction, proc *process.Process, nt } waitLocksCnt := len(txn.GetWaitLocks()) - record := make([][]byte, len(plan2.MoTransactionsColNames)) - record[plan2.MoTransactionsColTypeCnId] = []byte(cnId) - record[plan2.MoTransactionsColTypeTxnId] = []byte(txnId) - record[plan2.MoTransactionsColTypeCreateTs] = []byte(createTs) - record[plan2.MoTransactionsColTypeSnapshotTs] = []byte(snapshotTs) - record[plan2.MoTransactionsColTypePreparedTs] = []byte(preparedTs) - record[plan2.MoTransactionsColTypeCommitTs] = []byte(commitTs) - record[plan2.MoTransactionsColTypeTxnMode] = []byte(txnMode) - record[plan2.MoTransactionsColTypeIsolation] = []byte(isolation) - record[plan2.MoTransactionsColTypeUserTxn] = []byte(userTxn) - record[plan2.MoTransactionsColTypeTxnStatus] = []byte(txnStatus) + record := make([][]byte, len(planner.MoTransactionsColNames)) + record[planner.MoTransactionsColTypeCnId] = []byte(cnId) + record[planner.MoTransactionsColTypeTxnId] = []byte(txnId) + record[planner.MoTransactionsColTypeCreateTs] = []byte(createTs) + record[planner.MoTransactionsColTypeSnapshotTs] = []byte(snapshotTs) + record[planner.MoTransactionsColTypePreparedTs] = []byte(preparedTs) + record[planner.MoTransactionsColTypeCommitTs] = []byte(commitTs) + record[planner.MoTransactionsColTypeTxnMode] = []byte(txnMode) + record[planner.MoTransactionsColTypeIsolation] = []byte(isolation) + record[planner.MoTransactionsColTypeUserTxn] = []byte(userTxn) + record[planner.MoTransactionsColTypeTxnStatus] = []byte(txnStatus) if 
waitLocksCnt == 0 { //one record - record[plan2.MoTransactionsColTypeTableId] = []byte{} - record[plan2.MoTransactionsColTypeLockKey] = []byte{} - record[plan2.MoTransactionsColTypeLockContent] = []byte{} - record[plan2.MoTransactionsColTypeLockMode] = []byte{} + record[planner.MoTransactionsColTypeTableId] = []byte{} + record[planner.MoTransactionsColTypeLockKey] = []byte{} + record[planner.MoTransactionsColTypeLockContent] = []byte{} + record[planner.MoTransactionsColTypeLockMode] = []byte{} if err := fillTxnRecord(proc, tf.Attrs, bat, record); err != nil { return err @@ -497,14 +497,14 @@ func (s *moTransactionsState) start(tf *TableFunction, proc *process.Process, nt //table id tableId := fmt.Sprintf("%d", lock.GetTableId()) - record[plan2.MoTransactionsColTypeTableId] = []byte(tableId) + record[planner.MoTransactionsColTypeTableId] = []byte(tableId) //lock key lockKey := "point" if options.GetGranularity() == pblock.Granularity_Range { lockKey = "range" } - record[plan2.MoTransactionsColTypeLockKey] = []byte(lockKey) + record[planner.MoTransactionsColTypeLockKey] = []byte(lockKey) //lock content lockContent := "" @@ -515,11 +515,11 @@ func (s *moTransactionsState) start(tf *TableFunction, proc *process.Process, nt } else { lockContent = hex.EncodeToString(getPointContent(lock)) } - record[plan2.MoTransactionsColTypeLockContent] = []byte(lockContent) + record[planner.MoTransactionsColTypeLockContent] = []byte(lockContent) //lock mode lockMode := options.GetMode().String() - record[plan2.MoTransactionsColTypeLockMode] = []byte(lockMode) + record[planner.MoTransactionsColTypeLockMode] = []byte(lockMode) if err := fillTxnRecord(proc, tf.Attrs, bat, record); err != nil { return err @@ -535,7 +535,7 @@ func (s *moTransactionsState) start(tf *TableFunction, proc *process.Process, nt func fillTxnRecord(proc *process.Process, attrs []string, bat *batch.Batch, record [][]byte) error { for colIdx, attr := range attrs { - realColIdx := 
plan2.MoTransactionsColName2Index[strings.ToLower(attr)] + realColIdx := planner.MoTransactionsColName2Index[strings.ToLower(attr)] if err := vector.AppendBytes(bat.Vecs[colIdx], record[realColIdx], false, proc.GetMPool()); err != nil { return err } @@ -584,11 +584,11 @@ func moCachePrepare(proc *process.Process, tf *TableFunction) (tvfState, error) tf.ctr.retSchema = make([]types.Type, len(tf.Attrs)) for i, col := range tf.Attrs { col = strings.ToLower(col) - idx, ok := plan2.MoCacheColName2Index[col] + idx, ok := planner.MoCacheColName2Index[col] if !ok { return nil, moerr.NewInternalErrorf(proc.Ctx, "invalid column name %s", col) } - tf.ctr.retSchema[i] = plan2.MoCacheColTypes[idx] + tf.ctr.retSchema[i] = planner.MoCacheColTypes[idx] } return &moCacheState{}, nil } @@ -625,28 +625,28 @@ func (s *moCacheState) start(tf *TableFunction, proc *process.Process, nthRow in func fillCacheRecord(proc *process.Process, attrs []string, bat *batch.Batch, cache *query.CacheInfo) error { var err error for colIdx, attr := range attrs { - switch plan2.MoCacheColType(plan2.MoCacheColName2Index[strings.ToLower(attr)]) { - case plan2.MoCacheColTypeNodeType: + switch planner.MoCacheColType(planner.MoCacheColName2Index[strings.ToLower(attr)]) { + case planner.MoCacheColTypeNodeType: if err = vector.AppendBytes(bat.Vecs[colIdx], []byte(cache.GetNodeType()), false, proc.GetMPool()); err != nil { return err } - case plan2.MoCacheColTypeNodeId: + case planner.MoCacheColTypeNodeId: if err = vector.AppendBytes(bat.Vecs[colIdx], []byte(cache.GetNodeId()), false, proc.GetMPool()); err != nil { return err } - case plan2.MoCacheColTypeType: + case planner.MoCacheColTypeType: if err = vector.AppendBytes(bat.Vecs[colIdx], []byte(cache.GetCacheType()), false, proc.GetMPool()); err != nil { return err } - case plan2.MoCacheColTypeUsed: + case planner.MoCacheColTypeUsed: if err = vector.AppendFixed(bat.Vecs[colIdx], cache.GetUsed(), false, proc.GetMPool()); err != nil { return err } - case 
plan2.MoCacheColTypeFree: + case planner.MoCacheColTypeFree: if err = vector.AppendFixed(bat.Vecs[colIdx], cache.GetFree(), false, proc.GetMPool()); err != nil { return err } - case plan2.MoCacheColTypeHitRatio: + case planner.MoCacheColTypeHitRatio: if err = vector.AppendFixed(bat.Vecs[colIdx], cache.GetHitRatio(), false, proc.GetMPool()); err != nil { return err } diff --git a/pkg/sql/colexec/table_function/unnest.go b/pkg/sql/colexec/table_function/unnest.go index 82cf5231bec69..4160c2ded4220 100644 --- a/pkg/sql/colexec/table_function/unnest.go +++ b/pkg/sql/colexec/table_function/unnest.go @@ -26,7 +26,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" ) @@ -127,11 +127,11 @@ func unnestPrepare(proc *process.Process, arg *TableFunction) (tvfState, error) if len(arg.Args) == 1 { vType := types.T_varchar.ToType() bType := types.T_bool.ToType() - arg.Args = append(arg.Args, &plan.Expr{Typ: plan2.MakePlan2Type(&vType), Expr: &plan.Expr_Lit{Lit: &plan2.Const{Value: &plan.Literal_Sval{Sval: "$"}}}}) - arg.Args = append(arg.Args, &plan.Expr{Typ: plan2.MakePlan2Type(&bType), Expr: &plan.Expr_Lit{Lit: &plan2.Const{Value: &plan.Literal_Bval{Bval: false}}}}) + arg.Args = append(arg.Args, &plan.Expr{Typ: planner.MakePlan2Type(&vType), Expr: &plan.Expr_Lit{Lit: &plan.Literal{Value: &plan.Literal_Sval{Sval: "$"}}}}) + arg.Args = append(arg.Args, &plan.Expr{Typ: planner.MakePlan2Type(&bType), Expr: &plan.Expr_Lit{Lit: &plan.Literal{Value: &plan.Literal_Bval{Bval: false}}}}) } else if len(arg.Args) == 2 { bType := types.T_bool.ToType() - arg.Args = append(arg.Args, &plan.Expr{Typ: plan2.MakePlan2Type(&bType), Expr: &plan.Expr_Lit{Lit: &plan2.Const{Value: 
&plan.Literal_Bval{Bval: false}}}}) + arg.Args = append(arg.Args, &plan.Expr{Typ: planner.MakePlan2Type(&bType), Expr: &plan.Expr_Lit{Lit: &plan.Literal{Value: &plan.Literal_Bval{Bval: false}}}}) } dt, err := json.Marshal(st.param) if err != nil { diff --git a/pkg/sql/colexec/table_scan/table_scan.go b/pkg/sql/colexec/table_scan/table_scan.go index d6d91c54ec1c5..70e09b1744136 100644 --- a/pkg/sql/colexec/table_scan/table_scan.go +++ b/pkg/sql/colexec/table_scan/table_scan.go @@ -21,7 +21,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/perfcounter" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/txn/trace" v2 "github.com/matrixorigin/matrixone/pkg/util/metric/v2" @@ -56,7 +56,7 @@ func (tableScan *TableScan) Prepare(proc *process.Process) (err error) { tableScan.ctr.buf = batch.NewOffHeapWithSize(len(tableScan.Types)) tableScan.ctr.buf.Attrs = append(tableScan.ctr.buf.Attrs, tableScan.Attrs...) 
for i := range tableScan.Types { - tableScan.ctr.buf.Vecs[i] = vector.NewOffHeapVecWithType(plan.MakeTypeByPlan2Type(tableScan.Types[i])) + tableScan.ctr.buf.Vecs[i] = vector.NewOffHeapVecWithType(planner.MakeTypeByPlan2Type(tableScan.Types[i])) } } return diff --git a/pkg/sql/colexec/table_scan/table_scan_test.go b/pkg/sql/colexec/table_scan/table_scan_test.go index 2f0a4b468383a..e8c7acc95e2e9 100644 --- a/pkg/sql/colexec/table_scan/table_scan_test.go +++ b/pkg/sql/colexec/table_scan/table_scan_test.go @@ -20,19 +20,19 @@ import ( "testing" "github.com/golang/mock/gomock" - "github.com/stretchr/testify/require" - "github.com/matrixorigin/matrixone/pkg/catalog" "github.com/matrixorigin/matrixone/pkg/common/mpool" "github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" mock_frontend "github.com/matrixorigin/matrixone/pkg/frontend/test" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/pb/txn" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/engine" + "github.com/stretchr/testify/require" ) func TestString(t *testing.T) { @@ -83,7 +83,7 @@ func TestCall(t *testing.T) { arg := &TableScan{ Reader: reader, Attrs: []string{catalog.Row_ID, "int_col", "varchar_col"}, - Types: []plan.Type{plan.MakePlan2Type(&typ1), plan.MakePlan2Type(&typ2), plan.MakePlan2Type(&typ3)}, + Types: []plan.Type{planner.MakePlan2Type(&typ1), planner.MakePlan2Type(&typ2), planner.MakePlan2Type(&typ3)}, } err := arg.Prepare(proc) require.NoError(t, err) diff --git a/pkg/sql/colexec/timewin/timewin.go b/pkg/sql/colexec/timewin/timewin.go index d4a49c37937cf..f6327d0935849 100644 --- a/pkg/sql/colexec/timewin/timewin.go +++ b/pkg/sql/colexec/timewin/timewin.go @@ 
-26,7 +26,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" "github.com/matrixorigin/matrixone/pkg/sql/colexec/aggexec" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" ) diff --git a/pkg/sql/colexec/timewin/timewin_test.go b/pkg/sql/colexec/timewin/timewin_test.go index 0ec40e82a0100..29b3c76d9a149 100644 --- a/pkg/sql/colexec/timewin/timewin_test.go +++ b/pkg/sql/colexec/timewin/timewin_test.go @@ -25,7 +25,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" "github.com/matrixorigin/matrixone/pkg/sql/colexec/aggexec" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" diff --git a/pkg/sql/colexec/top/top_test.go b/pkg/sql/colexec/top/top_test.go index 48eb00a553a36..2d8a47b3624b5 100644 --- a/pkg/sql/colexec/top/top_test.go +++ b/pkg/sql/colexec/top/top_test.go @@ -18,17 +18,16 @@ import ( "bytes" "testing" - "github.com/stretchr/testify/require" - "github.com/matrixorigin/matrixone/pkg/common/mpool" "github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" + "github.com/stretchr/testify/require" ) const ( @@ -126,7 +125,7 @@ func newTestCase(t testing.TB, m *mpool.MPool, ts 
[]types.Type, limit int64, fs proc: testutil.NewProcessWithMPool(t, "", m), arg: &Top{ Fs: fs, - Limit: plan2.MakePlan2Uint64ConstExprWithType(uint64(limit)), + Limit: planner.MakePlan2Uint64ConstExprWithType(uint64(limit)), OperatorBase: vm.OperatorBase{ OperatorInfo: vm.OperatorInfo{ Idx: 0, diff --git a/pkg/sql/colexec/value_scan/types.go b/pkg/sql/colexec/value_scan/types.go index db3fcb37a301c..839d0405d1e0d 100644 --- a/pkg/sql/colexec/value_scan/types.go +++ b/pkg/sql/colexec/value_scan/types.go @@ -17,7 +17,7 @@ package value_scan import ( "github.com/matrixorigin/matrixone/pkg/common/reuse" "github.com/matrixorigin/matrixone/pkg/container/batch" - plan2 "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" @@ -32,10 +32,10 @@ type ValueScan struct { runningCtx container ColCount int - NodeType plan2.Node_NodeType + NodeType plan.Node_NodeType Batchs []*batch.Batch - RowsetData *plan2.RowsetData + RowsetData *plan.RowsetData ExprExecLists [][]colexec.ExpressionExecutor } diff --git a/pkg/sql/colexec/window/window.go b/pkg/sql/colexec/window/window.go index d2937a81b7d0c..68e1539e77962 100644 --- a/pkg/sql/colexec/window/window.go +++ b/pkg/sql/colexec/window/window.go @@ -18,9 +18,6 @@ import ( "bytes" "time" - "github.com/matrixorigin/matrixone/pkg/sql/colexec" - "github.com/matrixorigin/matrixone/pkg/sql/colexec/aggexec" - "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/container/types" @@ -28,7 +25,9 @@ import ( "github.com/matrixorigin/matrixone/pkg/partition" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sort" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/colexec" + 
"github.com/matrixorigin/matrixone/pkg/sql/colexec/aggexec" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" ) diff --git a/pkg/sql/colexec/window/window_test.go b/pkg/sql/colexec/window/window_test.go index 18b62cd2c0bdc..0d85ed4cedff0 100644 --- a/pkg/sql/colexec/window/window_test.go +++ b/pkg/sql/colexec/window/window_test.go @@ -19,20 +19,17 @@ import ( "context" "testing" - "github.com/matrixorigin/matrixone/pkg/container/batch" - "github.com/matrixorigin/matrixone/pkg/sql/colexec" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" - "github.com/stretchr/testify/require" - - "github.com/matrixorigin/matrixone/pkg/sql/colexec/aggexec" - "github.com/matrixorigin/matrixone/pkg/common/mpool" - "github.com/matrixorigin/matrixone/pkg/vm" - + "github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/sql/colexec" + "github.com/matrixorigin/matrixone/pkg/sql/colexec/aggexec" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/testutil" + "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/process" + "github.com/stretchr/testify/require" ) // add unit tests for cases diff --git a/pkg/sql/compile/Remote_Run_DEV_GUIDE.md b/pkg/sql/compile/Remote_Run_DEV_GUIDE.md index 6eb077b6ccf10..81dd5860cc8f5 100644 --- a/pkg/sql/compile/Remote_Run_DEV_GUIDE.md +++ b/pkg/sql/compile/Remote_Run_DEV_GUIDE.md @@ -306,7 +306,7 @@ func (receiver *messageReceiverOnServer) newCompile() (*Compile, error) { // Create Compile object c := allocateNewCompile(proc) - c.execType = plan2.ExecTypeAP_MULTICN + c.execType = planner.ExecTypeAP_MULTICN c.e = cnInfo.storeEngine c.MessageBoard = c.MessageBoard.SetMultiCN(c.GetMessageCenter(), c.proc.GetStmtProfile().GetStmtId()) diff 
--git a/pkg/sql/compile/agg_optimize_test.go b/pkg/sql/compile/agg_optimize_test.go index 4a9a82a6005f3..45443e21dad73 100644 --- a/pkg/sql/compile/agg_optimize_test.go +++ b/pkg/sql/compile/agg_optimize_test.go @@ -18,7 +18,7 @@ import ( "testing" "github.com/matrixorigin/matrixone/pkg/pb/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/stretchr/testify/require" ) diff --git a/pkg/sql/compile/alter.go b/pkg/sql/compile/alter.go index fd544a991e8a1..18b45ea622855 100644 --- a/pkg/sql/compile/alter.go +++ b/pkg/sql/compile/alter.go @@ -32,7 +32,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/sql/parsers" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/util/executor" "github.com/matrixorigin/matrixone/pkg/vectorindex/idxcron" "github.com/matrixorigin/matrixone/pkg/vm/engine" @@ -623,7 +623,7 @@ func (s *Scope) doAlterTable(c *Compile) error { return err } - if !plan2.IsFkBannedDatabase(qry.Database) { + if !planner.IsFkBannedDatabase(qry.Database) { //update the mo_foreign_keys for _, sql := range qry.UpdateFkSqls { err = c.runSql(sql) @@ -748,7 +748,7 @@ func notifyParentTableFkTableIdChange(c *Compile, fkey *plan.ForeignKeyDef, oldT } for _, ct := range oldCt.Cts { if def, ok1 := ct.(*engine.RefChildTableDef); ok1 { - def.Tables = plan2.RemoveIf(def.Tables, func(id uint64) bool { + def.Tables = planner.RemoveIf(def.Tables, func(id uint64) bool { return id == oldTableId }) } diff --git a/pkg/sql/compile/alter_test.go b/pkg/sql/compile/alter_test.go index fc4be673ec1d9..8016312cb58b3 100644 --- a/pkg/sql/compile/alter_test.go +++ b/pkg/sql/compile/alter_test.go @@ -20,20 +20,18 @@ import ( "time" "github.com/golang/mock/gomock" - "github.com/prashantv/gostub" - 
"github.com/smartystreets/goconvey/convey" - "github.com/stretchr/testify/assert" - "github.com/matrixorigin/matrixone/pkg/common/buffer" "github.com/matrixorigin/matrixone/pkg/common/moerr" mock_frontend "github.com/matrixorigin/matrixone/pkg/frontend/test" "github.com/matrixorigin/matrixone/pkg/pb/api" "github.com/matrixorigin/matrixone/pkg/pb/lock" - plan2 "github.com/matrixorigin/matrixone/pkg/pb/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/vm/engine" "github.com/matrixorigin/matrixone/pkg/vm/process" + "github.com/prashantv/gostub" + "github.com/smartystreets/goconvey/convey" + "github.com/stretchr/testify/assert" ) func TestScope_AlterTableInplace(t *testing.T) { @@ -44,7 +42,7 @@ func TestScope_AlterTableInplace(t *testing.T) { { ColId: 0, Name: "deptno", - Alg: plan2.CompressType_Lz4, + Alg: plan.CompressType_Lz4, Typ: plan.Type{ Id: 27, NotNullable: false, @@ -52,7 +50,7 @@ func TestScope_AlterTableInplace(t *testing.T) { Width: 32, Scale: -1, }, - Default: &plan2.Default{}, + Default: &plan.Default{}, NotNull: true, Primary: true, Pkidx: 0, @@ -60,7 +58,7 @@ func TestScope_AlterTableInplace(t *testing.T) { { ColId: 1, Name: "dname", - Alg: plan2.CompressType_Lz4, + Alg: plan.CompressType_Lz4, Typ: plan.Type{ Id: 61, NotNullable: false, @@ -68,7 +66,7 @@ func TestScope_AlterTableInplace(t *testing.T) { Width: 15, Scale: 0, }, - Default: &plan2.Default{}, + Default: &plan.Default{}, NotNull: false, Primary: false, Pkidx: 0, @@ -76,7 +74,7 @@ func TestScope_AlterTableInplace(t *testing.T) { { ColId: 2, Name: "loc", - Alg: plan2.CompressType_Lz4, + Alg: plan.CompressType_Lz4, Typ: plan.Type{ Id: 61, NotNullable: false, @@ -84,7 +82,7 @@ func TestScope_AlterTableInplace(t *testing.T) { Width: 50, Scale: 0, }, - Default: &plan2.Default{}, + Default: &plan.Default{}, NotNull: false, Primary: false, Pkidx: 0, @@ 
-105,7 +103,7 @@ func TestScope_AlterTableInplace(t *testing.T) { TableExist: true, }, }, - Defs: []*plan2.TableDef_DefType{ + Defs: []*plan.TableDef_DefType{ { Def: &plan.TableDef_DefType_Properties{ Properties: &plan.PropertiesDef{ @@ -121,18 +119,18 @@ func TestScope_AlterTableInplace(t *testing.T) { }, } - alterTable := &plan2.AlterTable{ + alterTable := &plan.AlterTable{ Database: "test", TableDef: tableDef, - Actions: []*plan2.AlterTable_Action{ + Actions: []*plan.AlterTable_Action{ { - Action: &plan2.AlterTable_Action_AddIndex{ - AddIndex: &plan2.AlterTableAddIndex{ + Action: &plan.AlterTable_Action_AddIndex{ + AddIndex: &plan.AlterTableAddIndex{ DbName: "test", TableName: "dept", OriginTablePrimaryKey: "deptno", IndexTableExist: true, - IndexInfo: &plan2.CreateTable{ + IndexInfo: &plan.CreateTable{ TableDef: &plan.TableDef{ Indexes: []*plan.IndexDef{ { @@ -150,7 +148,7 @@ func TestScope_AlterTableInplace(t *testing.T) { Cols: []*plan.ColDef{ { Name: "__mo_index_idx_col", - Alg: plan2.CompressType_Lz4, + Alg: plan.CompressType_Lz4, Typ: plan.Type{ Id: 61, NotNullable: false, @@ -159,14 +157,14 @@ func TestScope_AlterTableInplace(t *testing.T) { Scale: 0, }, NotNull: false, - Default: &plan2.Default{ + Default: &plan.Default{ NullAbility: false, }, Pkidx: 0, }, { Name: "__mo_index_pri_col", - Alg: plan2.CompressType_Lz4, + Alg: plan.CompressType_Lz4, Typ: plan.Type{ Id: 27, NotNullable: false, @@ -175,13 +173,13 @@ func TestScope_AlterTableInplace(t *testing.T) { Scale: -1, }, NotNull: false, - Default: &plan2.Default{ + Default: &plan.Default{ NullAbility: false, }, Pkidx: 0, }, }, - Pkey: &plan2.PrimaryKeyDef{ + Pkey: &plan.PrimaryKeyDef{ PkeyColName: "__mo_index_idx_col", Names: []string{"__mo_index_idx_col"}, }, @@ -195,10 +193,10 @@ func TestScope_AlterTableInplace(t *testing.T) { } cplan := &plan.Plan{ - Plan: &plan2.Plan_Ddl{ - Ddl: &plan2.DataDefinition{ - DdlType: plan2.DataDefinition_ALTER_TABLE, - Definition: &plan2.DataDefinition_AlterTable{ + 
Plan: &plan.Plan_Ddl{ + Ddl: &plan.DataDefinition{ + DdlType: plan.DataDefinition_ALTER_TABLE, + Definition: &plan.DataDefinition_AlterTable{ AlterTable: alterTable, }, }, @@ -431,7 +429,7 @@ func TestScope_AlterTableCopy(t *testing.T) { { ColId: 0, Name: "deptno", - Alg: plan2.CompressType_Lz4, + Alg: plan.CompressType_Lz4, Typ: plan.Type{ Id: 27, NotNullable: false, @@ -439,7 +437,7 @@ func TestScope_AlterTableCopy(t *testing.T) { Width: 32, Scale: -1, }, - Default: &plan2.Default{}, + Default: &plan.Default{}, NotNull: true, Primary: true, Pkidx: 0, @@ -447,7 +445,7 @@ func TestScope_AlterTableCopy(t *testing.T) { { ColId: 1, Name: "dname", - Alg: plan2.CompressType_Lz4, + Alg: plan.CompressType_Lz4, Typ: plan.Type{ Id: 61, NotNullable: false, @@ -455,7 +453,7 @@ func TestScope_AlterTableCopy(t *testing.T) { Width: 15, Scale: 0, }, - Default: &plan2.Default{}, + Default: &plan.Default{}, NotNull: false, Primary: false, Pkidx: 0, @@ -463,7 +461,7 @@ func TestScope_AlterTableCopy(t *testing.T) { { ColId: 2, Name: "loc", - Alg: plan2.CompressType_Lz4, + Alg: plan.CompressType_Lz4, Typ: plan.Type{ Id: 61, NotNullable: false, @@ -471,7 +469,7 @@ func TestScope_AlterTableCopy(t *testing.T) { Width: 50, Scale: 0, }, - Default: &plan2.Default{}, + Default: &plan.Default{}, NotNull: false, Primary: false, Pkidx: 0, @@ -492,7 +490,7 @@ func TestScope_AlterTableCopy(t *testing.T) { TableExist: true, }, }, - Defs: []*plan2.TableDef_DefType{ + Defs: []*plan.TableDef_DefType{ { Def: &plan.TableDef_DefType_Properties{ Properties: &plan.PropertiesDef{ @@ -515,7 +513,7 @@ func TestScope_AlterTableCopy(t *testing.T) { { ColId: 1, Name: "deptno", - Alg: plan2.CompressType_Lz4, + Alg: plan.CompressType_Lz4, Typ: plan.Type{ Id: 27, NotNullable: false, @@ -523,7 +521,7 @@ func TestScope_AlterTableCopy(t *testing.T) { Width: 32, Scale: -1, }, - Default: &plan2.Default{}, + Default: &plan.Default{}, NotNull: true, Primary: true, Pkidx: 0, @@ -531,7 +529,7 @@ func 
TestScope_AlterTableCopy(t *testing.T) { { ColId: 2, Name: "dname", - Alg: plan2.CompressType_Lz4, + Alg: plan.CompressType_Lz4, Typ: plan.Type{ Id: 61, NotNullable: false, @@ -539,7 +537,7 @@ func TestScope_AlterTableCopy(t *testing.T) { Width: 20, Scale: 0, }, - Default: &plan2.Default{}, + Default: &plan.Default{}, NotNull: false, Primary: false, Pkidx: 0, @@ -547,7 +545,7 @@ func TestScope_AlterTableCopy(t *testing.T) { { ColId: 3, Name: "loc", - Alg: plan2.CompressType_Lz4, + Alg: plan.CompressType_Lz4, Typ: plan.Type{ Id: 61, NotNullable: false, @@ -555,7 +553,7 @@ func TestScope_AlterTableCopy(t *testing.T) { Width: 50, Scale: 0, }, - Default: &plan2.Default{}, + Default: &plan.Default{}, NotNull: false, Primary: false, Pkidx: 0, @@ -564,7 +562,7 @@ func TestScope_AlterTableCopy(t *testing.T) { ColId: 4, Name: "__mo_rowid", Hidden: true, - Alg: plan2.CompressType_Lz4, + Alg: plan.CompressType_Lz4, Typ: plan.Type{ Id: 101, NotNullable: true, @@ -573,7 +571,7 @@ func TestScope_AlterTableCopy(t *testing.T) { Scale: 0, Table: "dept", }, - Default: &plan2.Default{}, + Default: &plan.Default{}, NotNull: false, Primary: false, Pkidx: 0, @@ -596,7 +594,7 @@ func TestScope_AlterTableCopy(t *testing.T) { TableExist: true, }, }, - Defs: []*plan2.TableDef_DefType{ + Defs: []*plan.TableDef_DefType{ { Def: &plan.TableDef_DefType_Properties{ Properties: &plan.PropertiesDef{ @@ -612,17 +610,17 @@ func TestScope_AlterTableCopy(t *testing.T) { }, } - alterTable := &plan2.AlterTable{ + alterTable := &plan.AlterTable{ Database: "test", TableDef: tableDef, CopyTableDef: copyTableDef, } cplan := &plan.Plan{ - Plan: &plan2.Plan_Ddl{ - Ddl: &plan2.DataDefinition{ - DdlType: plan2.DataDefinition_ALTER_TABLE, - Definition: &plan2.DataDefinition_AlterTable{ + Plan: &plan.Plan_Ddl{ + Ddl: &plan.DataDefinition{ + DdlType: plan.DataDefinition_ALTER_TABLE, + Definition: &plan.DataDefinition_AlterTable{ AlterTable: alterTable, }, }, diff --git a/pkg/sql/compile/bloomfilter_cover_test.go 
b/pkg/sql/compile/bloomfilter_cover_test.go index 4b5365d9d3639..9066a047a08d8 100644 --- a/pkg/sql/compile/bloomfilter_cover_test.go +++ b/pkg/sql/compile/bloomfilter_cover_test.go @@ -21,7 +21,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/catalog" "github.com/matrixorigin/matrixone/pkg/defines" "github.com/matrixorigin/matrixone/pkg/pb/plan" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/vm/engine" "github.com/stretchr/testify/require" @@ -52,7 +52,7 @@ func TestBuildReadersBloomFilterFullCoverage(t *testing.T) { c := NewMockCompile(t) c.proc = proc - s.DataSource.FilterList = []*plan.Expr{plan2.MakeFalseExpr()} + s.DataSource.FilterList = []*plan.Expr{planner.MakeFalseExpr()} s.DataSource.RuntimeFilterSpecs = []*plan.RuntimeFilterSpec{} readers, err := s.buildReaders(c) @@ -86,7 +86,7 @@ func TestBuildReadersBloomFilterFullCoverage(t *testing.T) { c := NewMockCompile(t) c.proc = proc - s.DataSource.FilterList = []*plan.Expr{plan2.MakeFalseExpr()} + s.DataSource.FilterList = []*plan.Expr{planner.MakeFalseExpr()} s.DataSource.RuntimeFilterSpecs = []*plan.RuntimeFilterSpec{} readers, err := s.buildReaders(c) @@ -118,7 +118,7 @@ func TestBuildReadersBloomFilterFullCoverage(t *testing.T) { c := NewMockCompile(t) c.proc = proc - s.DataSource.FilterList = []*plan.Expr{plan2.MakeFalseExpr()} + s.DataSource.FilterList = []*plan.Expr{planner.MakeFalseExpr()} s.DataSource.RuntimeFilterSpecs = []*plan.RuntimeFilterSpec{} defer func() { @@ -154,7 +154,7 @@ func TestBuildReadersBloomFilterFullCoverage(t *testing.T) { c := NewMockCompile(t) c.proc = proc - s.DataSource.FilterList = []*plan.Expr{plan2.MakeFalseExpr()} + s.DataSource.FilterList = []*plan.Expr{planner.MakeFalseExpr()} s.DataSource.RuntimeFilterSpecs = []*plan.RuntimeFilterSpec{} defer func() { diff --git a/pkg/sql/compile/compile.go 
b/pkg/sql/compile/compile.go index 0a92135cd35db..7999b2ef7a05d 100644 --- a/pkg/sql/compile/compile.go +++ b/pkg/sql/compile/compile.go @@ -70,10 +70,10 @@ import ( "github.com/matrixorigin/matrixone/pkg/sql/colexec/table_scan" "github.com/matrixorigin/matrixone/pkg/sql/colexec/value_scan" "github.com/matrixorigin/matrixone/pkg/sql/crt" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" - "github.com/matrixorigin/matrixone/pkg/sql/plan/rule" + "github.com/matrixorigin/matrixone/pkg/sql/planner" + "github.com/matrixorigin/matrixone/pkg/sql/planner/rule" "github.com/matrixorigin/matrixone/pkg/sql/util" mokafka "github.com/matrixorigin/matrixone/pkg/stream/adapter/kafka" "github.com/matrixorigin/matrixone/pkg/txn/client" @@ -421,7 +421,7 @@ func (c *Compile) canRetry(err error) bool { } func (c *Compile) IsTpQuery() bool { - return c.execType == plan2.ExecTypeTP + return c.execType == planner.ExecTypeTP } func (c *Compile) IsSingleScope(ss []*Scope) bool { @@ -729,7 +729,7 @@ func (c *Compile) appendMetaTables(objRes *plan.ObjectRef) { func (c *Compile) lockTable() error { for _, tbl := range c.lockTables { - typ := plan2.MakeTypeByPlan2Type(tbl.PrimaryColTyp) + typ := planner.MakeTypeByPlan2Type(tbl.PrimaryColTyp) return lockop.LockTable( c.e, c.proc, @@ -821,10 +821,10 @@ func (c *Compile) compileQuery(qry *plan.Query) ([]*Scope, error) { v2.TxnStatementCompileQueryHistogram.Observe(time.Since(start).Seconds()) }() - c.execType = plan2.GetExecType(c.pn.GetQuery(), c.getHaveDDL(), c.isPrepare) + c.execType = planner.GetExecType(c.pn.GetQuery(), c.getHaveDDL(), c.isPrepare) n := getEngineNode(c) - if c.execType == plan2.ExecTypeTP || c.execType == plan2.ExecTypeAP_ONECN { + if c.execType == planner.ExecTypeTP || c.execType == planner.ExecTypeAP_ONECN { c.cnList = engine.Nodes{n} } else { 
c.cnList, err = c.getCNList() @@ -845,7 +845,7 @@ func (c *Compile) compileQuery(qry *plan.Query) ([]*Scope, error) { ncpu = min(ncpu, int32(qry.MaxDop)) } - plan2.CalcQueryDOP(c.pn, ncpu, len(c.cnList), c.execType) + planner.CalcQueryDOP(c.pn, ncpu, len(c.cnList), c.execType) c.initAnalyzeModule(qry) // deal with sink scan first. @@ -977,7 +977,7 @@ func (c *Compile) compilePlanScope(step int32, curNodeIdx int32, nodes []*plan.N if node.ObjRef != nil { c.appendMetaTables(node.ObjRef) } - nodeCopy := plan2.DeepCopyNode(node) + nodeCopy := plan.DeepCopyNode(node) c.setAnalyzeCurrent(nil, int(curNodeIdx)) ss, err = c.compileExternScan(nodeCopy) @@ -1471,11 +1471,11 @@ func (c *Compile) getExternParam(proc *process.Process, externScan *plan.ExternS if externScan.Type == int32(plan.ExternType_EXTERNAL_TB) || externScan.Type == int32(plan.ExternType_RESULT_SCAN) { switch param.ScanType { case tree.INFILE: - if err := plan2.InitInfileOrStageParam(param, proc); err != nil { + if err := planner.InitInfileOrStageParam(param, proc); err != nil { return nil, err } case tree.S3: - if err := plan2.InitS3Param(param); err != nil { + if err := planner.InitS3Param(param); err != nil { return nil, err } } @@ -1498,7 +1498,7 @@ func (c *Compile) getExternalFileListAndSize(node *plan.Node, param *tree.Extern case int32(plan.ExternType_EXTERNAL_TB): t := time.Now() _, spanReadDir := trace.Start(c.proc.Ctx, "compileExternScan.ReadDir") - fileList, fileSize, err = plan2.ReadDir(param) + fileList, fileSize, err = planner.ReadDir(param) if err != nil { spanReadDir.End() return nil, nil, err @@ -2097,16 +2097,16 @@ func (c *Compile) compileTableScanDataSource(s *Scope) error { } if len(node.FilterList) != len(s.DataSource.FilterList) { - s.DataSource.FilterList = plan2.DeepCopyExprList(node.FilterList) + s.DataSource.FilterList = plan.DeepCopyExprList(node.FilterList) for _, e := range s.DataSource.FilterList { - _, err := plan2.ReplaceFoldExpr(c.proc, e, &c.filterExprExes) + _, err := 
planner.ReplaceFoldExpr(c.proc, e, &c.filterExprExes) if err != nil { return err } } } for _, e := range s.DataSource.FilterList { - err = plan2.EvalFoldExpr(c.proc, e, &c.filterExprExes) + err = planner.EvalFoldExpr(c.proc, e, &c.filterExprExes) if err != nil { return err } @@ -2114,9 +2114,9 @@ func (c *Compile) compileTableScanDataSource(s *Scope) error { s.DataSource.FilterExpr = colexec.RewriteFilterExprList(s.DataSource.FilterList) if len(node.BlockFilterList) != len(s.DataSource.BlockFilterList) { - s.DataSource.BlockFilterList = plan2.DeepCopyExprList(node.BlockFilterList) + s.DataSource.BlockFilterList = plan.DeepCopyExprList(node.BlockFilterList) for _, e := range s.DataSource.BlockFilterList { - _, err := plan2.ReplaceFoldExpr(c.proc, e, &c.filterExprExes) + _, err := planner.ReplaceFoldExpr(c.proc, e, &c.filterExprExes) if err != nil { return err } @@ -2144,7 +2144,7 @@ func (c *Compile) compileRestrict(node *plan.Node, ss []*Scope) []*Scope { currentFirstFlag := c.anal.isFirst var op *filter.Filter for i := range ss { - op = constructRestrict(node, plan2.DeepCopyExprList(node.FilterList)) + op = constructRestrict(node, plan.DeepCopyExprList(node.FilterList)) op.SetAnalyzeControl(c.anal.curNodeIdx, currentFirstFlag) ss[i].setRootOperator(op) } @@ -2230,7 +2230,7 @@ func (c *Compile) compileUnion(node *plan.Node, left []*Scope, right []*Scope) [ gn := new(plan.Node) gn.GroupBy = make([]*plan.Expr, len(node.ProjectList)) for i := range gn.GroupBy { - gn.GroupBy[i] = plan2.DeepCopyExpr(node.ProjectList[i]) + gn.GroupBy[i] = plan.DeepCopyExpr(node.ProjectList[i]) gn.GroupBy[i].Typ.NotNullable = false } currentFirstFlag := c.anal.isFirst @@ -2475,7 +2475,7 @@ func (c *Compile) newProbeScopeListForBroadcastJoin(probeScopes []*Scope, forceO func (c *Compile) compileProbeSideForBroadcastJoin(node, left, right *plan.Node, probeScopes []*Scope) []*Scope { var rs []*Scope - isEq := plan2.IsEquiJoin2(node.OnList) + isEq := planner.IsEquiJoin2(node.OnList) 
rightTypes := make([]types.Type, len(right.ProjectList)) for i, expr := range right.ProjectList { @@ -2751,7 +2751,7 @@ func (c *Compile) compileSort(node *plan.Node, ss []*Scope) []*Scope { } if !overflow && topN <= 8192*2 { // if n is small, convert `order by col limit m offset n` to `top m+n offset n` - return c.compileOffset(node, c.compileTop(node, plan2.MakePlan2Uint64ConstExprWithType(topN), ss)) + return c.compileOffset(node, c.compileTop(node, planner.MakePlan2Uint64ConstExprWithType(topN), ss)) } } return c.compileLimit(node, c.compileOffset(node, c.compileOrder(node, ss))) @@ -2994,7 +2994,7 @@ func (c *Compile) compileSample(node *plan.Node, ss []*Scope) []*Scope { rs := c.newMergeScope(ss) // should sample again if sample by rows. - if node.SampleFunc.Rows != plan2.NotSampleByRows { + if node.SampleFunc.Rows != planner.NotSampleByRows { currentFirstFlag = c.anal.isFirst op := sample.NewMergeSample(constructSample(node, true), false) op.SetAnalyzeControl(c.anal.curNodeIdx, currentFirstFlag) @@ -3254,7 +3254,7 @@ func (c *Compile) compileInsert(nodes []*plan.Node, node *plan.Node, ss []*Scope if c.anal.qry.LoadTag && node.Stats.HashmapStats != nil && node.Stats.HashmapStats.Shuffle && dataScope.NodeInfo.Mcpu == parallelSize && parallelSize > 1 { _, arg := constructDispatchLocalAndRemote(0, scopes, dataScope) arg.FuncId = dispatch.ShuffleToAllFunc - arg.ShuffleType = plan2.ShuffleToLocalMatchedReg + arg.ShuffleType = planner.ShuffleToLocalMatchedReg arg.SetAnalyzeControl(c.anal.curNodeIdx, false) dataScope.setRootOperator(arg) } else { @@ -4091,8 +4091,8 @@ func shouldScanOnCurrentCN(c *Compile, node *plan.Node, forceSingle bool) bool { return true } - if !plan2.GetForceScanOnMultiCN() && - node.Stats.BlockNum <= int32(plan2.BlockThresholdForOneCN) { + if !planner.GetForceScanOnMultiCN() && + node.Stats.BlockNum <= int32(planner.BlockThresholdForOneCN) { return true } @@ -4680,7 +4680,7 @@ func (c *Compile) SetOriginSQL(sql string) { c.originSQL = sql } 
-func (c *Compile) SetBuildPlanFunc(buildPlanFunc func(ctx context.Context) (*plan2.Plan, error)) { +func (c *Compile) SetBuildPlanFunc(buildPlanFunc func(ctx context.Context) (*plan.Plan, error)) { c.buildPlanFunc = buildPlanFunc } diff --git a/pkg/sql/compile/compile2.go b/pkg/sql/compile/compile2.go index e38aa10c55de4..1bc9ed3793027 100644 --- a/pkg/sql/compile/compile2.go +++ b/pkg/sql/compile/compile2.go @@ -28,7 +28,6 @@ import ( "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/perfcounter" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" "github.com/matrixorigin/matrixone/pkg/txn/client" txnTrace "github.com/matrixorigin/matrixone/pkg/txn/trace" util2 "github.com/matrixorigin/matrixone/pkg/util" @@ -373,7 +372,7 @@ func (c *Compile) prepareRetry(defChanged bool) (*Compile, error) { } }() if defChanged { - var pn *plan2.Plan + var pn *plan.Plan pn, e = c.buildPlanFunc(topContext) if e != nil { return nil, e diff --git a/pkg/sql/compile/compile_test.go b/pkg/sql/compile/compile_test.go index 6af7ef5534662..6242394bca4b6 100644 --- a/pkg/sql/compile/compile_test.go +++ b/pkg/sql/compile/compile_test.go @@ -21,33 +21,30 @@ import ( "testing" "time" - "github.com/stretchr/testify/assert" - + "github.com/golang/mock/gomock" "github.com/matrixorigin/matrixone/pkg/catalog" + "github.com/matrixorigin/matrixone/pkg/cnservice/cnclient" + "github.com/matrixorigin/matrixone/pkg/common/buffer" "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/common/morpc" "github.com/matrixorigin/matrixone/pkg/common/system" - "github.com/matrixorigin/matrixone/pkg/defines" - "github.com/matrixorigin/matrixone/pkg/perfcounter" - "github.com/matrixorigin/matrixone/pkg/txn/client" - - "github.com/golang/mock/gomock" - "github.com/stretchr/testify/require" - - "github.com/matrixorigin/matrixone/pkg/cnservice/cnclient" - 
"github.com/matrixorigin/matrixone/pkg/common/buffer" "github.com/matrixorigin/matrixone/pkg/container/batch" + "github.com/matrixorigin/matrixone/pkg/defines" mock_frontend "github.com/matrixorigin/matrixone/pkg/frontend/test" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/pb/txn" + "github.com/matrixorigin/matrixone/pkg/perfcounter" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect/mysql" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/testutil/testengine" + "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/util/fault" "github.com/matrixorigin/matrixone/pkg/vm/engine" "github.com/matrixorigin/matrixone/pkg/vm/process" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) type compileTestCase struct { @@ -292,7 +289,7 @@ func newTestCase(sql string, t *testing.T) compileTestCase { e, txnClient, compilerCtx := testengine.New(defines.AttachAccountId(context.Background(), catalog.System_Account)) stmts, err := mysql.Parse(compilerCtx.GetContext(), sql, 1) require.NoError(t, err) - pn, err := plan2.BuildPlan(compilerCtx, stmts[0], false) + pn, err := planner.BuildPlan(compilerCtx, stmts[0], false) if err != nil { panic(err) } diff --git a/pkg/sql/compile/ddl.go b/pkg/sql/compile/ddl.go index 3003d0475b786..ef4bf4ee9e197 100644 --- a/pkg/sql/compile/ddl.go +++ b/pkg/sql/compile/ddl.go @@ -25,36 +25,34 @@ import ( "strings" "time" - moruntime "github.com/matrixorigin/matrixone/pkg/common/runtime" - "github.com/matrixorigin/matrixone/pkg/config" - "github.com/matrixorigin/matrixone/pkg/iscp" - "github.com/matrixorigin/matrixone/pkg/pb/task" - - "github.com/matrixorigin/matrixone/pkg/cdc" - "github.com/matrixorigin/matrixone/pkg/taskservice" - 
"github.com/google/uuid" "github.com/matrixorigin/matrixone/pkg/catalog" + "github.com/matrixorigin/matrixone/pkg/cdc" "github.com/matrixorigin/matrixone/pkg/common/moerr" + moruntime "github.com/matrixorigin/matrixone/pkg/common/runtime" commonutil "github.com/matrixorigin/matrixone/pkg/common/util" + "github.com/matrixorigin/matrixone/pkg/config" "github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/defines" "github.com/matrixorigin/matrixone/pkg/incrservice" + "github.com/matrixorigin/matrixone/pkg/iscp" "github.com/matrixorigin/matrixone/pkg/logutil" "github.com/matrixorigin/matrixone/pkg/partitionservice" "github.com/matrixorigin/matrixone/pkg/pb/api" "github.com/matrixorigin/matrixone/pkg/pb/lock" "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/pb/task" "github.com/matrixorigin/matrixone/pkg/shardservice" "github.com/matrixorigin/matrixone/pkg/sql/colexec/lockop" "github.com/matrixorigin/matrixone/pkg/sql/features" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/sql/parsers" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/planner" + "github.com/matrixorigin/matrixone/pkg/taskservice" "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/util/executor" "github.com/matrixorigin/matrixone/pkg/util/trace" @@ -404,7 +402,7 @@ func (s *Scope) AlterTableInplace(c *Compile) error { tblId := rel.GetTableID(c.proc.Ctx) extra := rel.GetExtraInfo() - oTableDef := plan2.DeepCopyTableDef(qry.TableDef, true) + oTableDef := plan.DeepCopyTableDef(qry.TableDef, true) var oldCt 
*engine.ConstraintDef newCt := &engine.ConstraintDef{ @@ -579,7 +577,7 @@ func (s *Scope) AlterTableInplace(c *Compile) error { return moerr.NewErrCantDropFieldOrKey(c.proc.Ctx, constraintName) } hasUpdateConstraints = true - oTableDef.Fkeys = plan2.RemoveIf(oTableDef.Fkeys, func(fk *plan.ForeignKeyDef) bool { + oTableDef.Fkeys = planner.RemoveIf(oTableDef.Fkeys, func(fk *plan.ForeignKeyDef) bool { if fk.Name == constraintName { removeRefChildTbls[constraintName] = fk.ForeignTbl return true @@ -941,7 +939,7 @@ func (s *Scope) AlterTableInplace(c *Compile) error { for _, fkey := range t.Fkeys { //For compatibility, regenerate constraint name for the constraint with empty name. if len(fkey.Name) == 0 { - fkey.Name = plan2.GenConstraintName() + fkey.Name = planner.GenConstraintName() newFkeys = append(newFkeys, fkey) } else if _, ok := removeRefChildTbls[fkey.Name]; !ok { newFkeys = append(newFkeys, fkey) @@ -1276,7 +1274,7 @@ func (s *Scope) CreateTable(c *Compile) error { for _, col := range planCols { colId2Name[col.ColId] = col.Name } - dedupFkName := make(plan2.UnorderedSet[string]) + dedupFkName := make(planner.UnorderedSet[string]) //1. update fk info in child table. //column ids of column names in child table have changed after //the table is created by engine.Database.Create. 
@@ -1956,7 +1954,7 @@ func (s *Scope) doCreateIndex( tableDef := r.GetTableDef(c.proc.Ctx) extra := r.GetExtraInfo() - originalTableDef := plan2.DeepCopyTableDef(qry.TableDef, true) + originalTableDef := plan.DeepCopyTableDef(qry.TableDef, true) indexInfo := qry.GetIndex() // IndexInfo is named same as planner's IndexInfo indexTableDef := indexInfo.GetTableDef() @@ -2296,7 +2294,7 @@ func makeNewDropConstraint(oldCt *engine.ConstraintDef, dropName string) (*engin pred := func(fkDef *plan.ForeignKeyDef) bool { return fkDef.Name == dropName } - def.Fkeys = plan2.RemoveIf[*plan.ForeignKeyDef](def.Fkeys, pred) + def.Fkeys = planner.RemoveIf[*plan.ForeignKeyDef](def.Fkeys, pred) oldCt.Cts[i] = def case *engine.IndexDef: pred := func(index *plan.IndexDef) bool { @@ -2305,7 +2303,7 @@ func makeNewDropConstraint(oldCt *engine.ConstraintDef, dropName string) (*engin } return index.IndexName == dropName } - def.Indexes = plan2.RemoveIf[*plan.IndexDef](def.Indexes, pred) + def.Indexes = planner.RemoveIf[*plan.IndexDef](def.Indexes, pred) oldCt.Cts[i] = def } } @@ -2327,14 +2325,14 @@ func MakeNewCreateConstraint(oldCt *engine.ConstraintDef, c engine.Constraint) ( _, ok = ct.(*engine.ForeignKeyDef) return ok } - oldCt.Cts = plan2.RemoveIf[engine.Constraint](oldCt.Cts, pred) + oldCt.Cts = planner.RemoveIf[engine.Constraint](oldCt.Cts, pred) oldCt.Cts = append(oldCt.Cts, c) case *engine.RefChildTableDef: pred = func(ct engine.Constraint) bool { _, ok = ct.(*engine.RefChildTableDef) return ok } - oldCt.Cts = plan2.RemoveIf[engine.Constraint](oldCt.Cts, pred) + oldCt.Cts = planner.RemoveIf[engine.Constraint](oldCt.Cts, pred) oldCt.Cts = append(oldCt.Cts, c) case *engine.IndexDef: ok := false @@ -2409,7 +2407,7 @@ func (s *Scope) removeChildTblIdFromParentTable(c *Compile, fkRelation engine.Re } for _, ct := range oldCt.Cts { if def, ok := ct.(*engine.RefChildTableDef); ok { - def.Tables = plan2.RemoveIf[uint64](def.Tables, func(id uint64) bool { + def.Tables = 
planner.RemoveIf[uint64](def.Tables, func(id uint64) bool { return id == tblId }) break @@ -3143,15 +3141,15 @@ last_seq_num | min_value| max_value| start_value| increment_value| cycle| is_cal func makeSequenceAlterBatch(ctx context.Context, stmt *tree.AlterSequence, tableDef *plan.TableDef, proc *process.Process, result []interface{}, curval string) (*batch.Batch, error) { var bat batch.Batch bat.SetRowCount(1) - attrs := make([]string, len(plan2.Sequence_cols_name)) + attrs := make([]string, len(planner.Sequence_cols_name)) for i := range attrs { - attrs[i] = plan2.Sequence_cols_name[i] + attrs[i] = planner.Sequence_cols_name[i] } bat.Attrs = attrs // typ is sequenece's type now - typ := plan2.MakeTypeByPlan2Type(tableDef.Cols[0].Typ) - vecs := make([]*vector.Vector, len(plan2.Sequence_cols_name)) + typ := planner.MakeTypeByPlan2Type(tableDef.Cols[0].Typ) + vecs := make([]*vector.Vector, len(planner.Sequence_cols_name)) switch typ.Oid { case types.T_int16: @@ -3251,13 +3249,13 @@ func makeSequenceAlterBatch(ctx context.Context, stmt *tree.AlterSequence, table func makeSequenceInitBatch(ctx context.Context, stmt *tree.CreateSequence, tableDef *plan.TableDef, proc *process.Process) (*batch.Batch, error) { var bat batch.Batch bat.SetRowCount(1) - attrs := make([]string, len(plan2.Sequence_cols_name)) + attrs := make([]string, len(planner.Sequence_cols_name)) for i := range attrs { - attrs[i] = plan2.Sequence_cols_name[i] + attrs[i] = planner.Sequence_cols_name[i] } bat.Attrs = attrs - typ := plan2.MakeTypeByPlan2Type(tableDef.Cols[0].Typ) + typ := planner.MakeTypeByPlan2Type(tableDef.Cols[0].Typ) sequence_cols_num := 7 vecs := make([]*vector.Vector, sequence_cols_num) diff --git a/pkg/sql/compile/ddl_test.go b/pkg/sql/compile/ddl_test.go index c0ef52443ae1c..4927a2f4005f8 100644 --- a/pkg/sql/compile/ddl_test.go +++ b/pkg/sql/compile/ddl_test.go @@ -19,30 +19,26 @@ import ( "testing" "time" - "github.com/matrixorigin/matrixone/pkg/pb/api" - 
"github.com/matrixorigin/matrixone/pkg/pb/lock" - "github.com/matrixorigin/matrixone/pkg/txn/client" - "github.com/prashantv/gostub" - - "github.com/matrixorigin/matrixone/pkg/container/types" - "github.com/matrixorigin/matrixone/pkg/container/vector" - "github.com/golang/mock/gomock" - "github.com/smartystreets/goconvey/convey" - "github.com/stretchr/testify/assert" - "github.com/matrixorigin/matrixone/pkg/catalog" "github.com/matrixorigin/matrixone/pkg/common/buffer" "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/common/mpool" + "github.com/matrixorigin/matrixone/pkg/container/types" + "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/defines" mock_frontend "github.com/matrixorigin/matrixone/pkg/frontend/test" - plan2 "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/pb/api" + "github.com/matrixorigin/matrixone/pkg/pb/lock" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan" "github.com/matrixorigin/matrixone/pkg/testutil" + "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/vm/engine" "github.com/matrixorigin/matrixone/pkg/vm/process" + "github.com/prashantv/gostub" + "github.com/smartystreets/goconvey/convey" + "github.com/stretchr/testify/assert" ) func Test_lockIndexTable(t *testing.T) { @@ -127,7 +123,7 @@ func TestScope_CreateTable(t *testing.T) { { ColId: 0, Name: "deptno", - Alg: plan2.CompressType_Lz4, + Alg: plan.CompressType_Lz4, Typ: plan.Type{ Id: 27, NotNullable: false, @@ -135,7 +131,7 @@ func TestScope_CreateTable(t *testing.T) { Width: 32, Scale: -1, }, - Default: &plan2.Default{}, + Default: &plan.Default{}, NotNull: true, Primary: true, Pkidx: 0, @@ -143,7 +139,7 @@ func TestScope_CreateTable(t *testing.T) { { ColId: 1, Name: "dname", - Alg: plan2.CompressType_Lz4, + Alg: 
plan.CompressType_Lz4, Typ: plan.Type{ Id: 61, NotNullable: false, @@ -151,7 +147,7 @@ func TestScope_CreateTable(t *testing.T) { Width: 15, Scale: 0, }, - Default: &plan2.Default{}, + Default: &plan.Default{}, NotNull: false, Primary: false, Pkidx: 0, @@ -159,7 +155,7 @@ func TestScope_CreateTable(t *testing.T) { { ColId: 2, Name: "loc", - Alg: plan2.CompressType_Lz4, + Alg: plan.CompressType_Lz4, Typ: plan.Type{ Id: 61, NotNullable: false, @@ -167,7 +163,7 @@ func TestScope_CreateTable(t *testing.T) { Width: 50, Scale: 0, }, - Default: &plan2.Default{}, + Default: &plan.Default{}, NotNull: false, Primary: false, Pkidx: 0, @@ -179,7 +175,7 @@ func TestScope_CreateTable(t *testing.T) { PkeyColName: "deptno", Names: []string{"deptno"}, }, - Defs: []*plan2.TableDef_DefType{ + Defs: []*plan.TableDef_DefType{ { Def: &plan.TableDef_DefType_Properties{ Properties: &plan.PropertiesDef{ @@ -195,7 +191,7 @@ func TestScope_CreateTable(t *testing.T) { }, } - createTableDef := &plan2.CreateTable{ + createTableDef := &plan.CreateTable{ IfNotExists: false, Database: "test", Replace: false, @@ -203,10 +199,10 @@ func TestScope_CreateTable(t *testing.T) { } cplan := &plan.Plan{ - Plan: &plan2.Plan_Ddl{ - Ddl: &plan2.DataDefinition{ - DdlType: plan2.DataDefinition_CREATE_TABLE, - Definition: &plan2.DataDefinition_CreateTable{ + Plan: &plan.Plan_Ddl{ + Ddl: &plan.DataDefinition{ + DdlType: plan.DataDefinition_CREATE_TABLE, + Definition: &plan.DataDefinition_CreateTable{ CreateTable: createTableDef, }, }, @@ -357,7 +353,7 @@ func TestScope_CreateView(t *testing.T) { Cols: []*plan.ColDef{ { Name: "deptno", - Alg: plan2.CompressType_Lz4, + Alg: plan.CompressType_Lz4, Typ: plan.Type{ Id: 27, NotNullable: true, @@ -365,13 +361,13 @@ func TestScope_CreateView(t *testing.T) { Width: 32, Scale: -1, }, - Default: &plan2.Default{}, + Default: &plan.Default{}, NotNull: false, Primary: false, }, { Name: "dname", - Alg: plan2.CompressType_Lz4, + Alg: plan.CompressType_Lz4, Typ: plan.Type{ Id: 
61, NotNullable: false, @@ -379,13 +375,13 @@ func TestScope_CreateView(t *testing.T) { Width: 15, Scale: 0, }, - Default: &plan2.Default{}, + Default: &plan.Default{}, NotNull: false, Primary: false, }, { Name: "loc", - Alg: plan2.CompressType_Lz4, + Alg: plan.CompressType_Lz4, Typ: plan.Type{ Id: 61, NotNullable: false, @@ -393,15 +389,15 @@ func TestScope_CreateView(t *testing.T) { Width: 50, Scale: 0, }, - Default: &plan2.Default{}, + Default: &plan.Default{}, NotNull: false, Primary: false, }, }, - ViewSql: &plan2.ViewDef{ + ViewSql: &plan.ViewDef{ View: `{"Stmt":"create view v1 as select * from dept","DefaultDatabase":"db1"}`, }, - Defs: []*plan2.TableDef_DefType{ + Defs: []*plan.TableDef_DefType{ { Def: &plan.TableDef_DefType_Properties{ Properties: &plan.PropertiesDef{ @@ -417,7 +413,7 @@ func TestScope_CreateView(t *testing.T) { }, } - createViewDef := &plan2.CreateView{ + createViewDef := &plan.CreateView{ IfNotExists: false, Database: "test", Replace: false, @@ -425,10 +421,10 @@ func TestScope_CreateView(t *testing.T) { } cplan := &plan.Plan{ - Plan: &plan2.Plan_Ddl{ - Ddl: &plan2.DataDefinition{ - DdlType: plan2.DataDefinition_CREATE_VIEW, - Definition: &plan2.DataDefinition_CreateView{ + Plan: &plan.Plan_Ddl{ + Ddl: &plan.DataDefinition{ + DdlType: plan.DataDefinition_CREATE_VIEW, + Definition: &plan.DataDefinition_CreateView{ CreateView: createViewDef, }, }, @@ -507,16 +503,16 @@ func TestScope_CreateView(t *testing.T) { } func TestScope_Database(t *testing.T) { - dropDbDef := &plan2.DropDatabase{ + dropDbDef := &plan.DropDatabase{ IfExists: false, Database: "test", } cplan := &plan.Plan{ - Plan: &plan2.Plan_Ddl{ - Ddl: &plan2.DataDefinition{ - DdlType: plan2.DataDefinition_DROP_DATABASE, - Definition: &plan2.DataDefinition_DropDatabase{ + Plan: &plan.Plan_Ddl{ + Ddl: &plan.DataDefinition{ + DdlType: plan.DataDefinition_DROP_DATABASE, + Definition: &plan.DataDefinition_DropDatabase{ DropDatabase: dropDbDef, }, }, @@ -579,8 +575,8 @@ func 
Test_addTimeSpan(t *testing.T) { } func Test_getSqlForCheckPitrDup(t *testing.T) { - mk := func(level int32, origin bool) *plan2.CreatePitr { - return &plan2.CreatePitr{ + mk := func(level int32, origin bool) *plan.CreatePitr { + return &plan.CreatePitr{ Level: level, CurrentAccountId: 1, AccountName: "acc", @@ -656,7 +652,7 @@ func TestPitrDupError(t *testing.T) { {int32(tree.PITRLEVELTABLE), "", "db", "tb", "table db.tb does not exist"}, } for _, c := range cases { - p := &plan2.CreatePitr{ + p := &plan.CreatePitr{ Level: c.level, AccountName: c.accountName, DatabaseName: c.dbName, diff --git a/pkg/sql/compile/lock_meta.go b/pkg/sql/compile/lock_meta.go index 85826d30d4246..017a603e212fa 100644 --- a/pkg/sql/compile/lock_meta.go +++ b/pkg/sql/compile/lock_meta.go @@ -29,8 +29,8 @@ import ( "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" "github.com/matrixorigin/matrixone/pkg/sql/colexec/lockop" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/vm/engine" "github.com/matrixorigin/matrixone/pkg/vm/process" ) @@ -218,7 +218,7 @@ func (l *LockMeta) initLockExe(e engine.Engine, proc *process.Process) error { accountTyp := types.T_uint32.ToType() accountIdExpr := &plan.Expr{ - Typ: plan2.MakePlan2Type(&accountTyp), + Typ: planner.MakePlan2Type(&accountTyp), Expr: &plan.Expr_Col{ Col: &plan.ColRef{ ColPos: 0, @@ -228,7 +228,7 @@ func (l *LockMeta) initLockExe(e engine.Engine, proc *process.Process) error { dbNameTyp := types.T_varchar.ToType() dbNameExpr := &plan.Expr{ - Typ: plan2.MakePlan2Type(&dbNameTyp), + Typ: planner.MakePlan2Type(&dbNameTyp), Expr: &plan.Expr_Col{ Col: &plan.ColRef{ ColPos: 1, @@ -238,7 +238,7 @@ func (l *LockMeta) initLockExe(e engine.Engine, proc *process.Process) error { tblNameTyp 
:= types.T_varchar.ToType() tblNameExpr := &plan.Expr{ - Typ: plan2.MakePlan2Type(&tblNameTyp), + Typ: planner.MakePlan2Type(&tblNameTyp), Expr: &plan.Expr_Col{ Col: &plan.ColRef{ ColPos: 2, @@ -246,7 +246,7 @@ func (l *LockMeta) initLockExe(e engine.Engine, proc *process.Process) error { }, } - lockDbExpr, err := plan2.BindFuncExprImplByPlanExpr(proc.Ctx, function.SerialFunctionName, []*plan.Expr{accountIdExpr, dbNameExpr}) + lockDbExpr, err := planner.BindFuncExprImplByPlanExpr(proc.Ctx, function.SerialFunctionName, []*plan.Expr{accountIdExpr, dbNameExpr}) if err != nil { return err } @@ -256,7 +256,7 @@ func (l *LockMeta) initLockExe(e engine.Engine, proc *process.Process) error { } l.lockDbExe = exec - lockTblxpr, err := plan2.BindFuncExprImplByPlanExpr(proc.Ctx, function.SerialFunctionName, []*plan.Expr{accountIdExpr, dbNameExpr, tblNameExpr}) + lockTblxpr, err := planner.BindFuncExprImplByPlanExpr(proc.Ctx, function.SerialFunctionName, []*plan.Expr{accountIdExpr, dbNameExpr, tblNameExpr}) if err != nil { return err } diff --git a/pkg/sql/compile/operator.go b/pkg/sql/compile/operator.go index 9bc5e34fb2b91..a93e1a41d6db8 100644 --- a/pkg/sql/compile/operator.go +++ b/pkg/sql/compile/operator.go @@ -85,10 +85,10 @@ import ( "github.com/matrixorigin/matrixone/pkg/sql/colexec/value_scan" "github.com/matrixorigin/matrixone/pkg/sql/colexec/window" "github.com/matrixorigin/matrixone/pkg/sql/features" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" - "github.com/matrixorigin/matrixone/pkg/sql/plan/rule" + "github.com/matrixorigin/matrixone/pkg/sql/planner" + "github.com/matrixorigin/matrixone/pkg/sql/planner/rule" "github.com/matrixorigin/matrixone/pkg/util/executor" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/engine" @@ -397,7 +397,7 @@ func 
dupOperator(sourceOp vm.Operator, index int, maxParallel int) vm.Operator { op.BucketNum = sourceArg.BucketNum op.ShuffleRangeInt64 = sourceArg.ShuffleRangeInt64 op.ShuffleRangeUint64 = sourceArg.ShuffleRangeUint64 - op.RuntimeFilterSpec = plan2.DeepCopyRuntimeFilterSpec(sourceArg.RuntimeFilterSpec) + op.RuntimeFilterSpec = plan.DeepCopyRuntimeFilterSpec(sourceArg.RuntimeFilterSpec) op.SetInfo(&info) return op case vm.Dispatch: @@ -749,7 +749,7 @@ func constructLockOp(node *plan.Node, eng engine.Engine) (*lockop.LockOp, error) if target.HasPartitionCol { partitionColPos = target.PartitionColIdxInBat } - typ := plan2.MakeTypeByPlan2Type(target.PrimaryColTyp) + typ := planner.MakeTypeByPlan2Type(target.PrimaryColTyp) arg.AddLockTarget(target.GetTableId(), target.GetObjRef(), target.GetPrimaryColIdxInBat(), typ, partitionColPos, target.GetRefreshTsIdxInBat(), target.GetLockRows(), target.GetLockTableAtTheEnd()) } for _, target := range node.LockTargets { @@ -1077,10 +1077,10 @@ func constructTimeWindow(_ context.Context, node *plan.Node, proc *process.Proce i := 0 for _, expr := range node.AggList { if e, ok := expr.Expr.(*plan.Expr_Col); ok { - if e.Col.Name == plan2.TimeWindowStart { + if e.Col.Name == planner.TimeWindowStart { wStart = true } - if e.Col.Name == plan2.TimeWindowEnd { + if e.Col.Name == planner.TimeWindowEnd { wEnd = true } continue @@ -1130,8 +1130,8 @@ func constructWindow(_ context.Context, node *plan.Node, proc *process.Process) //for group_concat, the last arg is separator string //for cluster_centers, the last arg is kmeans_args string - if (f.F.Func.ObjName == plan2.NameGroupConcat || - f.F.Func.ObjName == plan2.NameClusterCenters) && len(f.F.Args) > 1 { + if (f.F.Func.ObjName == planner.NameGroupConcat || + f.F.Func.ObjName == planner.NameClusterCenters) && len(f.F.Args) > 1 { argExpr := f.F.Args[len(f.F.Args)-1] vec, free, err := colexec.GetReadonlyResultFromNoColumnExpression(proc, argExpr) if err != nil { @@ -1170,10 +1170,10 @@ func 
constructLimit(node *plan.Node) *limit.Limit { } func constructSample(node *plan.Node, outputRowCount bool) *sample.Sample { - if node.SampleFunc.Rows != plan2.NotSampleByRows { + if node.SampleFunc.Rows != planner.NotSampleByRows { return sample.NewSampleByRows(int(node.SampleFunc.Rows), node.AggList, node.GroupBy, node.SampleFunc.UsingRow, outputRowCount) } - if node.SampleFunc.Percent != plan2.NotSampleByPercents { + if node.SampleFunc.Percent != planner.NotSampleByPercents { return sample.NewSampleByPercent(node.SampleFunc.Percent, node.AggList, node.GroupBy) } panic("only support sample by rows / percent now.") @@ -1191,8 +1191,8 @@ func constructGroup(_ context.Context, node, childNode *plan.Node, needEval bool if len(f.F.Args) > 0 { //for group_concat, the last arg is separator string //for cluster_centers, the last arg is kmeans_args string - if (f.F.Func.ObjName == plan2.NameGroupConcat || - f.F.Func.ObjName == plan2.NameClusterCenters) && len(f.F.Args) > 1 { + if (f.F.Func.ObjName == planner.NameGroupConcat || + f.F.Func.ObjName == planner.NameClusterCenters) && len(f.F.Args) > 1 { argExpr := f.F.Args[len(f.F.Args)-1] vec, free, err := colexec.GetReadonlyResultFromNoColumnExpression(proc, argExpr) if err != nil { @@ -1299,7 +1299,7 @@ func constructShuffleOperatorForJoinV2(bucketNum int32, node *plan.Node, left bo } } - hashCol, typ := plan2.GetHashColumn(expr) + hashCol, typ := planner.GetHashColumn(expr) arg.ShuffleColIdx = hashCol.ColPos arg.ShuffleType = int32(node.Stats.HashmapStats.ShuffleType) arg.ShuffleColMin = node.Stats.HashmapStats.ShuffleColMin @@ -1307,9 +1307,9 @@ func constructShuffleOperatorForJoinV2(bucketNum int32, node *plan.Node, left bo arg.BucketNum = bucketNum switch types.T(typ) { case types.T_int64, types.T_int32, types.T_int16: - arg.ShuffleRangeInt64 = plan2.ShuffleRangeReEvalSigned(node.Stats.HashmapStats.Ranges, int(arg.BucketNum), node.Stats.HashmapStats.Nullcnt, int64(node.Stats.TableCnt)) + arg.ShuffleRangeInt64 = 
planner.ShuffleRangeReEvalSigned(node.Stats.HashmapStats.Ranges, int(arg.BucketNum), node.Stats.HashmapStats.Nullcnt, int64(node.Stats.TableCnt)) case types.T_uint64, types.T_uint32, types.T_uint16, types.T_varchar, types.T_char, types.T_text, types.T_bit, types.T_datalink: - arg.ShuffleRangeUint64 = plan2.ShuffleRangeReEvalUnsigned(node.Stats.HashmapStats.Ranges, int(arg.BucketNum), node.Stats.HashmapStats.Nullcnt, int64(node.Stats.TableCnt)) + arg.ShuffleRangeUint64 = planner.ShuffleRangeReEvalUnsigned(node.Stats.HashmapStats.Ranges, int(arg.BucketNum), node.Stats.HashmapStats.Nullcnt, int64(node.Stats.TableCnt)) } return arg } @@ -1327,7 +1327,7 @@ func constructShuffleOperatorForJoin(bucketNum int32, node *plan.Node, left bool } } - hashCol, typ := plan2.GetHashColumn(expr) + hashCol, typ := planner.GetHashColumn(expr) arg.ShuffleColIdx = hashCol.ColPos arg.ShuffleType = int32(node.Stats.HashmapStats.ShuffleType) arg.ShuffleColMin = node.Stats.HashmapStats.ShuffleColMin @@ -1335,19 +1335,19 @@ func constructShuffleOperatorForJoin(bucketNum int32, node *plan.Node, left bool arg.BucketNum = bucketNum switch types.T(typ) { case types.T_int64, types.T_int32, types.T_int16: - arg.ShuffleRangeInt64 = plan2.ShuffleRangeReEvalSigned(node.Stats.HashmapStats.Ranges, int(arg.BucketNum), node.Stats.HashmapStats.Nullcnt, int64(node.Stats.TableCnt)) + arg.ShuffleRangeInt64 = planner.ShuffleRangeReEvalSigned(node.Stats.HashmapStats.Ranges, int(arg.BucketNum), node.Stats.HashmapStats.Nullcnt, int64(node.Stats.TableCnt)) case types.T_uint64, types.T_uint32, types.T_uint16, types.T_varchar, types.T_char, types.T_text, types.T_bit, types.T_datalink: - arg.ShuffleRangeUint64 = plan2.ShuffleRangeReEvalUnsigned(node.Stats.HashmapStats.Ranges, int(arg.BucketNum), node.Stats.HashmapStats.Nullcnt, int64(node.Stats.TableCnt)) + arg.ShuffleRangeUint64 = planner.ShuffleRangeReEvalUnsigned(node.Stats.HashmapStats.Ranges, int(arg.BucketNum), node.Stats.HashmapStats.Nullcnt, 
int64(node.Stats.TableCnt)) } if left && len(node.RuntimeFilterProbeList) > 0 { - arg.RuntimeFilterSpec = plan2.DeepCopyRuntimeFilterSpec(node.RuntimeFilterProbeList[0]) + arg.RuntimeFilterSpec = plan.DeepCopyRuntimeFilterSpec(node.RuntimeFilterProbeList[0]) } return arg } func constructShuffleArgForGroupV2(node *plan.Node, dop int32) *shuffleV2.ShuffleV2 { arg := shuffleV2.NewArgument() - hashCol, typ := plan2.GetHashColumn(node.GroupBy[node.Stats.HashmapStats.ShuffleColIdx]) + hashCol, typ := planner.GetHashColumn(node.GroupBy[node.Stats.HashmapStats.ShuffleColIdx]) arg.ShuffleColIdx = hashCol.ColPos arg.ShuffleType = int32(node.Stats.HashmapStats.ShuffleType) arg.ShuffleColMin = node.Stats.HashmapStats.ShuffleColMin @@ -1355,16 +1355,16 @@ func constructShuffleArgForGroupV2(node *plan.Node, dop int32) *shuffleV2.Shuffl arg.BucketNum = dop switch types.T(typ) { case types.T_int64, types.T_int32, types.T_int16: - arg.ShuffleRangeInt64 = plan2.ShuffleRangeReEvalSigned(node.Stats.HashmapStats.Ranges, int(arg.BucketNum), node.Stats.HashmapStats.Nullcnt, int64(node.Stats.TableCnt)) + arg.ShuffleRangeInt64 = planner.ShuffleRangeReEvalSigned(node.Stats.HashmapStats.Ranges, int(arg.BucketNum), node.Stats.HashmapStats.Nullcnt, int64(node.Stats.TableCnt)) case types.T_uint64, types.T_uint32, types.T_uint16, types.T_varchar, types.T_char, types.T_text, types.T_bit, types.T_datalink: - arg.ShuffleRangeUint64 = plan2.ShuffleRangeReEvalUnsigned(node.Stats.HashmapStats.Ranges, int(arg.BucketNum), node.Stats.HashmapStats.Nullcnt, int64(node.Stats.TableCnt)) + arg.ShuffleRangeUint64 = planner.ShuffleRangeReEvalUnsigned(node.Stats.HashmapStats.Ranges, int(arg.BucketNum), node.Stats.HashmapStats.Nullcnt, int64(node.Stats.TableCnt)) } return arg } func constructShuffleArgForGroup(ss []*Scope, node *plan.Node) *shuffle.Shuffle { arg := shuffle.NewArgument() - hashCol, typ := plan2.GetHashColumn(node.GroupBy[node.Stats.HashmapStats.ShuffleColIdx]) + hashCol, typ := 
planner.GetHashColumn(node.GroupBy[node.Stats.HashmapStats.ShuffleColIdx]) arg.ShuffleColIdx = hashCol.ColPos arg.ShuffleType = int32(node.Stats.HashmapStats.ShuffleType) arg.ShuffleColMin = node.Stats.HashmapStats.ShuffleColMin @@ -1372,9 +1372,9 @@ func constructShuffleArgForGroup(ss []*Scope, node *plan.Node) *shuffle.Shuffle arg.BucketNum = int32(len(ss)) switch types.T(typ) { case types.T_int64, types.T_int32, types.T_int16: - arg.ShuffleRangeInt64 = plan2.ShuffleRangeReEvalSigned(node.Stats.HashmapStats.Ranges, int(arg.BucketNum), node.Stats.HashmapStats.Nullcnt, int64(node.Stats.TableCnt)) + arg.ShuffleRangeInt64 = planner.ShuffleRangeReEvalSigned(node.Stats.HashmapStats.Ranges, int(arg.BucketNum), node.Stats.HashmapStats.Nullcnt, int64(node.Stats.TableCnt)) case types.T_uint64, types.T_uint32, types.T_uint16, types.T_varchar, types.T_char, types.T_text, types.T_bit, types.T_datalink: - arg.ShuffleRangeUint64 = plan2.ShuffleRangeReEvalUnsigned(node.Stats.HashmapStats.Ranges, int(arg.BucketNum), node.Stats.HashmapStats.Nullcnt, int64(node.Stats.TableCnt)) + arg.ShuffleRangeUint64 = planner.ShuffleRangeReEvalUnsigned(node.Stats.HashmapStats.Ranges, int(arg.BucketNum), node.Stats.HashmapStats.Nullcnt, int64(node.Stats.TableCnt)) } return arg } @@ -1386,12 +1386,12 @@ func constructDispatch(idx int, target []*Scope, source *Scope, node *plan.Node, arg.FuncId = dispatch.ShuffleToAllFunc if node.Stats.HashmapStats.ShuffleTypeForMultiCN == plan.ShuffleTypeForMultiCN_Hybrid { if left { - arg.ShuffleType = plan2.ShuffleToLocalMatchedReg + arg.ShuffleType = planner.ShuffleToLocalMatchedReg } else { - arg.ShuffleType = plan2.ShuffleToMultiMatchedReg + arg.ShuffleType = planner.ShuffleToMultiMatchedReg } } else { - arg.ShuffleType = plan2.ShuffleToRegIndex + arg.ShuffleType = planner.ShuffleToRegIndex } return arg } @@ -1529,7 +1529,7 @@ func rewriteJoinExprToHashBuildExpr(src []*plan.Expr) []*plan.Expr { dst := make([]*plan.Expr, len(src)) for i := range src { - dst[i] 
= plan2.DeepCopyExpr(src[i]) + dst[i] = plan.DeepCopyExpr(src[i]) doRelIndexRewrite(dst[i]) } return dst @@ -1653,7 +1653,7 @@ func constructShuffleHashBuild(op vm.Operator, proc *process.Process) *hashbuild ret.HashOnPK = arg.HashOnPK ret.NeedAllocateSels = !arg.HashOnPK if len(arg.RuntimeFilterSpecs) > 0 { - ret.RuntimeFilterSpec = plan2.DeepCopyRuntimeFilterSpec(arg.RuntimeFilterSpecs[0]) + ret.RuntimeFilterSpec = plan.DeepCopyRuntimeFilterSpec(arg.RuntimeFilterSpecs[0]) } ret.JoinMapTag = arg.JoinMapTag ret.ShuffleIdx = arg.ShuffleIdx @@ -1669,7 +1669,7 @@ func constructShuffleHashBuild(op vm.Operator, proc *process.Process) *hashbuild ret.DedupColTypes = arg.DedupColTypes ret.DelColIdx = arg.DelColIdx if len(arg.RuntimeFilterSpecs) > 0 { - ret.RuntimeFilterSpec = plan2.DeepCopyRuntimeFilterSpec(arg.RuntimeFilterSpecs[0]) + ret.RuntimeFilterSpec = plan.DeepCopyRuntimeFilterSpec(arg.RuntimeFilterSpecs[0]) } ret.JoinMapTag = arg.JoinMapTag ret.ShuffleIdx = arg.ShuffleIdx @@ -1685,7 +1685,7 @@ func constructShuffleHashBuild(op vm.Operator, proc *process.Process) *hashbuild ret.DedupColTypes = arg.DedupColTypes ret.DelColIdx = arg.DelColIdx if len(arg.RuntimeFilterSpecs) > 0 { - ret.RuntimeFilterSpec = plan2.DeepCopyRuntimeFilterSpec(arg.RuntimeFilterSpecs[0]) + ret.RuntimeFilterSpec = plan.DeepCopyRuntimeFilterSpec(arg.RuntimeFilterSpecs[0]) } ret.JoinMapTag = arg.JoinMapTag ret.ShuffleIdx = arg.ShuffleIdx @@ -1734,7 +1734,7 @@ func constructJoinCondition(expr *plan.Expr, proc *process.Process) (*plan.Expr, } } e, ok := expr.Expr.(*plan.Expr_F) - if !ok || !plan2.IsEqualFunc(e.F.Func.GetObj()) { + if !ok || !planner.IsEqualFunc(e.F.Func.GetObj()) { panic(moerr.NewNYIf(proc.GetTopContext(), "join condition '%s'", expr)) } if exprRelPos(e.F.Args[0]) == 1 { @@ -1790,7 +1790,7 @@ func constructValueScan(proc *process.Process, node *plan.Node) (*value_scan.Val } for i, col := range node.RowsetData.Cols { - vec := 
vector.NewVec(plan2.MakeTypeByPlan2Type(node.TableDef.Cols[i].Typ)) + vec := vector.NewVec(planner.MakeTypeByPlan2Type(node.TableDef.Cols[i].Typ)) op.Batchs[0].Vecs[i] = vec for j, rowsetExpr := range col.Data { get, err := rule.GetConstantValue2(proc, rowsetExpr.Expr, vec) @@ -1815,11 +1815,11 @@ func extraJoinConditions(exprs []*plan.Expr) (*plan.Expr, []*plan.Expr) { notEqConds := make([]*plan.Expr, 0, len(exprs)) for i, expr := range exprs { if e, ok := expr.Expr.(*plan.Expr_F); ok { - if !plan2.IsEqualFunc(e.F.Func.GetObj()) { + if !planner.IsEqualFunc(e.F.Func.GetObj()) { notEqConds = append(notEqConds, exprs[i]) continue } - lpos, rpos := plan2.HasColExpr(e.F.Args[0], -1), plan2.HasColExpr(e.F.Args[1], -1) + lpos, rpos := planner.HasColExpr(e.F.Args[0], -1), planner.HasColExpr(e.F.Args[1], -1) if lpos == -1 || rpos == -1 || (lpos == rpos) { notEqConds = append(notEqConds, exprs[i]) continue diff --git a/pkg/sql/compile/remoterun.go b/pkg/sql/compile/remoterun.go index da165da3fb304..413c101a91936 100644 --- a/pkg/sql/compile/remoterun.go +++ b/pkg/sql/compile/remoterun.go @@ -74,7 +74,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/sql/colexec/top" "github.com/matrixorigin/matrixone/pkg/sql/colexec/unionall" "github.com/matrixorigin/matrixone/pkg/sql/colexec/value_scan" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/engine" "github.com/matrixorigin/matrixone/pkg/vm/engine/readutil" @@ -858,7 +858,7 @@ func convertToVmOperator(opr *pipeline.Instruction, ctx *scopeContext, eng engin t := opr.GetLockOp() lockArg := lockop.NewArgumentByEngine(eng) for _, target := range t.Targets { - typ := plan2.MakeTypeByPlan2Type(target.PrimaryColTyp) + typ := planner.MakeTypeByPlan2Type(target.PrimaryColTyp) lockArg.AddLockTarget(target.GetTableId(), target.GetObjRef(), target.GetPrimaryColIdxInBat(), typ, 
target.PartitionColIdxInBat, target.GetRefreshTsIdxInBat(), target.GetLockRows(), target.GetLockTableAtTheEnd()) } for _, target := range t.Targets { diff --git a/pkg/sql/compile/remoterunServer.go b/pkg/sql/compile/remoterunServer.go index c1f4b162e5dba..0ebda17381ff3 100644 --- a/pkg/sql/compile/remoterunServer.go +++ b/pkg/sql/compile/remoterunServer.go @@ -39,7 +39,7 @@ import ( qclient "github.com/matrixorigin/matrixone/pkg/queryservice/client" "github.com/matrixorigin/matrixone/pkg/sql/colexec" "github.com/matrixorigin/matrixone/pkg/sql/models" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/udf" "github.com/matrixorigin/matrixone/pkg/util/debug/goroutine" @@ -404,7 +404,7 @@ func (receiver *messageReceiverOnServer) newCompile() (*Compile, error) { } c := allocateNewCompile(proc) - c.execType = plan2.ExecTypeAP_MULTICN + c.execType = planner.ExecTypeAP_MULTICN c.e = cnInfo.storeEngine c.MessageBoard = c.MessageBoard.SetMultiCN(c.GetMessageCenter(), c.proc.GetStmtProfile().GetStmtId()) c.proc.SetMessageBoard(c.MessageBoard) @@ -412,7 +412,7 @@ func (receiver *messageReceiverOnServer) newCompile() (*Compile, error) { c.addr = receiver.cnInformation.cnAddr // a method to send back. 
- c.execType = plan2.ExecTypeAP_MULTICN + c.execType = planner.ExecTypeAP_MULTICN c.fill = func(b *batch.Batch, counter *perfcounter.CounterSet) error { return receiver.sendBatch(b) } diff --git a/pkg/sql/compile/remoterun_test.go b/pkg/sql/compile/remoterun_test.go index 2c46f0cfb2297..55afbc82e7209 100644 --- a/pkg/sql/compile/remoterun_test.go +++ b/pkg/sql/compile/remoterun_test.go @@ -32,6 +32,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/defines" mock_frontend "github.com/matrixorigin/matrixone/pkg/frontend/test" "github.com/matrixorigin/matrixone/pkg/pb/pipeline" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/pb/txn" "github.com/matrixorigin/matrixone/pkg/sql/colexec" "github.com/matrixorigin/matrixone/pkg/sql/colexec/aggexec" @@ -70,7 +71,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/sql/colexec/table_function" "github.com/matrixorigin/matrixone/pkg/sql/colexec/top" "github.com/matrixorigin/matrixone/pkg/sql/colexec/value_scan" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/vm" @@ -256,15 +257,15 @@ func Test_convertToVmInstruction(t *testing.T) { {Op: int32(vm.Dispatch), Dispatch: &pipeline.Dispatch{}}, {Op: int32(vm.Group), Agg: &pipeline.Group{}}, {Op: int32(vm.HashJoin), HashJoin: &pipeline.HashJoin{}}, - {Op: int32(vm.Limit), Limit: plan.MakePlan2Int64ConstExprWithType(1)}, + {Op: int32(vm.Limit), Limit: planner.MakePlan2Int64ConstExprWithType(1)}, {Op: int32(vm.LoopJoin), LoopJoin: &pipeline.LoopJoin{}}, - {Op: int32(vm.Offset), Offset: plan.MakePlan2Int64ConstExprWithType(0)}, + {Op: int32(vm.Offset), Offset: planner.MakePlan2Int64ConstExprWithType(0)}, {Op: int32(vm.Order), OrderBy: []*plan.OrderBySpec{}}, {Op: int32(vm.Product), Product: &pipeline.Product{}}, {Op: int32(vm.ProductL2), ProductL2: 
&pipeline.ProductL2{}}, {Op: int32(vm.Projection), ProjectList: []*plan.Expr{}}, {Op: int32(vm.Filter), Filters: []*plan.Expr{}, RuntimeFilters: []*plan.Expr{}}, - {Op: int32(vm.Top), Limit: plan.MakePlan2Int64ConstExprWithType(1)}, + {Op: int32(vm.Top), Limit: planner.MakePlan2Int64ConstExprWithType(1)}, {Op: int32(vm.Intersect), SetOp: &pipeline.SetOp{}}, {Op: int32(vm.IntersectAll), SetOp: &pipeline.SetOp{}}, {Op: int32(vm.Minus), SetOp: &pipeline.SetOp{}}, @@ -272,7 +273,7 @@ func Test_convertToVmInstruction(t *testing.T) { {Op: int32(vm.Merge), Merge: &pipeline.Merge{}}, {Op: int32(vm.MergeRecursive)}, {Op: int32(vm.MergeGroup), Agg: &pipeline.Group{}}, - {Op: int32(vm.MergeTop), Limit: plan.MakePlan2Int64ConstExprWithType(1)}, + {Op: int32(vm.MergeTop), Limit: planner.MakePlan2Int64ConstExprWithType(1)}, {Op: int32(vm.MergeOrder), OrderBy: []*plan.OrderBySpec{}}, {Op: int32(vm.TableFunction), TableFunction: &pipeline.TableFunction{}}, {Op: int32(vm.HashBuild), HashBuild: &pipeline.HashBuild{}}, diff --git a/pkg/sql/compile/runtime_filter.go b/pkg/sql/compile/runtime_filter.go index 050a8172ae7b0..852a34da79405 100644 --- a/pkg/sql/compile/runtime_filter.go +++ b/pkg/sql/compile/runtime_filter.go @@ -17,8 +17,6 @@ package compile import ( "context" - "github.com/matrixorigin/matrixone/pkg/vm/message" - "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/defines" @@ -26,9 +24,10 @@ import ( "github.com/matrixorigin/matrixone/pkg/objectio" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/util/errutil" "github.com/matrixorigin/matrixone/pkg/vm/engine" + "github.com/matrixorigin/matrixone/pkg/vm/message" "github.com/matrixorigin/matrixone/pkg/vm/process" ) @@ -90,13 
+89,13 @@ func ApplyRuntimeFilters( ) for _, expr := range exprs { - auxIdCnt = plan2.AssignAuxIdForExpr(expr, auxIdCnt) + auxIdCnt = planner.AssignAuxIdForExpr(expr, auxIdCnt) } columnMap := make(map[int]int) zms := make([]objectio.ZoneMap, auxIdCnt) vecs := make([]*vector.Vector, auxIdCnt) - plan2.GetColumnMapByExprs(exprs, tableDef, columnMap) + planner.GetColumnMapByExprs(exprs, tableDef, columnMap) defer func() { for i := range vecs { diff --git a/pkg/sql/compile/scope.go b/pkg/sql/compile/scope.go index 7cba15e0bc797..251029b3cff5a 100644 --- a/pkg/sql/compile/scope.go +++ b/pkg/sql/compile/scope.go @@ -41,7 +41,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/sql/colexec/group" "github.com/matrixorigin/matrixone/pkg/sql/colexec/output" "github.com/matrixorigin/matrixone/pkg/sql/colexec/table_scan" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/sql/util" metricv2 "github.com/matrixorigin/matrixone/pkg/util/metric/v2" "github.com/matrixorigin/matrixone/pkg/util/trace/impl/motrace/statistic" @@ -676,7 +676,7 @@ func (s *Scope) waitForRuntimeFilters(c *Compile) ([]*plan.Expr, bool, error) { case message.RuntimeFilter_DROP: return nil, true, nil case message.RuntimeFilter_IN: - inExpr := plan2.MakeInExpr(c.proc.Ctx, spec.Expr, msg.Card, msg.Data, spec.MatchPrefix) + inExpr := planner.MakeInExpr(c.proc.Ctx, spec.Expr, msg.Card, msg.Data, spec.MatchPrefix) runtimeInExprList = append(runtimeInExprList, inExpr) // TODO: implement BETWEEN expression @@ -728,7 +728,7 @@ func (s *Scope) handleRuntimeFilters(c *Compile, runtimeInExprList []*plan.Expr) } for _, e := range s.DataSource.BlockFilterList { - err := plan2.EvalFoldExpr(s.Proc, e, &c.filterExprExes) + err := planner.EvalFoldExpr(s.Proc, e, &c.filterExprExes) if err != nil { return nil, err } @@ -1089,7 +1089,7 @@ func (s *Scope) buildReaders(c *Compile) (readers []engine.Reader, err error) { return } for i 
:= range s.DataSource.FilterList { - if plan2.IsFalseExpr(s.DataSource.FilterList[i]) { + if planner.IsFalseExpr(s.DataSource.FilterList[i]) { emptyScan = true break } diff --git a/pkg/sql/compile/scope_test.go b/pkg/sql/compile/scope_test.go index 3a485708ec23b..43bb855c1d67b 100644 --- a/pkg/sql/compile/scope_test.go +++ b/pkg/sql/compile/scope_test.go @@ -43,7 +43,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/sql/colexec/table_scan" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect/mysql" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/testutil/testengine" "github.com/matrixorigin/matrixone/pkg/vm" @@ -155,7 +155,7 @@ func generateScopeCases(t *testing.T, testCases []string) []*Scope { proc.Base.TxnClient = txnCli proc.Base.TxnOperator = txnOp e, _, compilerCtx := testengine.New(defines.AttachAccountId(context.Background(), catalog.System_Account)) - opt := plan2.NewBaseOptimizer(compilerCtx) + opt := planner.NewBaseOptimizer(compilerCtx) ctx := compilerCtx.GetContext() stmts, err := mysql.Parse(ctx, sql, 1) require.NoError(t1, err) @@ -304,7 +304,7 @@ func TestCompileExternValueScan(t *testing.T) { testCompile := NewMockCompile(t) testCompile.cnList = engine.Nodes{engine.Node{Addr: "cn1:6001"}, engine.Node{Addr: "cn2:6001"}} testCompile.addr = "cn1:6001" - testCompile.execType = plan2.ExecTypeAP_MULTICN + testCompile.execType = planner.ExecTypeAP_MULTICN testCompile.anal = &AnalyzeModule{qry: &plan.Query{}} param := &tree.ExternParam{ ExParamConst: tree.ExParamConst{ @@ -324,7 +324,7 @@ func TestCompileExternScanParallelWrite(t *testing.T) { testCompile := NewMockCompile(t) testCompile.cnList = engine.Nodes{engine.Node{Addr: "cn1:6001", Mcpu: 4}, engine.Node{Addr: "cn2:6001", Mcpu: 4}} testCompile.addr = "cn1:6001" - testCompile.execType = 
plan2.ExecTypeAP_MULTICN + testCompile.execType = planner.ExecTypeAP_MULTICN testCompile.anal = &AnalyzeModule{qry: &plan.Query{}} param := &tree.ExternParam{ ExParamConst: tree.ExParamConst{ @@ -345,7 +345,7 @@ func TestCompileExternScanParallelReadWrite(t *testing.T) { testCompile := NewMockCompile(t) testCompile.cnList = engine.Nodes{engine.Node{Addr: "cn1:6001", Mcpu: 4}, engine.Node{Addr: "cn2:6001", Mcpu: 4}} testCompile.addr = "cn1:6001" - testCompile.execType = plan2.ExecTypeAP_MULTICN + testCompile.execType = planner.ExecTypeAP_MULTICN testCompile.anal = &AnalyzeModule{qry: &plan.Query{}} ctx := context.TODO() param := &tree.ExternParam{ @@ -630,7 +630,7 @@ func TestBuildReadersBloomFilterHint(t *testing.T) { c := NewMockCompile(t) c.proc = proc // Use MakeFalseExpr to make emptyScan = true, skipping getRelData - s.DataSource.FilterList = []*plan.Expr{plan2.MakeFalseExpr()} + s.DataSource.FilterList = []*plan.Expr{planner.MakeFalseExpr()} s.DataSource.RuntimeFilterSpecs = []*plan.RuntimeFilterSpec{} readers, err := s.buildReaders(c) @@ -663,7 +663,7 @@ func TestBuildReadersBloomFilterHint(t *testing.T) { c := NewMockCompile(t) c.proc = proc - s.DataSource.FilterList = []*plan.Expr{plan2.MakeFalseExpr()} + s.DataSource.FilterList = []*plan.Expr{planner.MakeFalseExpr()} readers, err := s.buildReaders(c) require.NoError(t, err) @@ -697,7 +697,7 @@ func TestBuildReadersBloomFilterHint(t *testing.T) { c := NewMockCompile(t) c.proc = proc - s.DataSource.FilterList = []*plan.Expr{plan2.MakeFalseExpr()} + s.DataSource.FilterList = []*plan.Expr{planner.MakeFalseExpr()} readers, err := s.buildReaders(c) require.NoError(t, err) @@ -733,7 +733,7 @@ func TestBuildReadersBloomFilterHint(t *testing.T) { c := NewMockCompile(t) c.proc = proc - s.DataSource.FilterList = []*plan.Expr{plan2.MakeFalseExpr()} + s.DataSource.FilterList = []*plan.Expr{planner.MakeFalseExpr()} readers, err := s.buildReaders(c) require.NoError(t, err) @@ -767,7 +767,7 @@ func 
TestBuildReadersBloomFilterHint(t *testing.T) { c := NewMockCompile(t) c.proc = proc - s.DataSource.FilterList = []*plan.Expr{plan2.MakeFalseExpr()} + s.DataSource.FilterList = []*plan.Expr{planner.MakeFalseExpr()} readers, err := s.buildReaders(c) require.NoError(t, err) @@ -802,7 +802,7 @@ func TestBuildReadersBloomFilterHint(t *testing.T) { c := NewMockCompile(t) c.proc = proc - s.DataSource.FilterList = []*plan.Expr{plan2.MakeFalseExpr()} + s.DataSource.FilterList = []*plan.Expr{planner.MakeFalseExpr()} readers, err := s.buildReaders(c) require.NoError(t, err) @@ -837,7 +837,7 @@ func TestBuildReadersBloomFilterHint(t *testing.T) { c := NewMockCompile(t) c.proc = proc - s.DataSource.FilterList = []*plan.Expr{plan2.MakeFalseExpr()} + s.DataSource.FilterList = []*plan.Expr{planner.MakeFalseExpr()} readers, err := s.buildReaders(c) require.NoError(t, err) diff --git a/pkg/sql/compile/sql_executor.go b/pkg/sql/compile/sql_executor.go index 461eaea3b2800..ab537199e7f23 100644 --- a/pkg/sql/compile/sql_executor.go +++ b/pkg/sql/compile/sql_executor.go @@ -31,12 +31,13 @@ import ( "github.com/matrixorigin/matrixone/pkg/lockservice" "github.com/matrixorigin/matrixone/pkg/logservice" "github.com/matrixorigin/matrixone/pkg/logutil" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/perfcounter" qclient "github.com/matrixorigin/matrixone/pkg/queryservice/client" "github.com/matrixorigin/matrixone/pkg/sql/parsers" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/taskservice" "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/udf" @@ -390,7 +391,7 @@ func (exec *txnExecutor) Exec( *tree.ShowTableNumber, *tree.ShowCreateDatabase, *tree.ShowCreateTable, *tree.ShowIndex, *tree.ExplainStmt, 
*tree.ExplainAnalyze, *tree.ExplainPhyPlan: - opt := plan.NewBaseOptimizer(compileContext) + opt := planner.NewBaseOptimizer(compileContext) optimized, err := opt.Optimize(stmt, prepared) if err == nil { pn = &plan.Plan{ @@ -402,7 +403,7 @@ func (exec *txnExecutor) Exec( return executor.Result{}, err } default: - pn, err = plan.BuildPlan(compileContext, stmt, prepared) + pn, err = planner.BuildPlan(compileContext, stmt, prepared) } if err != nil { @@ -410,7 +411,7 @@ func (exec *txnExecutor) Exec( } if prepared { - _, _, err := plan.ResetPreparePlan(compileContext, pn) + _, _, err := planner.ResetPreparePlan(compileContext, pn) if err != nil { return executor.Result{}, err } @@ -442,14 +443,14 @@ func (exec *txnExecutor) Exec( if prepared { c.SetBuildPlanFunc(func(ctx context.Context) (*plan.Plan, error) { - pn, err := plan.BuildPlan( + pn, err := planner.BuildPlan( exec.s.getCompileContext(ctx, proc, exec.getDatabase(), lower), stmts[0], true, ) if err != nil { return pn, err } - _, _, err = plan.ResetPreparePlan(compileContext, pn) + _, _, err = planner.ResetPreparePlan(compileContext, pn) if err != nil { return pn, err } @@ -457,7 +458,7 @@ func (exec *txnExecutor) Exec( }) } else { c.SetBuildPlanFunc(func(ctx context.Context) (*plan.Plan, error) { - return plan.BuildPlan( + return planner.BuildPlan( exec.s.getCompileContext(ctx, proc, exec.getDatabase(), lower), stmts[0], false) }) diff --git a/pkg/sql/compile/sql_executor_context.go b/pkg/sql/compile/sql_executor_context.go index 69af363cda3cb..0cbd652f3e9b7 100644 --- a/pkg/sql/compile/sql_executor_context.go +++ b/pkg/sql/compile/sql_executor_context.go @@ -24,26 +24,26 @@ import ( "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/defines" - planpb "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" pb "github.com/matrixorigin/matrixone/pkg/pb/statsinfo" "github.com/matrixorigin/matrixone/pkg/perfcounter" + 
"github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect/mysql" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/util/trace/impl/motrace/statistic" "github.com/matrixorigin/matrixone/pkg/vm/engine" "github.com/matrixorigin/matrixone/pkg/vm/process" ) -var _ plan.CompilerContext = new(compilerContext) +var _ planner.CompilerContext = new(compilerContext) type compilerContext struct { ctx context.Context defaultDB string engine engine.Engine proc *process.Process - statsCache *plan.StatsCache + statsCache *planner.StatsCache buildAlterView bool dbOfView, nameOfView string @@ -70,7 +70,7 @@ func (c *compilerContext) GetSnapshot() *plan.Snapshot { func (c *compilerContext) SetSnapshot(snapshot *plan.Snapshot) { } -func (c *compilerContext) InitExecuteStmtParam(execPlan *planpb.Execute) (*planpb.Plan, tree.Statement, error) { +func (c *compilerContext) InitExecuteStmtParam(execPlan *plan.Execute) (*plan.Plan, tree.Statement, error) { //TODO implement me panic("implement me") } @@ -177,9 +177,9 @@ func (c *compilerContext) doStatsHeavyWork(obj *plan.ObjectRef, snapshot *plan.S return nil, nil } -func (c *compilerContext) GetStatsCache() *plan.StatsCache { +func (c *compilerContext) GetStatsCache() *planner.StatsCache { if c.statsCache == nil { - c.statsCache = plan.NewStatsCache() + c.statsCache = planner.NewStatsCache() } return c.statsCache } @@ -200,7 +200,7 @@ func (c *compilerContext) DatabaseExists(name string, snapshot *plan.Snapshot) b ctx := c.GetContext() txnOpt := c.proc.GetTxnOperator() - if plan.IsSnapshotValid(snapshot) && snapshot.TS.Less(c.proc.GetTxnOperator().Txn().SnapshotTS) { + if planner.IsSnapshotValid(snapshot) && snapshot.TS.Less(c.proc.GetTxnOperator().Txn().SnapshotTS) { txnOpt = 
c.proc.GetTxnOperator().CloneSnapshotOp(*snapshot.TS) if snapshot.Tenant != nil { @@ -220,7 +220,7 @@ func (c *compilerContext) GetDatabaseId(dbName string, snapshot *plan.Snapshot) ctx := c.GetContext() txnOpt := c.proc.GetTxnOperator() - if plan.IsSnapshotValid(snapshot) && snapshot.TS.Less(c.proc.GetTxnOperator().Txn().SnapshotTS) { + if planner.IsSnapshotValid(snapshot) && snapshot.TS.Less(c.proc.GetTxnOperator().Txn().SnapshotTS) { txnOpt = c.proc.GetTxnOperator().CloneSnapshotOp(*snapshot.TS) if snapshot.Tenant != nil { @@ -290,7 +290,7 @@ func (c *compilerContext) ResolveById(tableId uint64, snapshot *plan.Snapshot) ( ctx := c.GetContext() txnOpt := c.proc.GetTxnOperator() - if plan.IsSnapshotValid(snapshot) && snapshot.TS.Less(c.proc.GetTxnOperator().Txn().SnapshotTS) { + if planner.IsSnapshotValid(snapshot) && snapshot.TS.Less(c.proc.GetTxnOperator().Txn().SnapshotTS) { txnOpt = c.proc.GetTxnOperator().CloneSnapshotOp(*snapshot.TS) if snapshot.Tenant != nil { @@ -309,7 +309,7 @@ func (c *compilerContext) ResolveById(tableId uint64, snapshot *plan.Snapshot) ( } func (c *compilerContext) ResolveIndexTableByRef(ref *plan.ObjectRef, tblName string, snapshot *plan.Snapshot) (*plan.ObjectRef, *plan.TableDef, error) { - return c.Resolve(plan.DbNameOfObjRef(ref), tblName, snapshot) + return c.Resolve(planner.DbNameOfObjRef(ref), tblName, snapshot) } func (c *compilerContext) Resolve(dbName string, tableName string, snapshot *plan.Snapshot) (*plan.ObjectRef, *plan.TableDef, error) { @@ -389,7 +389,7 @@ func (c *compilerContext) getRelation( ctx := c.GetContext() txnOpt := c.proc.GetTxnOperator() - if plan.IsSnapshotValid(snapshot) && snapshot.TS.Less(c.proc.GetTxnOperator().Txn().SnapshotTS) { + if planner.IsSnapshotValid(snapshot) && snapshot.TS.Less(c.proc.GetTxnOperator().Txn().SnapshotTS) { txnOpt = c.proc.GetTxnOperator().CloneSnapshotOp(*snapshot.TS) if snapshot.Tenant != nil { diff --git a/pkg/sql/compile/sql_executor_context_test.go 
b/pkg/sql/compile/sql_executor_context_test.go index 8559f0c506486..879cc853382e4 100644 --- a/pkg/sql/compile/sql_executor_context_test.go +++ b/pkg/sql/compile/sql_executor_context_test.go @@ -18,13 +18,11 @@ import ( "testing" "github.com/golang/mock/gomock" - "github.com/stretchr/testify/require" - "github.com/matrixorigin/matrixone/pkg/common/mpool" - "github.com/matrixorigin/matrixone/pkg/testutil" - mock_frontend "github.com/matrixorigin/matrixone/pkg/frontend/test" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/testutil" + "github.com/stretchr/testify/require" ) func Test_panic(t *testing.T) { diff --git a/pkg/sql/compile/types.go b/pkg/sql/compile/types.go index df9a36db99604..72c7203bc80b3 100644 --- a/pkg/sql/compile/types.go +++ b/pkg/sql/compile/types.go @@ -31,7 +31,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/sql/colexec" "github.com/matrixorigin/matrixone/pkg/sql/colexec/group" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/vm" "github.com/matrixorigin/matrixone/pkg/vm/engine" @@ -250,7 +250,7 @@ type Compile struct { pn *plan.Plan - execType plan2.ExecType + execType planner.ExecType // fill is a result writer runs a callback function. // fill will be called when result data is ready. @@ -295,7 +295,7 @@ type Compile struct { // cnLabel is the CN labels which is received from proxy when build connection. 
cnLabel map[string]string - buildPlanFunc func(ctx context.Context) (*plan2.Plan, error) + buildPlanFunc func(ctx context.Context) (*plan.Plan, error) startAt time.Time // use for duplicate check fuzzys []*fuzzyCheck diff --git a/pkg/sql/crt/crt.go b/pkg/sql/crt/crt.go index 877806342c7da..5685fba91d141 100644 --- a/pkg/sql/crt/crt.go +++ b/pkg/sql/crt/crt.go @@ -31,7 +31,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/fileservice" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/vm/process" "github.com/pierrec/lz4/v4" ) @@ -66,7 +66,7 @@ func GetIOReadCloser(proc *process.Process, param *tree.ExternParam, data string return io.NopCloser(proc.GetLoadLocalReader()), nil } - fs, readPath, err := plan2.GetForETLWithType(param, data) + fs, readPath, err := planner.GetForETLWithType(param, data) if err != nil { return nil, err } diff --git a/pkg/sql/plan/function/agg/avg_tw_cache.go b/pkg/sql/function/agg/avg_tw_cache.go similarity index 100% rename from pkg/sql/plan/function/agg/avg_tw_cache.go rename to pkg/sql/function/agg/avg_tw_cache.go diff --git a/pkg/sql/plan/function/agg/avg_tw_result.go b/pkg/sql/function/agg/avg_tw_result.go similarity index 100% rename from pkg/sql/plan/function/agg/avg_tw_result.go rename to pkg/sql/function/agg/avg_tw_result.go diff --git a/pkg/sql/plan/function/agg/size_test.go b/pkg/sql/function/agg/size_test.go similarity index 100% rename from pkg/sql/plan/function/agg/size_test.go rename to pkg/sql/function/agg/size_test.go diff --git a/pkg/sql/plan/function/agg/special.go b/pkg/sql/function/agg/special.go similarity index 100% rename from pkg/sql/plan/function/agg/special.go rename to pkg/sql/function/agg/special.go diff --git a/pkg/sql/plan/function/agg/types.go b/pkg/sql/function/agg/types.go similarity index 100% 
rename from pkg/sql/plan/function/agg/types.go rename to pkg/sql/function/agg/types.go diff --git a/pkg/sql/plan/function/agg/window.go b/pkg/sql/function/agg/window.go similarity index 100% rename from pkg/sql/plan/function/agg/window.go rename to pkg/sql/function/agg/window.go diff --git a/pkg/sql/plan/function/arithmetic.go b/pkg/sql/function/arithmetic.go similarity index 100% rename from pkg/sql/plan/function/arithmetic.go rename to pkg/sql/function/arithmetic.go diff --git a/pkg/sql/plan/function/arithmetic_comprehensive_test.go b/pkg/sql/function/arithmetic_comprehensive_test.go similarity index 100% rename from pkg/sql/plan/function/arithmetic_comprehensive_test.go rename to pkg/sql/function/arithmetic_comprehensive_test.go diff --git a/pkg/sql/plan/function/arithmetic_div_mod_test.go b/pkg/sql/function/arithmetic_div_mod_test.go similarity index 100% rename from pkg/sql/plan/function/arithmetic_div_mod_test.go rename to pkg/sql/function/arithmetic_div_mod_test.go diff --git a/pkg/sql/plan/function/arithmetic_div_zero_test.go b/pkg/sql/function/arithmetic_div_zero_test.go similarity index 100% rename from pkg/sql/plan/function/arithmetic_div_zero_test.go rename to pkg/sql/function/arithmetic_div_zero_test.go diff --git a/pkg/sql/plan/function/arithmetic_minus_test.go b/pkg/sql/function/arithmetic_minus_test.go similarity index 100% rename from pkg/sql/plan/function/arithmetic_minus_test.go rename to pkg/sql/function/arithmetic_minus_test.go diff --git a/pkg/sql/plan/function/arithmetic_multi_test.go b/pkg/sql/function/arithmetic_multi_test.go similarity index 100% rename from pkg/sql/plan/function/arithmetic_multi_test.go rename to pkg/sql/function/arithmetic_multi_test.go diff --git a/pkg/sql/plan/function/arithmetic_overflow_check.go b/pkg/sql/function/arithmetic_overflow_check.go similarity index 100% rename from pkg/sql/plan/function/arithmetic_overflow_check.go rename to pkg/sql/function/arithmetic_overflow_check.go diff --git 
a/pkg/sql/plan/function/arithmetic_plus_test.go b/pkg/sql/function/arithmetic_plus_test.go similarity index 100% rename from pkg/sql/plan/function/arithmetic_plus_test.go rename to pkg/sql/function/arithmetic_plus_test.go diff --git a/pkg/sql/plan/function/auto_increase.go b/pkg/sql/function/auto_increase.go similarity index 97% rename from pkg/sql/plan/function/auto_increase.go rename to pkg/sql/function/auto_increase.go index 2c88281c83d08..9e99490462cfb 100644 --- a/pkg/sql/plan/function/auto_increase.go +++ b/pkg/sql/function/auto_increase.go @@ -21,7 +21,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/defines" "github.com/matrixorigin/matrixone/pkg/incrservice" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function/functionUtil" + "github.com/matrixorigin/matrixone/pkg/sql/function/functionUtil" "github.com/matrixorigin/matrixone/pkg/vm/engine" "github.com/matrixorigin/matrixone/pkg/vm/process" ) diff --git a/pkg/sql/plan/function/baseTemplate.go b/pkg/sql/function/baseTemplate.go similarity index 99% rename from pkg/sql/plan/function/baseTemplate.go rename to pkg/sql/function/baseTemplate.go index c3f00f6547a24..aba612cc23b96 100644 --- a/pkg/sql/plan/function/baseTemplate.go +++ b/pkg/sql/function/baseTemplate.go @@ -22,7 +22,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/nulls" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function/functionUtil" + "github.com/matrixorigin/matrixone/pkg/sql/function/functionUtil" "github.com/matrixorigin/matrixone/pkg/vm/process" "golang.org/x/exp/constraints" ) diff --git a/pkg/sql/plan/function/ctl/cmd_addfaultpoint.go b/pkg/sql/function/ctl/cmd_addfaultpoint.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_addfaultpoint.go rename to pkg/sql/function/ctl/cmd_addfaultpoint.go diff --git 
a/pkg/sql/plan/function/ctl/cmd_backup.go b/pkg/sql/function/ctl/cmd_backup.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_backup.go rename to pkg/sql/function/ctl/cmd_backup.go diff --git a/pkg/sql/plan/function/ctl/cmd_checkpoint.go b/pkg/sql/function/ctl/cmd_checkpoint.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_checkpoint.go rename to pkg/sql/function/ctl/cmd_checkpoint.go diff --git a/pkg/sql/plan/function/ctl/cmd_core_dump.go b/pkg/sql/function/ctl/cmd_core_dump.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_core_dump.go rename to pkg/sql/function/ctl/cmd_core_dump.go diff --git a/pkg/sql/plan/function/ctl/cmd_core_dump_test.go b/pkg/sql/function/ctl/cmd_core_dump_test.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_core_dump_test.go rename to pkg/sql/function/ctl/cmd_core_dump_test.go diff --git a/pkg/sql/plan/function/ctl/cmd_disk_cleaner.go b/pkg/sql/function/ctl/cmd_disk_cleaner.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_disk_cleaner.go rename to pkg/sql/function/ctl/cmd_disk_cleaner.go diff --git a/pkg/sql/plan/function/ctl/cmd_flush.go b/pkg/sql/function/ctl/cmd_flush.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_flush.go rename to pkg/sql/function/ctl/cmd_flush.go diff --git a/pkg/sql/plan/function/ctl/cmd_gc.go b/pkg/sql/function/ctl/cmd_gc.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_gc.go rename to pkg/sql/function/ctl/cmd_gc.go diff --git a/pkg/sql/plan/function/ctl/cmd_gckp.go b/pkg/sql/function/ctl/cmd_gckp.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_gckp.go rename to pkg/sql/function/ctl/cmd_gckp.go diff --git a/pkg/sql/plan/function/ctl/cmd_get_table_shards.go b/pkg/sql/function/ctl/cmd_get_table_shards.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_get_table_shards.go rename to pkg/sql/function/ctl/cmd_get_table_shards.go diff --git 
a/pkg/sql/plan/function/ctl/cmd_inspectdn.go b/pkg/sql/function/ctl/cmd_inspectdn.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_inspectdn.go rename to pkg/sql/function/ctl/cmd_inspectdn.go diff --git a/pkg/sql/plan/function/ctl/cmd_intercept_commit.go b/pkg/sql/function/ctl/cmd_intercept_commit.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_intercept_commit.go rename to pkg/sql/function/ctl/cmd_intercept_commit.go diff --git a/pkg/sql/plan/function/ctl/cmd_label.go b/pkg/sql/function/ctl/cmd_label.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_label.go rename to pkg/sql/function/ctl/cmd_label.go diff --git a/pkg/sql/plan/function/ctl/cmd_label_test.go b/pkg/sql/function/ctl/cmd_label_test.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_label_test.go rename to pkg/sql/function/ctl/cmd_label_test.go diff --git a/pkg/sql/plan/function/ctl/cmd_lock_service.go b/pkg/sql/function/ctl/cmd_lock_service.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_lock_service.go rename to pkg/sql/function/ctl/cmd_lock_service.go diff --git a/pkg/sql/plan/function/ctl/cmd_lock_service_test.go b/pkg/sql/function/ctl/cmd_lock_service_test.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_lock_service_test.go rename to pkg/sql/function/ctl/cmd_lock_service_test.go diff --git a/pkg/sql/plan/function/ctl/cmd_merge.go b/pkg/sql/function/ctl/cmd_merge.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_merge.go rename to pkg/sql/function/ctl/cmd_merge.go diff --git a/pkg/sql/plan/function/ctl/cmd_merge_test.go b/pkg/sql/function/ctl/cmd_merge_test.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_merge_test.go rename to pkg/sql/function/ctl/cmd_merge_test.go diff --git a/pkg/sql/plan/function/ctl/cmd_mo_table_stats.go b/pkg/sql/function/ctl/cmd_mo_table_stats.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_mo_table_stats.go rename 
to pkg/sql/function/ctl/cmd_mo_table_stats.go diff --git a/pkg/sql/plan/function/ctl/cmd_mo_table_stats_test.go b/pkg/sql/function/ctl/cmd_mo_table_stats_test.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_mo_table_stats_test.go rename to pkg/sql/function/ctl/cmd_mo_table_stats_test.go diff --git a/pkg/sql/plan/function/ctl/cmd_ping.go b/pkg/sql/function/ctl/cmd_ping.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_ping.go rename to pkg/sql/function/ctl/cmd_ping.go diff --git a/pkg/sql/plan/function/ctl/cmd_ping_test.go b/pkg/sql/function/ctl/cmd_ping_test.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_ping_test.go rename to pkg/sql/function/ctl/cmd_ping_test.go diff --git a/pkg/sql/plan/function/ctl/cmd_reload_increment.go b/pkg/sql/function/ctl/cmd_reload_increment.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_reload_increment.go rename to pkg/sql/function/ctl/cmd_reload_increment.go diff --git a/pkg/sql/plan/function/ctl/cmd_reload_increment_test.go b/pkg/sql/function/ctl/cmd_reload_increment_test.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_reload_increment_test.go rename to pkg/sql/function/ctl/cmd_reload_increment_test.go diff --git a/pkg/sql/plan/function/ctl/cmd_rpc_version.go b/pkg/sql/function/ctl/cmd_rpc_version.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_rpc_version.go rename to pkg/sql/function/ctl/cmd_rpc_version.go diff --git a/pkg/sql/plan/function/ctl/cmd_rpc_version_test.go b/pkg/sql/function/ctl/cmd_rpc_version_test.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_rpc_version_test.go rename to pkg/sql/function/ctl/cmd_rpc_version_test.go diff --git a/pkg/sql/plan/function/ctl/cmd_snapshot.go b/pkg/sql/function/ctl/cmd_snapshot.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_snapshot.go rename to pkg/sql/function/ctl/cmd_snapshot.go diff --git 
a/pkg/sql/plan/function/ctl/cmd_snapshot_test.go b/pkg/sql/function/ctl/cmd_snapshot_test.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_snapshot_test.go rename to pkg/sql/function/ctl/cmd_snapshot_test.go diff --git a/pkg/sql/plan/function/ctl/cmd_table_extra.go b/pkg/sql/function/ctl/cmd_table_extra.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_table_extra.go rename to pkg/sql/function/ctl/cmd_table_extra.go diff --git a/pkg/sql/plan/function/ctl/cmd_task.go b/pkg/sql/function/ctl/cmd_task.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_task.go rename to pkg/sql/function/ctl/cmd_task.go diff --git a/pkg/sql/plan/function/ctl/cmd_task_test.go b/pkg/sql/function/ctl/cmd_task_test.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_task_test.go rename to pkg/sql/function/ctl/cmd_task_test.go diff --git a/pkg/sql/plan/function/ctl/cmd_tracespan.go b/pkg/sql/function/ctl/cmd_tracespan.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_tracespan.go rename to pkg/sql/function/ctl/cmd_tracespan.go diff --git a/pkg/sql/plan/function/ctl/cmd_tracespan_test.go b/pkg/sql/function/ctl/cmd_tracespan_test.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_tracespan_test.go rename to pkg/sql/function/ctl/cmd_tracespan_test.go diff --git a/pkg/sql/plan/function/ctl/cmd_txn_trace.go b/pkg/sql/function/ctl/cmd_txn_trace.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_txn_trace.go rename to pkg/sql/function/ctl/cmd_txn_trace.go diff --git a/pkg/sql/plan/function/ctl/cmd_unsubscribe_table.go b/pkg/sql/function/ctl/cmd_unsubscribe_table.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_unsubscribe_table.go rename to pkg/sql/function/ctl/cmd_unsubscribe_table.go diff --git a/pkg/sql/plan/function/ctl/cmd_unsubscribe_table_test.go b/pkg/sql/function/ctl/cmd_unsubscribe_table_test.go similarity index 100% rename from 
pkg/sql/plan/function/ctl/cmd_unsubscribe_table_test.go rename to pkg/sql/function/ctl/cmd_unsubscribe_table_test.go diff --git a/pkg/sql/plan/function/ctl/cmd_workspace_threshold.go b/pkg/sql/function/ctl/cmd_workspace_threshold.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_workspace_threshold.go rename to pkg/sql/function/ctl/cmd_workspace_threshold.go diff --git a/pkg/sql/plan/function/ctl/cmd_workspace_threshold_test.go b/pkg/sql/function/ctl/cmd_workspace_threshold_test.go similarity index 100% rename from pkg/sql/plan/function/ctl/cmd_workspace_threshold_test.go rename to pkg/sql/function/ctl/cmd_workspace_threshold_test.go diff --git a/pkg/sql/plan/function/ctl/ctl.go b/pkg/sql/function/ctl/ctl.go similarity index 98% rename from pkg/sql/plan/function/ctl/ctl.go rename to pkg/sql/function/ctl/ctl.go index ea90eca033e0a..0baf2978f8cd8 100644 --- a/pkg/sql/plan/function/ctl/ctl.go +++ b/pkg/sql/function/ctl/ctl.go @@ -22,7 +22,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/pb/txn" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function/functionUtil" + "github.com/matrixorigin/matrixone/pkg/sql/function/functionUtil" "github.com/matrixorigin/matrixone/pkg/util/json" "github.com/matrixorigin/matrixone/pkg/vm/engine/cmd_util" "github.com/matrixorigin/matrixone/pkg/vm/process" diff --git a/pkg/sql/plan/function/ctl/dist.go b/pkg/sql/function/ctl/dist.go similarity index 100% rename from pkg/sql/plan/function/ctl/dist.go rename to pkg/sql/function/ctl/dist.go diff --git a/pkg/sql/plan/function/ctl/object_filter.go b/pkg/sql/function/ctl/object_filter.go similarity index 100% rename from pkg/sql/plan/function/ctl/object_filter.go rename to pkg/sql/function/ctl/object_filter.go diff --git a/pkg/sql/plan/function/ctl/object_filter_test.go b/pkg/sql/function/ctl/object_filter_test.go similarity index 100% rename from 
pkg/sql/plan/function/ctl/object_filter_test.go rename to pkg/sql/function/ctl/object_filter_test.go diff --git a/pkg/sql/plan/function/ctl/reader.go b/pkg/sql/function/ctl/reader.go similarity index 100% rename from pkg/sql/plan/function/ctl/reader.go rename to pkg/sql/function/ctl/reader.go diff --git a/pkg/sql/plan/function/ctl/types.go b/pkg/sql/function/ctl/types.go similarity index 100% rename from pkg/sql/plan/function/ctl/types.go rename to pkg/sql/function/ctl/types.go diff --git a/pkg/sql/plan/function/cxcall.go b/pkg/sql/function/cxcall.go similarity index 99% rename from pkg/sql/plan/function/cxcall.go rename to pkg/sql/function/cxcall.go index 2ea2faf83aff1..f3b61231c874e 100644 --- a/pkg/sql/plan/function/cxcall.go +++ b/pkg/sql/function/cxcall.go @@ -15,7 +15,7 @@ package function /* -#include "../../../../cgo/mo.h" +#include "../../../cgo/mo.h" */ import "C" import ( diff --git a/pkg/sql/plan/function/fault/fault.go b/pkg/sql/function/fault/fault.go similarity index 97% rename from pkg/sql/plan/function/fault/fault.go rename to pkg/sql/function/fault/fault.go index 0226fdd574f52..15ae9406cf846 100644 --- a/pkg/sql/plan/function/fault/fault.go +++ b/pkg/sql/function/fault/fault.go @@ -26,8 +26,8 @@ import ( "github.com/matrixorigin/matrixone/pkg/pb/metadata" "github.com/matrixorigin/matrixone/pkg/pb/query" "github.com/matrixorigin/matrixone/pkg/pb/txn" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function/ctl" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function/functionUtil" + "github.com/matrixorigin/matrixone/pkg/sql/function/ctl" + "github.com/matrixorigin/matrixone/pkg/sql/function/functionUtil" "github.com/matrixorigin/matrixone/pkg/util/fault" "github.com/matrixorigin/matrixone/pkg/util/json" "github.com/matrixorigin/matrixone/pkg/vm/engine/cmd_util" diff --git a/pkg/sql/plan/function/fault/fault_test.go b/pkg/sql/function/fault/fault_test.go similarity index 100% rename from pkg/sql/plan/function/fault/fault_test.go rename to 
pkg/sql/function/fault/fault_test.go diff --git a/pkg/sql/plan/function/fault/types.go b/pkg/sql/function/fault/types.go similarity index 100% rename from pkg/sql/plan/function/fault/types.go rename to pkg/sql/function/fault/types.go diff --git a/pkg/sql/plan/function/func_bench_test.go b/pkg/sql/function/func_bench_test.go similarity index 100% rename from pkg/sql/plan/function/func_bench_test.go rename to pkg/sql/function/func_bench_test.go diff --git a/pkg/sql/plan/function/func_binary.go b/pkg/sql/function/func_binary.go similarity index 99% rename from pkg/sql/plan/function/func_binary.go rename to pkg/sql/function/func_binary.go index 3c23b19e06f56..13fada62b0a6c 100644 --- a/pkg/sql/plan/function/func_binary.go +++ b/pkg/sql/function/func_binary.go @@ -29,9 +29,6 @@ import ( "strings" "time" - "github.com/matrixorigin/matrixone/pkg/util/fault" - "go.uber.org/zap" - "github.com/matrixorigin/matrixone/pkg/clusterservice" "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/container/nulls" @@ -39,13 +36,15 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/logutil" "github.com/matrixorigin/matrixone/pkg/pb/metadata" - fj "github.com/matrixorigin/matrixone/pkg/sql/plan/function/fault" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function/functionUtil" + fj "github.com/matrixorigin/matrixone/pkg/sql/function/fault" + "github.com/matrixorigin/matrixone/pkg/sql/function/functionUtil" + "github.com/matrixorigin/matrixone/pkg/util/fault" "github.com/matrixorigin/matrixone/pkg/vectorize/floor" "github.com/matrixorigin/matrixone/pkg/vectorize/format" "github.com/matrixorigin/matrixone/pkg/vectorize/instr" "github.com/matrixorigin/matrixone/pkg/vectorize/moarray" "github.com/matrixorigin/matrixone/pkg/vm/process" + "go.uber.org/zap" "golang.org/x/exp/constraints" ) diff --git a/pkg/sql/plan/function/func_binary_aes_test.go b/pkg/sql/function/func_binary_aes_test.go 
similarity index 100% rename from pkg/sql/plan/function/func_binary_aes_test.go rename to pkg/sql/function/func_binary_aes_test.go diff --git a/pkg/sql/plan/function/func_binary_test.go b/pkg/sql/function/func_binary_test.go similarity index 100% rename from pkg/sql/plan/function/func_binary_test.go rename to pkg/sql/function/func_binary_test.go diff --git a/pkg/sql/plan/function/func_builtin.go b/pkg/sql/function/func_builtin.go similarity index 99% rename from pkg/sql/plan/function/func_builtin.go rename to pkg/sql/function/func_builtin.go index 210956e16475c..2c62f5a00f0ac 100644 --- a/pkg/sql/plan/function/func_builtin.go +++ b/pkg/sql/function/func_builtin.go @@ -25,7 +25,6 @@ import ( "time" "github.com/google/uuid" - "github.com/matrixorigin/matrixone/pkg/common/hashmap" "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/common/runtime" @@ -38,7 +37,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/defines" "github.com/matrixorigin/matrixone/pkg/logutil" "github.com/matrixorigin/matrixone/pkg/pb/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function/functionUtil" + "github.com/matrixorigin/matrixone/pkg/sql/function/functionUtil" "github.com/matrixorigin/matrixone/pkg/util/executor" "github.com/matrixorigin/matrixone/pkg/util/export/table" "github.com/matrixorigin/matrixone/pkg/util/trace/impl/motrace" diff --git a/pkg/sql/plan/function/func_builtin_jq.go b/pkg/sql/function/func_builtin_jq.go similarity index 100% rename from pkg/sql/plan/function/func_builtin_jq.go rename to pkg/sql/function/func_builtin_jq.go diff --git a/pkg/sql/plan/function/func_builtin_json.go b/pkg/sql/function/func_builtin_json.go similarity index 100% rename from pkg/sql/plan/function/func_builtin_json.go rename to pkg/sql/function/func_builtin_json.go diff --git a/pkg/sql/plan/function/func_builtin_leastgreatest.go b/pkg/sql/function/func_builtin_leastgreatest.go similarity index 100% rename from 
pkg/sql/plan/function/func_builtin_leastgreatest.go rename to pkg/sql/function/func_builtin_leastgreatest.go diff --git a/pkg/sql/plan/function/func_builtin_leastgreatest_test.go b/pkg/sql/function/func_builtin_leastgreatest_test.go similarity index 100% rename from pkg/sql/plan/function/func_builtin_leastgreatest_test.go rename to pkg/sql/function/func_builtin_leastgreatest_test.go diff --git a/pkg/sql/plan/function/func_builtin_llm.go b/pkg/sql/function/func_builtin_llm.go similarity index 100% rename from pkg/sql/plan/function/func_builtin_llm.go rename to pkg/sql/function/func_builtin_llm.go diff --git a/pkg/sql/plan/function/func_builtin_regexp.go b/pkg/sql/function/func_builtin_regexp.go similarity index 99% rename from pkg/sql/plan/function/func_builtin_regexp.go rename to pkg/sql/function/func_builtin_regexp.go index 8e90bd4b0f02c..19ebad95e5075 100644 --- a/pkg/sql/plan/function/func_builtin_regexp.go +++ b/pkg/sql/function/func_builtin_regexp.go @@ -20,13 +20,12 @@ import ( "regexp" "unicode/utf8" - "github.com/matrixorigin/matrixone/pkg/container/nulls" - "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/common/util" + "github.com/matrixorigin/matrixone/pkg/container/nulls" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function/functionUtil" + "github.com/matrixorigin/matrixone/pkg/sql/function/functionUtil" "github.com/matrixorigin/matrixone/pkg/vm/process" ) diff --git a/pkg/sql/plan/function/func_builtin_regexp_test.go b/pkg/sql/function/func_builtin_regexp_test.go similarity index 100% rename from pkg/sql/plan/function/func_builtin_regexp_test.go rename to pkg/sql/function/func_builtin_regexp_test.go diff --git a/pkg/sql/plan/function/func_builtin_serial.go b/pkg/sql/function/func_builtin_serial.go similarity index 100% rename from pkg/sql/plan/function/func_builtin_serial.go rename to 
pkg/sql/function/func_builtin_serial.go diff --git a/pkg/sql/plan/function/func_builtin_starlark.go b/pkg/sql/function/func_builtin_starlark.go similarity index 100% rename from pkg/sql/plan/function/func_builtin_starlark.go rename to pkg/sql/function/func_builtin_starlark.go diff --git a/pkg/sql/plan/function/func_builtin_string_test.go b/pkg/sql/function/func_builtin_string_test.go similarity index 100% rename from pkg/sql/plan/function/func_builtin_string_test.go rename to pkg/sql/function/func_builtin_string_test.go diff --git a/pkg/sql/plan/function/func_builtin_test.go b/pkg/sql/function/func_builtin_test.go similarity index 100% rename from pkg/sql/plan/function/func_builtin_test.go rename to pkg/sql/function/func_builtin_test.go diff --git a/pkg/sql/plan/function/func_builtin_w.go b/pkg/sql/function/func_builtin_w.go similarity index 100% rename from pkg/sql/plan/function/func_builtin_w.go rename to pkg/sql/function/func_builtin_w.go diff --git a/pkg/sql/plan/function/func_cast.go b/pkg/sql/function/func_cast.go similarity index 100% rename from pkg/sql/plan/function/func_cast.go rename to pkg/sql/function/func_cast.go diff --git a/pkg/sql/plan/function/func_cast_test.go b/pkg/sql/function/func_cast_test.go similarity index 100% rename from pkg/sql/plan/function/func_cast_test.go rename to pkg/sql/function/func_cast_test.go diff --git a/pkg/sql/plan/function/func_compare.go b/pkg/sql/function/func_compare.go similarity index 100% rename from pkg/sql/plan/function/func_compare.go rename to pkg/sql/function/func_compare.go diff --git a/pkg/sql/plan/function/func_compare_logic_test.go b/pkg/sql/function/func_compare_logic_test.go similarity index 100% rename from pkg/sql/plan/function/func_compare_logic_test.go rename to pkg/sql/function/func_compare_logic_test.go diff --git a/pkg/sql/plan/function/func_compare_test.go b/pkg/sql/function/func_compare_test.go similarity index 100% rename from pkg/sql/plan/function/func_compare_test.go rename to 
pkg/sql/function/func_compare_test.go diff --git a/pkg/sql/plan/function/func_feature_upsert.go b/pkg/sql/function/func_feature_upsert.go similarity index 100% rename from pkg/sql/plan/function/func_feature_upsert.go rename to pkg/sql/function/func_feature_upsert.go diff --git a/pkg/sql/plan/function/func_fulltext.go b/pkg/sql/function/func_fulltext.go similarity index 100% rename from pkg/sql/plan/function/func_fulltext.go rename to pkg/sql/function/func_fulltext.go diff --git a/pkg/sql/plan/function/func_get_admin.go b/pkg/sql/function/func_get_admin.go similarity index 100% rename from pkg/sql/plan/function/func_get_admin.go rename to pkg/sql/function/func_get_admin.go diff --git a/pkg/sql/plan/function/func_locate.go b/pkg/sql/function/func_locate.go similarity index 98% rename from pkg/sql/plan/function/func_locate.go rename to pkg/sql/function/func_locate.go index 004e97382ba9f..f2b553fab9ab8 100644 --- a/pkg/sql/plan/function/func_locate.go +++ b/pkg/sql/function/func_locate.go @@ -20,7 +20,7 @@ import ( "unicode/utf8" "github.com/matrixorigin/matrixone/pkg/container/vector" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function/functionUtil" + "github.com/matrixorigin/matrixone/pkg/sql/function/functionUtil" "github.com/matrixorigin/matrixone/pkg/vm/process" ) diff --git a/pkg/sql/plan/function/func_locate_test.go b/pkg/sql/function/func_locate_test.go similarity index 100% rename from pkg/sql/plan/function/func_locate_test.go rename to pkg/sql/function/func_locate_test.go diff --git a/pkg/sql/plan/function/func_math_complex_test.go b/pkg/sql/function/func_math_complex_test.go similarity index 100% rename from pkg/sql/plan/function/func_math_complex_test.go rename to pkg/sql/function/func_math_complex_test.go diff --git a/pkg/sql/plan/function/func_mo.go b/pkg/sql/function/func_mo.go similarity index 99% rename from pkg/sql/plan/function/func_mo.go rename to pkg/sql/function/func_mo.go index a67ee9769f57a..b04da42d8bbf3 100644 --- 
a/pkg/sql/plan/function/func_mo.go +++ b/pkg/sql/function/func_mo.go @@ -24,8 +24,6 @@ import ( "sync/atomic" "time" - "go.uber.org/zap" - "github.com/matrixorigin/matrixone/pkg/catalog" "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/common/pubsub" @@ -36,10 +34,11 @@ import ( "github.com/matrixorigin/matrixone/pkg/defines" "github.com/matrixorigin/matrixone/pkg/logutil" "github.com/matrixorigin/matrixone/pkg/pb/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function/functionUtil" + "github.com/matrixorigin/matrixone/pkg/sql/function/functionUtil" "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/vm/engine" "github.com/matrixorigin/matrixone/pkg/vm/process" + "go.uber.org/zap" ) const ( diff --git a/pkg/sql/plan/function/func_mo_explain_phy.go b/pkg/sql/function/func_mo_explain_phy.go similarity index 97% rename from pkg/sql/plan/function/func_mo_explain_phy.go rename to pkg/sql/function/func_mo_explain_phy.go index af0e07ef15294..0e03e1f145c98 100644 --- a/pkg/sql/plan/function/func_mo_explain_phy.go +++ b/pkg/sql/function/func_mo_explain_phy.go @@ -22,8 +22,8 @@ import ( "github.com/matrixorigin/matrixone/pkg/config" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" + "github.com/matrixorigin/matrixone/pkg/sql/function/functionUtil" "github.com/matrixorigin/matrixone/pkg/sql/models" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function/functionUtil" "github.com/matrixorigin/matrixone/pkg/vm/process" ) diff --git a/pkg/sql/plan/function/func_mo_explain_phy_test.go b/pkg/sql/function/func_mo_explain_phy_test.go similarity index 100% rename from pkg/sql/plan/function/func_mo_explain_phy_test.go rename to pkg/sql/function/func_mo_explain_phy_test.go diff --git a/pkg/sql/plan/function/func_mo_tuple_expr.go b/pkg/sql/function/func_mo_tuple_expr.go similarity index 100% rename from 
pkg/sql/plan/function/func_mo_tuple_expr.go rename to pkg/sql/function/func_mo_tuple_expr.go diff --git a/pkg/sql/plan/function/func_prefix.go b/pkg/sql/function/func_prefix.go similarity index 100% rename from pkg/sql/plan/function/func_prefix.go rename to pkg/sql/function/func_prefix.go diff --git a/pkg/sql/plan/function/func_prefix_test.go b/pkg/sql/function/func_prefix_test.go similarity index 100% rename from pkg/sql/plan/function/func_prefix_test.go rename to pkg/sql/function/func_prefix_test.go diff --git a/pkg/sql/plan/function/func_str_to_date.go b/pkg/sql/function/func_str_to_date.go similarity index 98% rename from pkg/sql/plan/function/func_str_to_date.go rename to pkg/sql/function/func_str_to_date.go index 8bfb1cf09040d..80b822118f3a8 100644 --- a/pkg/sql/plan/function/func_str_to_date.go +++ b/pkg/sql/function/func_str_to_date.go @@ -21,7 +21,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function/functionUtil" + "github.com/matrixorigin/matrixone/pkg/sql/function/functionUtil" "github.com/matrixorigin/matrixone/pkg/vm/process" ) diff --git a/pkg/sql/plan/function/func_string_complex_test.go b/pkg/sql/function/func_string_complex_test.go similarity index 100% rename from pkg/sql/plan/function/func_string_complex_test.go rename to pkg/sql/function/func_string_complex_test.go diff --git a/pkg/sql/plan/function/func_testcase.go b/pkg/sql/function/func_testcase.go similarity index 100% rename from pkg/sql/plan/function/func_testcase.go rename to pkg/sql/function/func_testcase.go diff --git a/pkg/sql/plan/function/func_unary.go b/pkg/sql/function/func_unary.go similarity index 99% rename from pkg/sql/plan/function/func_unary.go rename to pkg/sql/function/func_unary.go index 867deca197332..871cc90a92713 100644 --- a/pkg/sql/plan/function/func_unary.go +++ 
b/pkg/sql/function/func_unary.go @@ -41,27 +41,25 @@ import ( "unicode/utf8" "unsafe" - "github.com/matrixorigin/matrixone/pkg/common/util" - "github.com/matrixorigin/matrixone/pkg/datalink" - "github.com/RoaringBitmap/roaring" - "golang.org/x/exp/constraints" - "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/common/mpool" "github.com/matrixorigin/matrixone/pkg/common/system" + "github.com/matrixorigin/matrixone/pkg/common/util" "github.com/matrixorigin/matrixone/pkg/container/nulls" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" + "github.com/matrixorigin/matrixone/pkg/datalink" "github.com/matrixorigin/matrixone/pkg/fileservice" "github.com/matrixorigin/matrixone/pkg/logutil" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function/functionUtil" + "github.com/matrixorigin/matrixone/pkg/sql/function/functionUtil" "github.com/matrixorigin/matrixone/pkg/util/fault" "github.com/matrixorigin/matrixone/pkg/vectorize/lengthutf8" "github.com/matrixorigin/matrixone/pkg/vectorize/moarray" "github.com/matrixorigin/matrixone/pkg/vectorize/momath" "github.com/matrixorigin/matrixone/pkg/version" "github.com/matrixorigin/matrixone/pkg/vm/process" + "golang.org/x/exp/constraints" ) func AbsUInt64(ivecs []*vector.Vector, result vector.FunctionResultWrapper, proc *process.Process, length int, selectList *FunctionSelectList) error { diff --git a/pkg/sql/plan/function/func_unary_test.go b/pkg/sql/function/func_unary_test.go similarity index 99% rename from pkg/sql/plan/function/func_unary_test.go rename to pkg/sql/function/func_unary_test.go index 1d0f4b7ed65f7..3de4e968625ff 100644 --- a/pkg/sql/plan/function/func_unary_test.go +++ b/pkg/sql/function/func_unary_test.go @@ -22,16 +22,14 @@ import ( "testing" "time" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function/functionUtil" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - 
"github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/fileservice" + "github.com/matrixorigin/matrixone/pkg/sql/function/functionUtil" "github.com/matrixorigin/matrixone/pkg/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) type tcTemp struct { diff --git a/pkg/sql/plan/function/function.go b/pkg/sql/function/function.go similarity index 100% rename from pkg/sql/plan/function/function.go rename to pkg/sql/function/function.go diff --git a/pkg/sql/plan/function/functionUtil/quickCast.go b/pkg/sql/function/functionUtil/quickCast.go similarity index 100% rename from pkg/sql/plan/function/functionUtil/quickCast.go rename to pkg/sql/function/functionUtil/quickCast.go diff --git a/pkg/sql/plan/function/functionUtil/strUtil.go b/pkg/sql/function/functionUtil/strUtil.go similarity index 100% rename from pkg/sql/plan/function/functionUtil/strUtil.go rename to pkg/sql/function/functionUtil/strUtil.go diff --git a/pkg/sql/plan/function/function_id.go b/pkg/sql/function/function_id.go similarity index 100% rename from pkg/sql/plan/function/function_id.go rename to pkg/sql/function/function_id.go diff --git a/pkg/sql/plan/function/function_id_test.go b/pkg/sql/function/function_id_test.go similarity index 100% rename from pkg/sql/plan/function/function_id_test.go rename to pkg/sql/function/function_id_test.go diff --git a/pkg/sql/plan/function/function_layout.go b/pkg/sql/function/function_layout.go similarity index 100% rename from pkg/sql/plan/function/function_layout.go rename to pkg/sql/function/function_layout.go diff --git a/pkg/sql/plan/function/function_test.go b/pkg/sql/function/function_test.go similarity index 100% rename from pkg/sql/plan/function/function_test.go rename to pkg/sql/function/function_test.go diff --git a/pkg/sql/plan/function/generalTime.go 
b/pkg/sql/function/generalTime.go similarity index 100% rename from pkg/sql/plan/function/generalTime.go rename to pkg/sql/function/generalTime.go diff --git a/pkg/sql/plan/function/init.go b/pkg/sql/function/init.go similarity index 100% rename from pkg/sql/plan/function/init.go rename to pkg/sql/function/init.go diff --git a/pkg/sql/plan/function/list_agg.go b/pkg/sql/function/list_agg.go similarity index 99% rename from pkg/sql/plan/function/list_agg.go rename to pkg/sql/function/list_agg.go index 96dbf6f656c22..40dba40303985 100644 --- a/pkg/sql/plan/function/list_agg.go +++ b/pkg/sql/function/list_agg.go @@ -18,7 +18,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec/aggexec" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function/agg" + "github.com/matrixorigin/matrixone/pkg/sql/function/agg" ) var supportedAggInNewFramework = []FuncNew{ diff --git a/pkg/sql/plan/function/list_builtIn.go b/pkg/sql/function/list_builtIn.go similarity index 99% rename from pkg/sql/plan/function/list_builtIn.go rename to pkg/sql/function/list_builtIn.go index 9a3f78f100fc2..eed720e60157e 100644 --- a/pkg/sql/plan/function/list_builtIn.go +++ b/pkg/sql/function/list_builtIn.go @@ -21,9 +21,9 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/pb/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function/ctl" - fj "github.com/matrixorigin/matrixone/pkg/sql/plan/function/fault" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function/functionUtil" + "github.com/matrixorigin/matrixone/pkg/sql/function/ctl" + fj "github.com/matrixorigin/matrixone/pkg/sql/function/fault" + "github.com/matrixorigin/matrixone/pkg/sql/function/functionUtil" "github.com/matrixorigin/matrixone/pkg/vm/process" ) diff --git a/pkg/sql/plan/function/list_operator.go 
b/pkg/sql/function/list_operator.go similarity index 100% rename from pkg/sql/plan/function/list_operator.go rename to pkg/sql/function/list_operator.go diff --git a/pkg/sql/plan/function/list_window.go b/pkg/sql/function/list_window.go similarity index 99% rename from pkg/sql/plan/function/list_window.go rename to pkg/sql/function/list_window.go index d6c59dd3cc575..51d0b8266cb36 100644 --- a/pkg/sql/plan/function/list_window.go +++ b/pkg/sql/function/list_window.go @@ -18,7 +18,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec/aggexec" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function/agg" + "github.com/matrixorigin/matrixone/pkg/sql/function/agg" ) var supportedWindowInNewFramework = []FuncNew{ diff --git a/pkg/sql/plan/function/list_window_test.go b/pkg/sql/function/list_window_test.go similarity index 100% rename from pkg/sql/plan/function/list_window_test.go rename to pkg/sql/function/list_window_test.go diff --git a/pkg/sql/plan/function/logicalOperator.go b/pkg/sql/function/logicalOperator.go similarity index 100% rename from pkg/sql/plan/function/logicalOperator.go rename to pkg/sql/function/logicalOperator.go diff --git a/pkg/sql/plan/function/operatorSet.go b/pkg/sql/function/operatorSet.go similarity index 100% rename from pkg/sql/plan/function/operatorSet.go rename to pkg/sql/function/operatorSet.go diff --git a/pkg/sql/plan/function/operatorSet_test.go b/pkg/sql/function/operatorSet_test.go similarity index 100% rename from pkg/sql/plan/function/operatorSet_test.go rename to pkg/sql/function/operatorSet_test.go diff --git a/pkg/sql/plan/function/operator_between.go b/pkg/sql/function/operator_between.go similarity index 100% rename from pkg/sql/plan/function/operator_between.go rename to pkg/sql/function/operator_between.go diff --git a/pkg/sql/plan/function/operator_in.go b/pkg/sql/function/operator_in.go similarity index 
100% rename from pkg/sql/plan/function/operator_in.go rename to pkg/sql/function/operator_in.go diff --git a/pkg/sql/plan/function/python_udf.go b/pkg/sql/function/python_udf.go similarity index 100% rename from pkg/sql/plan/function/python_udf.go rename to pkg/sql/function/python_udf.go diff --git a/pkg/sql/plan/function/seqfunc.go b/pkg/sql/function/seqfunc.go similarity index 99% rename from pkg/sql/plan/function/seqfunc.go rename to pkg/sql/function/seqfunc.go index 377b9870b57c7..d508f3272b434 100644 --- a/pkg/sql/plan/function/seqfunc.go +++ b/pkg/sql/function/seqfunc.go @@ -24,7 +24,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/defines" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function/functionUtil" + "github.com/matrixorigin/matrixone/pkg/sql/function/functionUtil" "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/vm/engine" "github.com/matrixorigin/matrixone/pkg/vm/process" diff --git a/pkg/sql/plan/function/type_check.go b/pkg/sql/function/type_check.go similarity index 100% rename from pkg/sql/plan/function/type_check.go rename to pkg/sql/function/type_check.go diff --git a/pkg/sql/plan/function/udf.go b/pkg/sql/function/udf.go similarity index 100% rename from pkg/sql/plan/function/udf.go rename to pkg/sql/function/udf.go diff --git a/pkg/sql/parsers/tree/clone.go b/pkg/sql/parsers/tree/clone.go index b0a0dbf4e83f6..5bb756c05fa39 100644 --- a/pkg/sql/parsers/tree/clone.go +++ b/pkg/sql/parsers/tree/clone.go @@ -19,7 +19,7 @@ import ( "fmt" "github.com/matrixorigin/matrixone/pkg/common/reuse" - plan2 "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" ) func init() { @@ -170,7 +170,7 @@ func DecideCloneStmtType( dstDbName string, toAccount uint32, srcAccount uint32, - subMeta *plan2.SubscriptionMeta, + subMeta *plan.SubscriptionMeta, ) 
(cloneType CloneStmtType) { if stmt.StmtType != NoClone { diff --git a/pkg/sql/plan/Makefile b/pkg/sql/plan/Makefile deleted file mode 100644 index 43cee9b9eed63..0000000000000 --- a/pkg/sql/plan/Makefile +++ /dev/null @@ -1,8 +0,0 @@ -CURRENT_DIR = $(shell pwd) - -.PHONY: generate_mock -generate_mock: - @go install github.com/golang/mock/mockgen@v1.6.0 - @echo "Current Directory " $(CURRENT_DIR) - @mockgen -source=../../../pkg/sql/plan/types.go -package plan -mock_names CompilerContext=MockCompilerContext2,Optimizer=MockOptimizer2 > types_mock_test.go - diff --git a/pkg/sql/plan/explain/marshal_model.go b/pkg/sql/plan/explain/marshal_model.go deleted file mode 100644 index e32a7a0a54484..0000000000000 --- a/pkg/sql/plan/explain/marshal_model.go +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright 2021 - 2022 Matrix Origin -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package explain - -import ( - "strconv" - - plan2 "github.com/matrixorigin/matrixone/pkg/pb/plan" - "github.com/matrixorigin/matrixone/pkg/sql/models" - "github.com/matrixorigin/matrixone/pkg/sql/plan" -) - -func buildEdge(parentNode *plan.Node, childNode *plan.Node, index int32) *models.Edge { - edge := &models.Edge{ - Id: "E" + strconv.Itoa(int(index)), - Src: strconv.FormatInt(int64(childNode.NodeId), 10), - Dst: strconv.FormatInt(int64(parentNode.NodeId), 10), - Unit: "count", - } - if childNode.AnalyzeInfo != nil { - edge.Output = childNode.AnalyzeInfo.OutputRows - } - return edge -} - -var nodeTypeToNameMap = map[plan2.Node_NodeType]string{ - plan2.Node_UNKNOWN: "UnKnown Node", - - plan2.Node_VALUE_SCAN: "Values Scan", - plan2.Node_TABLE_SCAN: "Table Scan", - plan2.Node_FUNCTION_SCAN: "Function Scan", - plan2.Node_EXTERNAL_SCAN: "External Scan", - plan2.Node_MATERIAL_SCAN: "Material Scan", - plan2.Node_SOURCE_SCAN: "Source Scan", - - plan2.Node_PROJECT: "Project", - - plan2.Node_EXTERNAL_FUNCTION: "External Function", - - plan2.Node_MATERIAL: "Material", - plan2.Node_SINK: "Sink", - plan2.Node_SINK_SCAN: "Sink Scan", - plan2.Node_RECURSIVE_SCAN: "Recursive Scan", - plan2.Node_RECURSIVE_CTE: "CTE Scan", - - plan2.Node_AGG: "Aggregate", - plan2.Node_DISTINCT: "Distinct", - plan2.Node_FILTER: "Filter", - plan2.Node_JOIN: "Join", - plan2.Node_SAMPLE: "Sample", - plan2.Node_SORT: "Sort", - plan2.Node_UNION: "Union", - plan2.Node_UNION_ALL: "Union All", - plan2.Node_UNIQUE: "Unique", - plan2.Node_WINDOW: "Window", - - plan2.Node_BROADCAST: "Broadcast", - plan2.Node_SPLIT: "Split", - plan2.Node_GATHER: "Gather", - - plan2.Node_ASSERT: "Assert", - - plan2.Node_INSERT: "Insert", - plan2.Node_DELETE: "Delete", - plan2.Node_REPLACE: "Replace", - plan2.Node_MULTI_UPDATE: "Multi Update", - - plan2.Node_LOCK_OP: "Lock Operator", - - plan2.Node_INTERSECT: "Intersect", - plan2.Node_INTERSECT_ALL: "Intersect All", - plan2.Node_MINUS: "Minus", - plan2.Node_MINUS_ALL: "Minus 
All", - - plan2.Node_ON_DUPLICATE_KEY: "On Duplicate Key", - plan2.Node_PRE_INSERT: "Pre Insert", - plan2.Node_PRE_INSERT_UK: "Pre Insert Unique", - plan2.Node_PRE_INSERT_SK: "Pre Insert 2nd Key", - - plan2.Node_TIME_WINDOW: "Time window", - plan2.Node_FILL: "Fill", - plan2.Node_PARTITION: "Partition", - plan2.Node_FUZZY_FILTER: "Fuzzy filter", -} diff --git a/pkg/sql/planner/Makefile b/pkg/sql/planner/Makefile new file mode 100644 index 0000000000000..9340009b4dbf2 --- /dev/null +++ b/pkg/sql/planner/Makefile @@ -0,0 +1,8 @@ +CURRENT_DIR = $(shell pwd) + +.PHONY: generate_mock +generate_mock: + @go install github.com/golang/mock/mockgen@v1.6.0 + @echo "Current Directory " $(CURRENT_DIR) + @mockgen -source=../../../pkg/sql/planner/types.go -package planner -mock_names CompilerContext=MockCompilerContext2,Optimizer=MockOptimizer2 > types_mock_test.go + diff --git a/pkg/sql/plan/agg_pushdown_pullup.go b/pkg/sql/planner/agg_pushdown_pullup.go similarity index 96% rename from pkg/sql/plan/agg_pushdown_pullup.go rename to pkg/sql/planner/agg_pushdown_pullup.go index a0cc3e0a295ff..eb386dd89a5d3 100644 --- a/pkg/sql/plan/agg_pushdown_pullup.go +++ b/pkg/sql/planner/agg_pushdown_pullup.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/pb/plan" @@ -78,7 +78,7 @@ func replaceCol(expr *plan.Expr, oldRelPos, oldColPos, newRelPos, newColPos int3 } } -func filterTag(expr *Expr, tag int32) *Expr { +func filterTag(expr *plan.Expr, tag int32) *plan.Expr { switch exprImpl := expr.Expr.(type) { case *plan.Expr_F: for _, arg := range exprImpl.F.Args { @@ -97,7 +97,7 @@ func filterTag(expr *Expr, tag int32) *Expr { func applyAggPushdown(agg, join, leftChild *plan.Node, builder *QueryBuilder) { leftChildTag := leftChild.BindingTags[0] - newAggList := DeepCopyExprList(agg.AggList) + newAggList := plan.DeepCopyExprList(agg.AggList) for i, aggExpr := range agg.AggList { if funExpr, ok := aggExpr.Expr.(*plan.Expr_F); ok { @@ -115,8 +115,8 @@ func applyAggPushdown(agg, join, leftChild *plan.Node, builder *QueryBuilder) { } } - //newGroupBy := DeepCopyExprList(agg.GroupBy) - newGroupBy := []*plan.Expr{DeepCopyExpr(filterTag(join.OnList[0], leftChildTag))} + //newGroupBy := plan.DeepCopyExprList(agg.GroupBy) + newGroupBy := []*plan.Expr{plan.DeepCopyExpr(filterTag(join.OnList[0], leftChildTag))} newGroupTag := builder.genNewBindTag() newAggTag := builder.genNewBindTag() @@ -176,7 +176,7 @@ func (builder *QueryBuilder) aggPushDown(nodeID int32) int32 { return nodeID } -func getJoinCondCol(cond *Expr, leftTag int32, rightTag int32) (*plan.Expr_Col, *plan.Expr_Col) { +func getJoinCondCol(cond *plan.Expr, leftTag int32, rightTag int32) (*plan.Expr_Col, *plan.Expr_Col) { fun, ok := cond.Expr.(*plan.Expr_F) if !ok || fun.F.Func.ObjName != "=" { return nil, nil @@ -248,7 +248,7 @@ func addAnyValue(expr *plan.Expr, agg *plan.Node, builder *QueryBuilder) { } if idx == -1 { idx = len(agg.AggList) - anyValueExpr, _ := BindFuncExprImplByPlanExpr(builder.compCtx.GetContext(), "any_value", []*plan.Expr{DeepCopyExpr(expr)}) + anyValueExpr, _ := BindFuncExprImplByPlanExpr(builder.compCtx.GetContext(), "any_value", []*plan.Expr{plan.DeepCopyExpr(expr)}) 
agg.AggList = append(agg.AggList, anyValueExpr) } col.Col.RelPos = agg.BindingTags[1] diff --git a/pkg/sql/plan/alter_util.go b/pkg/sql/planner/alter_util.go similarity index 79% rename from pkg/sql/plan/alter_util.go rename to pkg/sql/planner/alter_util.go index 4e80a444b3540..fa2271720ce9b 100644 --- a/pkg/sql/plan/alter_util.go +++ b/pkg/sql/planner/alter_util.go @@ -12,14 +12,16 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "strings" + + "github.com/matrixorigin/matrixone/pkg/pb/plan" ) // FindColumn finds column in cols by name. -func FindColumn(cols []*ColDef, name string) *ColDef { +func FindColumn(cols []*plan.ColDef, name string) *plan.ColDef { for _, col := range cols { if strings.EqualFold(col.Name, name) { return col @@ -29,7 +31,7 @@ func FindColumn(cols []*ColDef, name string) *ColDef { } // FindColumnByOriginName finds column in cols by origin name. -func FindColumnByOriginName(cols []*ColDef, originName string) *ColDef { +func FindColumnByOriginName(cols []*plan.ColDef, originName string) *plan.ColDef { for _, col := range cols { if col.GetOriginCaseName() == originName { return col @@ -39,7 +41,7 @@ func FindColumnByOriginName(cols []*ColDef, originName string) *ColDef { } // FindColumn finds column in cols by colId -func FindColumnByColId(cols []*ColDef, colId uint64) *ColDef { +func FindColumnByColId(cols []*plan.ColDef, colId uint64) *plan.ColDef { for _, col := range cols { if col.ColId == colId { return col diff --git a/pkg/sql/plan/ap_multi_cn_shuffle_merge_guide.md b/pkg/sql/planner/ap_multi_cn_shuffle_merge_guide.md similarity index 94% rename from pkg/sql/plan/ap_multi_cn_shuffle_merge_guide.md rename to pkg/sql/planner/ap_multi_cn_shuffle_merge_guide.md index 867b12e9cf2fd..30333cf9495f5 100644 --- a/pkg/sql/plan/ap_multi_cn_shuffle_merge_guide.md +++ b/pkg/sql/planner/ap_multi_cn_shuffle_merge_guide.md @@ -41,14 +41,14 @@ Step 1: Decide 
AP multi-CN during planning - `plan.GetExecType` decides the execution mode using thresholds and flags. - `ForceScanOnMultiCN` forces AP multi-CN. - Large scan cost/blocks switch to AP (one CN or multi-CN). -- File: `pkg/sql/plan/stats.go`. +- File: `pkg/sql/planner/stats.go`. Step 2: Build the CN list and execution scopes in compile - `compileQuery` decides `execType`, then builds `cnList`. - TP or AP one CN uses only the local CN. - AP multi-CN uses `engine.Nodes()` plus availability checks. - `CalcQueryDOP` sets per-node DOP based on stats and CN count. -- Files: `pkg/sql/compile/compile.go`, `pkg/sql/plan/stats.go`. +- Files: `pkg/sql/compile/compile.go`, `pkg/sql/planner/stats.go`. Step 3: Table scan scopes per CN - `compileTableScan` calls `generateNodes` to decide which CNs scan the table. @@ -77,7 +77,7 @@ Step 5: Runtime reader construction and object/block collection - Local CN: `Policy_CollectAllData` with shuffle param. - Remote CN: `Policy_CollectCommittedPersistedData` with shuffle param, then attach tombstones from coordinator. - Files: `pkg/sql/compile/scope.go`, `pkg/sql/compile/compile.go`, - `pkg/sql/plan/shuffle.go`, `pkg/vm/engine/readutil/exec_util.go`, + `pkg/sql/planner/shuffle.go`, `pkg/vm/engine/readutil/exec_util.go`, `pkg/vm/engine/types.go`. Step 6: TableScan execution on each CN @@ -105,7 +105,7 @@ Part 3. Important Functions and Files ------------------------------------------------------------------------------- Compile and plan selection -- `pkg/sql/plan/stats.go`: `GetExecType`, `CalcQueryDOP`, multi-CN thresholds and flags. +- `pkg/sql/planner/stats.go`: `GetExecType`, `CalcQueryDOP`, multi-CN thresholds and flags. - `pkg/sql/compile/compile.go`: `compileQuery`, `getCNList`, `removeUnavailableCN`. Multi-CN scan construction @@ -115,7 +115,7 @@ Multi-CN scan construction Object/block collection and shuffle - `pkg/sql/compile/scope.go`: `getRelData`, `buildReaders`. 
- `pkg/sql/compile/compile.go`: `expandRanges` (wraps `rel.Ranges`). -- `pkg/sql/plan/shuffle.go`: `ShouldSkipObjByShuffle`, `CalcRangeShuffleIDXForObj`. +- `pkg/sql/planner/shuffle.go`: `ShouldSkipObjByShuffle`, `CalcRangeShuffleIDXForObj`. - `pkg/vm/engine/readutil/exec_util.go`: `FilterObjects` (applies shuffle skip). - `pkg/vm/engine/types.go`: `RangesParam`, `RangesShuffleParam`, `DataCollectPolicy`. diff --git a/pkg/sql/plan/apply_indices.go b/pkg/sql/planner/apply_indices.go similarity index 94% rename from pkg/sql/plan/apply_indices.go rename to pkg/sql/planner/apply_indices.go index 8781c647c2913..f7fa997827e71 100644 --- a/pkg/sql/plan/apply_indices.go +++ b/pkg/sql/planner/apply_indices.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "fmt" @@ -234,7 +234,7 @@ func (builder *QueryBuilder) applyIndices(nodeID int32, colRefCnt map[[2]int32]i return builder.applyIndicesForFilters(nodeID, node, colRefCnt, idxColMap), nil case plan.Node_JOIN: - return builder.applyIndicesForJoins(nodeID, node, colRefCnt, idxColMap), nil + return builder.applyIndicesForJoins(nodeID, node), nil case plan.Node_PROJECT: //NOTE: This is the entry point for vector index rule on SORT NODE. 
@@ -283,8 +283,8 @@ func (builder *QueryBuilder) applyIndicesForFilters(nodeID int32, node *plan.Nod if !isRuntimeConstExpr(fn.Args[1]) { goto END0 } - case "between": - case "in": + + case "between", "in_range", "in": default: goto END0 @@ -539,7 +539,7 @@ func (builder *QueryBuilder) applyIndicesForFiltersRegularIndex(nodeID int32, no } } - indexes := make([]*IndexDef, 0, len(node.TableDef.Indexes)) + indexes := make([]*plan.IndexDef, 0, len(node.TableDef.Indexes)) for i := range node.TableDef.Indexes { if node.TableDef.Indexes[i].IndexAlgo == "fulltext" || !node.TableDef.Indexes[i].TableExist { continue @@ -552,7 +552,7 @@ func (builder *QueryBuilder) applyIndicesForFiltersRegularIndex(nodeID int32, no scanSnapshot := node.ScanSnapshot if scanSnapshot == nil { - scanSnapshot = &Snapshot{} + scanSnapshot = &plan.Snapshot{} } // Apply unique/secondary indices if only indexed column is referenced @@ -596,7 +596,7 @@ func (builder *QueryBuilder) applyIndicesForFiltersRegularIndex(nodeID int32, no return nodeID } -func (builder *QueryBuilder) applyExtraFiltersOnIndex(idxDef *IndexDef, node *plan.Node, idxTableNode *plan.Node, filterIdx []int32) { +func (builder *QueryBuilder) applyExtraFiltersOnIndex(idxDef *plan.IndexDef, node *plan.Node, idxTableNode *plan.Node, filterIdx []int32) { for i := range node.FilterList { // if already in filterIdx, continue applied := false @@ -625,7 +625,7 @@ func (builder *QueryBuilder) applyExtraFiltersOnIndex(idxDef *IndexDef, node *pl // it's an extra filter and can be applied on index idxColExpr := GetColExpr(idxTableNode.TableDef.Cols[0].Typ, idxTableNode.BindingTags[0], 0) deserialExpr, _ := MakeSerialExtractExpr(builder.GetContext(), idxColExpr, fn.Args[0].Typ, int64(k)) - newFilter := DeepCopyExpr(node.FilterList[i]) + newFilter := plan.DeepCopyExpr(node.FilterList[i]) newFilter.GetF().Args[0] = deserialExpr idxTableNode.FilterList = append(idxTableNode.FilterList, newFilter) applied = true @@ -639,7 +639,7 @@ func (builder 
*QueryBuilder) applyExtraFiltersOnIndex(idxDef *IndexDef, node *pl //single pk if col.Name == node.TableDef.Pkey.PkeyColName { idxColExpr := GetColExpr(idxTableNode.TableDef.Cols[1].Typ, idxTableNode.BindingTags[0], 1) - newFilter := DeepCopyExpr(node.FilterList[i]) + newFilter := plan.DeepCopyExpr(node.FilterList[i]) newFilter.GetF().Args[0] = idxColExpr idxTableNode.FilterList = append(idxTableNode.FilterList, newFilter) } @@ -649,7 +649,7 @@ func (builder *QueryBuilder) applyExtraFiltersOnIndex(idxDef *IndexDef, node *pl if col.Name == node.TableDef.Pkey.Names[k] { idxColExpr := GetColExpr(idxTableNode.TableDef.Cols[1].Typ, idxTableNode.BindingTags[0], 1) deserialExpr, _ := MakeSerialExtractExpr(builder.GetContext(), idxColExpr, fn.Args[0].Typ, int64(k)) - newFilter := DeepCopyExpr(node.FilterList[i]) + newFilter := plan.DeepCopyExpr(node.FilterList[i]) newFilter.GetF().Args[0] = deserialExpr idxTableNode.FilterList = append(idxTableNode.FilterList, newFilter) continue @@ -659,7 +659,7 @@ func (builder *QueryBuilder) applyExtraFiltersOnIndex(idxDef *IndexDef, node *pl } } -func tryMatchMoreLeadingFilters(idxDef *IndexDef, node *plan.Node, pos int32) []int32 { +func tryMatchMoreLeadingFilters(idxDef *plan.IndexDef, node *plan.Node, pos int32) []int32 { leadingPos := []int32{pos} for i := range idxDef.Parts { if i == 0 { @@ -705,7 +705,7 @@ func checkIndexFilter(fn *plan.Function) (int, *plan.ColRef) { return EqualIndexCondition, col } - case "in", "between": + case "between", "in_range", "in": col := fn.Args[0].GetCol() if col != nil { return NonEqualIndexCondition, col @@ -731,7 +731,7 @@ func checkIndexFilter(fn *plan.Function) (int, *plan.ColRef) { return UnsupportedIndexCondition, nil } -func findLeadingFilter(idxDef *IndexDef, node *plan.Node) ([]int32, bool) { +func findLeadingFilter(idxDef *plan.IndexDef, node *plan.Node) ([]int32, bool) { leadingPos := node.TableDef.Name2ColIndex[idxDef.Parts[0]] for i := range node.FilterList { filterType, col := 
checkIndexFilter(node.FilterList[i].GetF()) @@ -752,7 +752,7 @@ func findLeadingFilter(idxDef *IndexDef, node *plan.Node) ([]int32, bool) { func (builder *QueryBuilder) replaceEqualCondition(filterList []*plan.Expr, filterPos []int32, idxTag int32, idxTableDef *plan.TableDef, numParts int) *plan.Expr { if numParts == 1 { //directly equal - expr := DeepCopyExpr(filterList[filterPos[0]]) + expr := plan.DeepCopyExpr(filterList[filterPos[0]]) args := expr.GetF().Args args[0].GetCol().RelPos = idxTag args[0].GetCol().ColPos = 0 @@ -764,7 +764,7 @@ func (builder *QueryBuilder) replaceEqualCondition(filterList []*plan.Expr, filt serialArgs := make([]*plan.Expr, len(filterPos)) for i := range filterPos { filter := filterList[filterPos[i]] - serialArgs[i] = DeepCopyExpr(filter.GetF().Args[1]) + serialArgs[i] = plan.DeepCopyExpr(filter.GetF().Args[1]) compositeFilterSel = compositeFilterSel * filter.Selectivity } rightArg, _ := BindFuncExprImplByPlanExpr(builder.GetContext(), "serial", serialArgs) @@ -780,7 +780,7 @@ func (builder *QueryBuilder) replaceEqualCondition(filterList []*plan.Expr, filt } func (builder *QueryBuilder) replaceNonEqualCondition(filter *plan.Expr, idxTag int32, idxTableDef *plan.TableDef, numParts int) *plan.Expr { - expr := DeepCopyExpr(filter) + expr := plan.DeepCopyExpr(filter) fn := expr.GetF() if fn.Func.ObjName == "or" { for i := range expr.GetF().Args { @@ -798,6 +798,12 @@ func (builder *QueryBuilder) replaceNonEqualCondition(filter *plan.Expr, idxTag fn.Args[1], _ = BindFuncExprImplByPlanExpr(builder.GetContext(), "serial", []*plan.Expr{fn.Args[1]}) fn.Args[2], _ = BindFuncExprImplByPlanExpr(builder.GetContext(), "serial", []*plan.Expr{fn.Args[2]}) expr, _ = BindFuncExprImplByPlanExpr(builder.GetContext(), "prefix_between", fn.Args) + + case "in_range": + fn.Args[1], _ = BindFuncExprImplByPlanExpr(builder.GetContext(), "serial", []*plan.Expr{fn.Args[1]}) + fn.Args[2], _ = BindFuncExprImplByPlanExpr(builder.GetContext(), "serial", 
[]*plan.Expr{fn.Args[2]}) + expr, _ = BindFuncExprImplByPlanExpr(builder.GetContext(), "prefix_in_range", fn.Args) + case "in": fn.Args[1], _ = BindFuncExprImplByPlanExpr(builder.GetContext(), "serial", []*plan.Expr{fn.Args[1]}) expr, _ = BindFuncExprImplByPlanExpr(builder.GetContext(), "prefix_in", fn.Args) @@ -813,7 +819,7 @@ func (builder *QueryBuilder) replaceLeadingFilter(filterList []*plan.Expr, leadi return builder.replaceEqualCondition(filterList, leadingPos, idxTag, idxTableDef, numParts) } -func (builder *QueryBuilder) tryIndexOnlyScan(idxDef *IndexDef, node *plan.Node, colRefCnt map[[2]int32]int, idxColMap map[[2]int32]*plan.Expr, scanSnapshot *Snapshot) int32 { +func (builder *QueryBuilder) tryIndexOnlyScan(idxDef *plan.IndexDef, node *plan.Node, colRefCnt map[[2]int32]int, idxColMap map[[2]int32]*plan.Expr, scanSnapshot *plan.Snapshot) int32 { // check if this index contains all columns needed for i := range node.TableDef.Cols { if colRefCnt[[2]int32{node.BindingTags[0], int32(i)}] > 0 { @@ -891,7 +897,7 @@ func (builder *QueryBuilder) tryIndexOnlyScan(idxDef *IndexDef, node *plan.Node, idxColMap[[2]int32{node.BindingTags[0], colIdx}] = GetColExpr(idxTableDef.Cols[1].Typ, idxTag, 1) } else { origType := node.TableDef.Cols[colIdx].Typ - mappedExpr, _ := MakeSerialExtractExpr(builder.GetContext(), DeepCopyExpr(leadingColExpr), origType, int64(i)) + mappedExpr, _ := MakeSerialExtractExpr(builder.GetContext(), plan.DeepCopyExpr(leadingColExpr), origType, int64(i)) idxColMap[[2]int32{node.BindingTags[0], colIdx}] = mappedExpr } } @@ -931,7 +937,7 @@ func (builder *QueryBuilder) tryIndexOnlyScan(idxDef *IndexDef, node *plan.Node, return idxTableNodeID } -func (builder *QueryBuilder) getIndexForNonEquiCond(indexes []*IndexDef, node *plan.Node) (int, []int32) { +func (builder *QueryBuilder) getIndexForNonEquiCond(indexes []*plan.IndexDef, node *plan.Node) (int, []int32) { // Apply single-column unique/secondary indices for non-equi expression colPos2Idx := 
make(map[int32]int) for i, idxDef := range indexes { @@ -958,7 +964,7 @@ func (builder *QueryBuilder) getIndexForNonEquiCond(indexes []*IndexDef, node *p return -1, nil } -func (builder *QueryBuilder) applyIndexJoin(idxDef *IndexDef, node *plan.Node, filterType int, filterIdx []int32, scanSnapshot *Snapshot) (int32, int32) { +func (builder *QueryBuilder) applyIndexJoin(idxDef *plan.IndexDef, node *plan.Node, filterType int, filterIdx []int32, scanSnapshot *plan.Snapshot) (int32, int32) { idxTag := builder.genNewBindTag() idxObjRef, idxTableDef, err := builder.compCtx.ResolveIndexTableByRef(node.ObjRef, idxDef.IndexTableName, scanSnapshot) if err != nil { @@ -989,7 +995,7 @@ func (builder *QueryBuilder) applyIndexJoin(idxDef *IndexDef, node *plan.Node, f TableDef: idxTableDef, ObjRef: idxObjRef, IndexScanInfo: idxScanInfo, - ParentObjRef: DeepCopyObjectRef(node.ObjRef), + ParentObjRef: plan.DeepCopyObjectRef(node.ObjRef), FilterList: []*plan.Expr{idxFilter}, BindingTags: []int32{idxTag}, ScanSnapshot: node.ScanSnapshot, @@ -1001,7 +1007,7 @@ func (builder *QueryBuilder) applyIndexJoin(idxDef *IndexDef, node *plan.Node, f pkExpr := GetColExpr(node.TableDef.Cols[pkIdx].Typ, node.BindingTags[0], pkIdx) joinCond, _ := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*plan.Expr{ - DeepCopyExpr(pkExpr), + plan.DeepCopyExpr(pkExpr), GetColExpr(pkExpr.Typ, idxTag, 1), }) joinNode := &plan.Node{ @@ -1022,7 +1028,7 @@ func (builder *QueryBuilder) applyIndexJoin(idxDef *IndexDef, node *plan.Node, f return joinNodeID, idxTableNodeID } -func (builder *QueryBuilder) getMostSelectiveIndexForPointSelect(indexes []*IndexDef, node *plan.Node, ignoreStats bool) (int, []int32) { +func (builder *QueryBuilder) getMostSelectiveIndexForPointSelect(indexes []*plan.IndexDef, node *plan.Node, ignoreStats bool) (int, []int32) { currentSel := 1.0 currentIdx := -1 savedFilterIdx := make([]int32, 0) @@ -1087,7 +1093,7 @@ func (builder *QueryBuilder) 
getMostSelectiveIndexForPointSelect(indexes []*Inde return currentIdx, savedFilterIdx } -func (builder *QueryBuilder) applyIndicesForJoins(nodeID int32, node *plan.Node, colRefCnt map[[2]int32]int, idxColMap map[[2]int32]*plan.Expr) int32 { +func (builder *QueryBuilder) applyIndicesForJoins(nodeID int32, node *plan.Node) int32 { sid := builder.compCtx.GetProcess().GetService() if node.JoinType != plan.Node_INNER && node.JoinType != plan.Node_RIGHT && node.JoinType != plan.Node_SEMI && @@ -1108,7 +1114,7 @@ func (builder *QueryBuilder) applyIndicesForJoins(nodeID int32, node *plan.Node, scanSnapshot := leftChild.ScanSnapshot if scanSnapshot == nil { - scanSnapshot = &Snapshot{} + scanSnapshot = &plan.Snapshot{} } //---------------------------------------------------------------------- @@ -1254,7 +1260,7 @@ func (builder *QueryBuilder) applyIndicesForJoins(nodeID int32, node *plan.Node, TableDef: idxTableDef, ObjRef: idxObjRef, IndexScanInfo: idxScanInfo, - ParentObjRef: DeepCopyObjectRef(leftChild.ObjRef), + ParentObjRef: plan.DeepCopyObjectRef(leftChild.ObjRef), BindingTags: []int32{idxTag}, ScanSnapshot: leftChild.ScanSnapshot, RuntimeFilterProbeList: []*plan.RuntimeFilterSpec{nodeProbeRuntimeFilter}, diff --git a/pkg/sql/plan/apply_indices_fulltext.go b/pkg/sql/planner/apply_indices_fulltext.go similarity index 94% rename from pkg/sql/plan/apply_indices_fulltext.go rename to pkg/sql/planner/apply_indices_fulltext.go index 347f244cc0994..48703d02cf91f 100644 --- a/pkg/sql/plan/apply_indices_fulltext.go +++ b/pkg/sql/planner/apply_indices_fulltext.go @@ -11,7 +11,7 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "fmt" @@ -21,8 +21,8 @@ import ( "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" ) // The idea is as follows: @@ -62,7 +62,7 @@ func (builder *QueryBuilder) applyIndicesForProjectionUsingFullTextIndex(nodeID eqmap := builder.findEqualFullTextMatchFunc(projNode, scanNode, projids, filterids) idxID, filter_node_ids, proj_node_ids, err := builder.applyJoinFullTextIndices(nodeID, projNode, scanNode, - filterids, filterIndexDefs, projids, projIndexDef, eqmap, colRefCnt, idxColMap) + filterids, filterIndexDefs, projids, projIndexDef, eqmap) if err != nil { return -1, err } @@ -74,11 +74,11 @@ func (builder *QueryBuilder) applyIndicesForProjectionUsingFullTextIndex(nodeID // create sort node with order by score DESC - var orderByScore []*OrderBySpec + var orderByScore []*plan.OrderBySpec for _, id := range filter_node_ids { ftnode := builder.qry.Nodes[id] - orderByScore = append(orderByScore, &OrderBySpec{ - Expr: &Expr{ + orderByScore = append(orderByScore, &plan.OrderBySpec{ + Expr: &plan.Expr{ Typ: ftnode.TableDef.Cols[1].Typ, // score column Expr: &plan.Expr_Col{ Col: &plan.ColRef{ @@ -98,8 +98,8 @@ func (builder *QueryBuilder) applyIndicesForProjectionUsingFullTextIndex(nodeID } ftnode := builder.qry.Nodes[id] - orderByScore = append(orderByScore, &OrderBySpec{ - Expr: &Expr{ + orderByScore = append(orderByScore, &plan.OrderBySpec{ + Expr: &plan.Expr{ Typ: ftnode.TableDef.Cols[1].Typ, // score column Expr: &plan.Expr_Col{ Col: &plan.ColRef{ @@ -116,8 +116,8 @@ func (builder *QueryBuilder) applyIndicesForProjectionUsingFullTextIndex(nodeID NodeType: plan.Node_SORT, Children: []int32{idxID}, OrderBy: orderByScore, - Limit: DeepCopyExpr(scanNode.Limit), - 
Offset: DeepCopyExpr(scanNode.Offset), + Limit: plan.DeepCopyExpr(scanNode.Limit), + Offset: plan.DeepCopyExpr(scanNode.Offset), }, ctx) // move scanNode.Limit to sortNode @@ -131,7 +131,7 @@ func (builder *QueryBuilder) applyIndicesForProjectionUsingFullTextIndex(nodeID for i, id := range proj_node_ids { idx := projids[i] ftnode := builder.qry.Nodes[id] - projNode.ProjectList[idx] = &Expr{ + projNode.ProjectList[idx] = &plan.Expr{ Typ: ftnode.TableDef.Cols[1].Typ, // score column Expr: &plan.Expr_Col{ Col: &plan.ColRef{ @@ -169,7 +169,7 @@ func (builder *QueryBuilder) applyIndicesForAggUsingFullTextIndex(nodeID int32, eqmap := make(map[int32]int32) idxID, _, _, err := builder.applyJoinFullTextIndices(nodeID, projNode, scanNode, - filterids, filterIndexDefs, projids, projIndexDefs, eqmap, colRefCnt, idxColMap) + filterids, filterIndexDefs, projids, projIndexDefs, eqmap) if err != nil { return -1, err } @@ -182,7 +182,7 @@ func (builder *QueryBuilder) applyIndicesForAggUsingFullTextIndex(nodeID int32, func (builder *QueryBuilder) applyJoinFullTextIndices(nodeID int32, projNode *plan.Node, scanNode *plan.Node, filterids []int32, filter_indexDefs []*plan.IndexDef, projids []int32, proj_indexDefs []*plan.IndexDef, eqmap map[int32]int32, - colRefCnt map[[2]int32]int, idxColMap map[[2]int32]*plan.Expr) (int32, []int32, []int32, error) { +) (int32, []int32, []int32, error) { ctx := builder.ctxByNode[nodeID] @@ -235,7 +235,7 @@ func (builder *QueryBuilder) applyJoinFullTextIndices(nodeID int32, projNode *pl // buildFullTextIndexScan var last_node_id int32 - var last_ftnode_pkcol *Expr + var last_ftnode_pkcol *plan.Expr for i := 0; i < len(ft_filters); i++ { ftidxscan := ft_filters[i] @@ -301,7 +301,7 @@ func (builder *QueryBuilder) applyJoinFullTextIndices(nodeID int32, projNode *pl curr_ftnode := builder.qry.Nodes[curr_ftnode_id] curr_ftnode_tag := curr_ftnode.BindingTags[0] - curr_ftnode_pkcol := &Expr{ + curr_ftnode_pkcol := &plan.Expr{ Typ: pkType, Expr: 
&plan.Expr_Col{ Col: &plan.ColRef{ @@ -313,7 +313,7 @@ func (builder *QueryBuilder) applyJoinFullTextIndices(nodeID int32, projNode *pl // pushdown limit if limitExpr != nil { - curr_ftnode.Limit = DeepCopyExpr(limitExpr) + curr_ftnode.Limit = plan.DeepCopyExpr(limitExpr) } // change doc_id type to the primary type here @@ -326,7 +326,7 @@ func (builder *QueryBuilder) applyJoinFullTextIndices(nodeID int32, projNode *pl // JOIN INNER with children (curr_ftnode_id, last_node_id) // oncond - wherePkEqPk, _ := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*Expr{ + wherePkEqPk, _ := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*plan.Expr{ curr_ftnode_pkcol, last_ftnode_pkcol, }) @@ -334,20 +334,20 @@ func (builder *QueryBuilder) applyJoinFullTextIndices(nodeID int32, projNode *pl NodeType: plan.Node_JOIN, Children: []int32{curr_ftnode_id, last_node_id}, JoinType: plan.Node_INNER, - OnList: []*Expr{wherePkEqPk}, + OnList: []*plan.Expr{wherePkEqPk}, }, ctx) } else { last_node_id = curr_ftnode_id } - last_ftnode_pkcol = DeepCopyExpr(curr_ftnode_pkcol) + last_ftnode_pkcol = plan.DeepCopyExpr(curr_ftnode_pkcol) } // JOIN INNER with children (nodeId, FullTextIndexScanId) // oncond - wherePkEqPk, _ := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*Expr{ + wherePkEqPk, _ := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*plan.Expr{ { Typ: pkType, Expr: &plan.Expr_Col{ @@ -372,7 +372,7 @@ func (builder *QueryBuilder) applyJoinFullTextIndices(nodeID int32, projNode *pl NodeType: plan.Node_JOIN, Children: []int32{scanNode.NodeId, last_node_id}, JoinType: plan.Node_INNER, - OnList: []*Expr{wherePkEqPk}, + OnList: []*plan.Expr{wherePkEqPk}, }, ctx) return joinnodeID, ret_filter_node_ids, ret_proj_node_ids, nil @@ -534,7 +534,7 @@ func (builder *QueryBuilder) getFullTextMatchScoreExpr(expr *plan.Expr) *plan.Ex returnType = fGet.GetReturnType() exprType := makePlan2Type(&returnType) exprType.NotNullable = function.DeduceNotNullable(funcID, 
fn.Args) - newExpr := &Expr{ + newExpr := &plan.Expr{ Typ: exprType, Expr: &plan.Expr_F{ F: &plan.Function{ diff --git a/pkg/sql/plan/apply_indices_hnsw.go b/pkg/sql/planner/apply_indices_hnsw.go similarity index 92% rename from pkg/sql/plan/apply_indices_hnsw.go rename to pkg/sql/planner/apply_indices_hnsw.go index a8cba70f6200c..8068382c10eb6 100644 --- a/pkg/sql/plan/apply_indices_hnsw.go +++ b/pkg/sql/planner/apply_indices_hnsw.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "fmt" @@ -23,7 +23,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan/rule" + "github.com/matrixorigin/matrixone/pkg/sql/planner/rule" "github.com/matrixorigin/matrixone/pkg/vectorindex/metric" ) @@ -143,7 +143,7 @@ func (builder *QueryBuilder) applyIndicesForSortUsingHnsw(nodeID int32, vecCtx * Name: kHNSWSearchFuncName, Param: []byte(hnswCtx.params), }, - Cols: DeepCopyColDefList(kHNSWSearchColDefs), + Cols: plan.DeepCopyColDefList(kHNSWSearchColDefs), }, BindingTags: []int32{tableFuncTag}, TblFuncExprList: []*plan.Expr{ @@ -159,7 +159,7 @@ func (builder *QueryBuilder) applyIndicesForSortUsingHnsw(nodeID int32, vecCtx * }, }, }, - DeepCopyExpr(hnswCtx.vecLitArg), + plan.DeepCopyExpr(hnswCtx.vecLitArg), }, } tableFuncNodeID := builder.appendNode(tableFuncNode, ctx) @@ -182,7 +182,7 @@ func (builder *QueryBuilder) applyIndicesForSortUsingHnsw(nodeID int32, vecCtx * overFetchFactor := calculatePostFilterOverFetchFactor(originalLimit) newLimit := max(uint64(float64(originalLimit)*overFetchFactor), originalLimit+10) - tableFuncNode.Limit = &Expr{ + tableFuncNode.Limit = &plan.Expr{ Typ: limit.Typ, Expr: &plan.Expr_Lit{ Lit: &plan.Literal{ @@ -195,15 +195,15 @@ func (builder *QueryBuilder) 
applyIndicesForSortUsingHnsw(nodeID int32, vecCtx * } } else { // If limit is not a constant, just copy it - tableFuncNode.Limit = DeepCopyExpr(limit) + tableFuncNode.Limit = plan.DeepCopyExpr(limit) } } else { // No filters, use original limit - tableFuncNode.Limit = DeepCopyExpr(limit) + tableFuncNode.Limit = plan.DeepCopyExpr(limit) } // oncond - wherePkEqPk, _ := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*Expr{ + wherePkEqPk, _ := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*plan.Expr{ { Typ: hnswCtx.pkType, Expr: &plan.Expr_Col{ @@ -228,7 +228,7 @@ func (builder *QueryBuilder) applyIndicesForSortUsingHnsw(nodeID int32, vecCtx * NodeType: plan.Node_JOIN, Children: []int32{scanNode.NodeId, tableFuncNodeID}, JoinType: plan.Node_INNER, - OnList: []*Expr{wherePkEqPk}, + OnList: []*plan.Expr{wherePkEqPk}, // Don't set Limit/Offset on JOIN - they should be applied after SORT }, ctx) @@ -238,9 +238,9 @@ func (builder *QueryBuilder) applyIndicesForSortUsingHnsw(nodeID int32, vecCtx * scanNode.Offset = nil // Create SortBy with distance column from table function - orderByScore := []*OrderBySpec{ + orderByScore := []*plan.OrderBySpec{ { - Expr: &Expr{ + Expr: &plan.Expr{ Typ: tableFuncNode.TableDef.Cols[1].Typ, // score column Expr: &plan.Expr_Col{ Col: &plan.ColRef{ @@ -257,8 +257,8 @@ func (builder *QueryBuilder) applyIndicesForSortUsingHnsw(nodeID int32, vecCtx * NodeType: plan.Node_SORT, Children: []int32{joinNodeID}, OrderBy: orderByScore, - Limit: limit, // Apply LIMIT after sorting - Offset: DeepCopyExpr(sortNode.Offset), // Apply OFFSET after sorting + Limit: limit, // Apply LIMIT after sorting + Offset: plan.DeepCopyExpr(sortNode.Offset), // Apply OFFSET after sorting }, ctx) projNode.Children[0] = sortByID @@ -268,7 +268,7 @@ func (builder *QueryBuilder) applyIndicesForSortUsingHnsw(nodeID int32, vecCtx * projMap := make(map[[2]int32]*plan.Expr) for i, proj := range childNode.ProjectList { if i == int(sortIdx) { - 
projMap[[2]int32{childNode.BindingTags[0], int32(i)}] = DeepCopyExpr(orderByScore[0].Expr) + projMap[[2]int32{childNode.BindingTags[0], int32(i)}] = plan.DeepCopyExpr(orderByScore[0].Expr) } else { projMap[[2]int32{childNode.BindingTags[0], int32(i)}] = proj } diff --git a/pkg/sql/plan/apply_indices_hnsw_test.go b/pkg/sql/planner/apply_indices_hnsw_test.go similarity index 98% rename from pkg/sql/plan/apply_indices_hnsw_test.go rename to pkg/sql/planner/apply_indices_hnsw_test.go index 024a1acd2d422..a92b03044c8d5 100644 --- a/pkg/sql/plan/apply_indices_hnsw_test.go +++ b/pkg/sql/planner/apply_indices_hnsw_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "context" @@ -78,7 +78,7 @@ func TestPrepareHnswIndexContext_ForceModeEnabled(t *testing.T) { builder := NewQueryBuilder(plan.Query_SELECT, NewMockCompilerContext(true), false, true) vecCtx := &vectorSortContext{ distFnExpr: &plan.Function{ - Func: &ObjectRef{ + Func: &plan.ObjectRef{ ObjName: "l2_distance", }, }, @@ -98,7 +98,7 @@ func TestPrepareHnswIndexContext_NilMetaDef(t *testing.T) { builder := NewQueryBuilder(plan.Query_SELECT, NewMockCompilerContext(true), false, true) vecCtx := &vectorSortContext{ distFnExpr: &plan.Function{ - Func: &ObjectRef{ + Func: &plan.ObjectRef{ ObjName: "l2_distance", }, }, @@ -120,7 +120,7 @@ func TestPrepareHnswIndexContext_NilIdxDef(t *testing.T) { builder := NewQueryBuilder(plan.Query_SELECT, NewMockCompilerContext(true), false, true) vecCtx := &vectorSortContext{ distFnExpr: &plan.Function{ - Func: &ObjectRef{ + Func: &plan.ObjectRef{ ObjName: "l2_distance", }, }, @@ -142,7 +142,7 @@ func TestPrepareHnswIndexContext_InvalidIndexAlgoParams(t *testing.T) { builder := NewQueryBuilder(plan.Query_SELECT, NewMockCompilerContext(true), false, true) vecCtx := &vectorSortContext{ distFnExpr: &plan.Function{ - Func: &ObjectRef{ + Func: &plan.ObjectRef{ ObjName: 
"l2_distance", }, }, @@ -166,7 +166,7 @@ func TestPrepareHnswIndexContext_MissingOpType(t *testing.T) { builder := NewQueryBuilder(plan.Query_SELECT, NewMockCompilerContext(true), false, true) vecCtx := &vectorSortContext{ distFnExpr: &plan.Function{ - Func: &ObjectRef{ + Func: &plan.ObjectRef{ ObjName: "l2_distance", }, }, @@ -190,7 +190,7 @@ func TestPrepareHnswIndexContext_OpTypeNotString(t *testing.T) { builder := NewQueryBuilder(plan.Query_SELECT, NewMockCompilerContext(true), false, true) vecCtx := &vectorSortContext{ distFnExpr: &plan.Function{ - Func: &ObjectRef{ + Func: &plan.ObjectRef{ ObjName: "l2_distance", }, }, @@ -214,7 +214,7 @@ func TestPrepareHnswIndexContext_OpTypeMismatch(t *testing.T) { builder := NewQueryBuilder(plan.Query_SELECT, NewMockCompilerContext(true), false, true) vecCtx := &vectorSortContext{ distFnExpr: &plan.Function{ - Func: &ObjectRef{ + Func: &plan.ObjectRef{ ObjName: "l2_distance", }, }, @@ -269,7 +269,7 @@ func TestPrepareHnswIndexContext_ArgsNotFound(t *testing.T) { // (e.g., both args are literals instead of col + literal) vecCtx := &vectorSortContext{ distFnExpr: &plan.Function{ - Func: &ObjectRef{ + Func: &plan.ObjectRef{ ObjName: "l2_distance", }, Args: []*plan.Expr{ @@ -351,7 +351,7 @@ func TestPrepareHnswIndexContext_ResolveVariableError(t *testing.T) { vecCtx := &vectorSortContext{ distFnExpr: &plan.Function{ - Func: &ObjectRef{ + Func: &plan.ObjectRef{ ObjName: "l2_distance", }, Args: []*plan.Expr{ @@ -437,7 +437,7 @@ func TestPrepareHnswIndexContext_Success(t *testing.T) { vecCtx := &vectorSortContext{ distFnExpr: &plan.Function{ - Func: &ObjectRef{ + Func: &plan.ObjectRef{ ObjName: "l2_distance", }, Args: []*plan.Expr{ @@ -570,7 +570,7 @@ func TestPrepareHnswIndexContext_DifferentDistanceFunctions(t *testing.T) { vecCtx := &vectorSortContext{ distFnExpr: &plan.Function{ - Func: &ObjectRef{ + Func: &plan.ObjectRef{ ObjName: tc.funcName, }, Args: []*plan.Expr{ diff --git a/pkg/sql/plan/apply_indices_ivfflat.go 
b/pkg/sql/planner/apply_indices_ivfflat.go similarity index 96% rename from pkg/sql/plan/apply_indices_ivfflat.go rename to pkg/sql/planner/apply_indices_ivfflat.go index c4ce8c092b2a8..7a870e2fb58bd 100644 --- a/pkg/sql/plan/apply_indices_ivfflat.go +++ b/pkg/sql/planner/apply_indices_ivfflat.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "fmt" @@ -256,7 +256,7 @@ func (builder *QueryBuilder) applyIndicesForSortUsingIvfflat(nodeID int32, vecCt Name: kIVFSearchFuncName, Param: []byte(ivfCtx.params), }, - Cols: DeepCopyColDefList(kIVFSearchColDefs), + Cols: plan.DeepCopyColDefList(kIVFSearchColDefs), }, BindingTags: []int32{tableFuncTag}, TblFuncExprList: []*plan.Expr{ @@ -272,7 +272,7 @@ func (builder *QueryBuilder) applyIndicesForSortUsingIvfflat(nodeID int32, vecCt }, }, }, - DeepCopyExpr(ivfCtx.vecLitArg), + plan.DeepCopyExpr(ivfCtx.vecLitArg), }, } tableFuncNodeID := builder.appendNode(tableFuncNode, ctx) @@ -291,7 +291,7 @@ func (builder *QueryBuilder) applyIndicesForSortUsingIvfflat(nodeID int32, vecCt // pushdown limit to Table Function // When there are filters, over-fetch to get more candidates // This ensures we have enough candidates after filtering - limitExpr := DeepCopyExpr(limit) + limitExpr := plan.DeepCopyExpr(limit) if len(scanNode.FilterList) > 0 { // Over-fetch strategy: dynamically adjust factor based on limit size // Smaller limits need more over-fetching due to higher variance @@ -302,7 +302,7 @@ func (builder *QueryBuilder) applyIndicesForSortUsingIvfflat(nodeID int32, vecCt overFetchFactor := calculatePostFilterOverFetchFactor(originalLimit) newLimit := max(uint64(float64(originalLimit)*overFetchFactor), originalLimit+10) - limitExpr = &Expr{ + limitExpr = &plan.Expr{ Typ: limit.Typ, Expr: &plan.Expr_Lit{ Lit: &plan.Literal{ @@ -349,7 +349,7 @@ func (builder *QueryBuilder) applyIndicesForSortUsingIvfflat(nodeID int32, vecCt 
} for key, value := range idxColMap { if key[0] == oldTag { - idxColMap[[2]int32{newTag, key[1]}] = DeepCopyExpr(value) + idxColMap[[2]int32{newTag, key[1]}] = plan.DeepCopyExpr(value) } } } @@ -387,7 +387,7 @@ func (builder *QueryBuilder) applyIndicesForSortUsingIvfflat(nodeID int32, vecCt }, ctx) // inner join: (ivf_search table function JOIN second table project) - innerJoinOn, _ := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*Expr{ + innerJoinOn, _ := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*plan.Expr{ { Typ: ivfCtx.pkType, Expr: &plan.Expr_Col{ @@ -412,7 +412,7 @@ func (builder *QueryBuilder) applyIndicesForSortUsingIvfflat(nodeID int32, vecCt NodeType: plan.Node_JOIN, Children: []int32{tableFuncNodeID, secondProjectNodeID}, JoinType: plan.Node_INNER, - OnList: []*Expr{innerJoinOn}, + OnList: []*plan.Expr{innerJoinOn}, // Don't set Limit/Offset on JOIN - they should be applied after SORT }, ctx) @@ -449,7 +449,7 @@ func (builder *QueryBuilder) applyIndicesForSortUsingIvfflat(nodeID int32, vecCt tableFuncNode.RuntimeFilterProbeList = []*plan.RuntimeFilterSpec{probeSpec} // outer join: original table JOIN (inner ivf join) - outerOn, _ := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*Expr{ + outerOn, _ := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*plan.Expr{ { Typ: ivfCtx.pkType, Expr: &plan.Expr_Col{ @@ -474,7 +474,7 @@ func (builder *QueryBuilder) applyIndicesForSortUsingIvfflat(nodeID int32, vecCt NodeType: plan.Node_JOIN, Children: []int32{scanNode.NodeId, innerJoinNodeID}, JoinType: plan.Node_INNER, - OnList: []*Expr{outerOn}, + OnList: []*plan.Expr{outerOn}, // Don't set Limit/Offset on JOIN - they should be applied after SORT }, ctx) @@ -496,7 +496,7 @@ func (builder *QueryBuilder) applyIndicesForSortUsingIvfflat(nodeID int32, vecCt }, }, } - probeSpec2 := MakeRuntimeFilter(rfTag2, false, 0, DeepCopyExpr(probeExpr2), false) + probeSpec2 := MakeRuntimeFilter(rfTag2, false, 0, 
plan.DeepCopyExpr(probeExpr2), false) scanNode.RuntimeFilterProbeList = append(scanNode.RuntimeFilterProbeList, probeSpec2) // build: placeholder column, HashBuild will generate IN-list based on build side join key's UniqueJoinKeys[0] @@ -522,7 +522,7 @@ func (builder *QueryBuilder) applyIndicesForSortUsingIvfflat(nodeID int32, vecCt joinRootID = outerJoinNodeID } else { // JOIN( table, ivf ) - wherePkEqPk, _ := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*Expr{ + wherePkEqPk, _ := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*plan.Expr{ { Typ: ivfCtx.pkType, Expr: &plan.Expr_Col{ @@ -547,7 +547,7 @@ func (builder *QueryBuilder) applyIndicesForSortUsingIvfflat(nodeID int32, vecCt NodeType: plan.Node_JOIN, Children: []int32{scanNode.NodeId, tableFuncNodeID}, JoinType: plan.Node_INNER, - OnList: []*Expr{wherePkEqPk}, + OnList: []*plan.Expr{wherePkEqPk}, // Don't set Limit/Offset on JOIN - they should be applied after SORT }, ctx) @@ -561,7 +561,7 @@ func (builder *QueryBuilder) applyIndicesForSortUsingIvfflat(nodeID int32, vecCt scanNode.Offset = nil // Create SortBy, still sort directly by table function's score, let remap map ColRef to corresponding output column - orderByScore := []*OrderBySpec{ + orderByScore := []*plan.OrderBySpec{ { Expr: &plan.Expr{ Typ: tableFuncNode.TableDef.Cols[1].Typ, // score column @@ -580,8 +580,8 @@ func (builder *QueryBuilder) applyIndicesForSortUsingIvfflat(nodeID int32, vecCt NodeType: plan.Node_SORT, Children: []int32{joinRootID}, OrderBy: orderByScore, - Limit: limit, // Apply LIMIT after sorting - Offset: DeepCopyExpr(sortNode.Offset), // Apply OFFSET after sorting + Limit: limit, // Apply LIMIT after sorting + Offset: plan.DeepCopyExpr(sortNode.Offset), // Apply OFFSET after sorting }, ctx) projNode.Children[0] = sortByID @@ -591,7 +591,7 @@ func (builder *QueryBuilder) applyIndicesForSortUsingIvfflat(nodeID int32, vecCt projMap := make(map[[2]int32]*plan.Expr) for i, proj := range childNode.ProjectList { 
if i == int(sortIdx) { - projMap[[2]int32{childNode.BindingTags[0], int32(i)}] = DeepCopyExpr(orderByScore[0].Expr) + projMap[[2]int32{childNode.BindingTags[0], int32(i)}] = plan.DeepCopyExpr(orderByScore[0].Expr) } else { projMap[[2]int32{childNode.BindingTags[0], int32(i)}] = proj } @@ -631,7 +631,7 @@ func (builder *QueryBuilder) buildPkExprFromNode(nodeID int32, pkType plan.Type, for _, expr := range node.ProjectList { if col := expr.GetCol(); col != nil { if builder.getColName(col) == pkName { - return DeepCopyExpr(expr) + return plan.DeepCopyExpr(expr) } } } diff --git a/pkg/sql/plan/apply_indices_ivfflat_optimize_test.go b/pkg/sql/planner/apply_indices_ivfflat_optimize_test.go similarity index 98% rename from pkg/sql/plan/apply_indices_ivfflat_optimize_test.go rename to pkg/sql/planner/apply_indices_ivfflat_optimize_test.go index 14f0a4109174c..187ad0b1bfe61 100644 --- a/pkg/sql/plan/apply_indices_ivfflat_optimize_test.go +++ b/pkg/sql/planner/apply_indices_ivfflat_optimize_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "testing" @@ -108,7 +108,7 @@ func TestApplyIndicesForSortUsingIvfflat_PushdownOptimization(t *testing.T) { scanNode := builder.qry.Nodes[scanNodeID] distFnExpr := &plan.Function{ - Func: &ObjectRef{ObjName: "l2_distance"}, + Func: &plan.ObjectRef{ObjName: "l2_distance"}, Args: []*plan.Expr{ {Typ: float32Typ, Expr: &plan.Expr_Col{Col: &plan.ColRef{RelPos: scanNode.BindingTags[0], ColPos: 1}}}, {Typ: float32Typ, Expr: &plan.Expr_Lit{Lit: &plan.Literal{Value: &plan.Literal_VecVal{VecVal: "[1,1,1]"}}}}, @@ -154,7 +154,7 @@ func TestApplyIndicesForSortUsingIvfflat_PushdownOptimization(t *testing.T) { } distFnExpr := &plan.Function{ - Func: &ObjectRef{ObjName: "l2_distance"}, + Func: &plan.ObjectRef{ObjName: "l2_distance"}, Args: []*plan.Expr{ {Typ: float32Typ, Expr: &plan.Expr_Col{Col: &plan.ColRef{RelPos: scanNode.BindingTags[0], ColPos: 1}}}, {Typ: float32Typ, Expr: &plan.Expr_Lit{Lit: &plan.Literal{Value: &plan.Literal_VecVal{VecVal: "[1,1,1]"}}}}, diff --git a/pkg/sql/plan/apply_indices_ivfflat_test.go b/pkg/sql/planner/apply_indices_ivfflat_test.go similarity index 99% rename from pkg/sql/plan/apply_indices_ivfflat_test.go rename to pkg/sql/planner/apply_indices_ivfflat_test.go index 97616001db969..d27d37cf5b453 100644 --- a/pkg/sql/plan/apply_indices_ivfflat_test.go +++ b/pkg/sql/planner/apply_indices_ivfflat_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "context" @@ -69,7 +69,7 @@ func TestPrepareIvfIndexContext_ForceModeEnabled(t *testing.T) { builder := NewQueryBuilder(plan.Query_SELECT, NewMockCompilerContext(true), false, true) vecCtx := &vectorSortContext{ distFnExpr: &plan.Function{ - Func: &ObjectRef{ + Func: &plan.ObjectRef{ ObjName: "l2_distance", }, }, @@ -89,7 +89,7 @@ func TestPrepareIvfIndexContext_NilMetaDef(t *testing.T) { builder := NewQueryBuilder(plan.Query_SELECT, NewMockCompilerContext(true), false, true) vecCtx := &vectorSortContext{ distFnExpr: &plan.Function{ - Func: &ObjectRef{ + Func: &plan.ObjectRef{ ObjName: "l2_distance", }, }, @@ -112,7 +112,7 @@ func TestPrepareIvfIndexContext_NilIdxDef(t *testing.T) { builder := NewQueryBuilder(plan.Query_SELECT, NewMockCompilerContext(true), false, true) vecCtx := &vectorSortContext{ distFnExpr: &plan.Function{ - Func: &ObjectRef{ + Func: &plan.ObjectRef{ ObjName: "l2_distance", }, }, @@ -135,7 +135,7 @@ func TestPrepareIvfIndexContext_NilEntriesDef(t *testing.T) { builder := NewQueryBuilder(plan.Query_SELECT, NewMockCompilerContext(true), false, true) vecCtx := &vectorSortContext{ distFnExpr: &plan.Function{ - Func: &ObjectRef{ + Func: &plan.ObjectRef{ ObjName: "l2_distance", }, }, @@ -158,7 +158,7 @@ func TestPrepareIvfIndexContext_InvalidIndexAlgoParams(t *testing.T) { builder := NewQueryBuilder(plan.Query_SELECT, NewMockCompilerContext(true), false, true) vecCtx := &vectorSortContext{ distFnExpr: &plan.Function{ - Func: &ObjectRef{ + Func: &plan.ObjectRef{ ObjName: "l2_distance", }, }, @@ -183,7 +183,7 @@ func TestPrepareIvfIndexContext_OpTypeMismatch(t *testing.T) { builder := NewQueryBuilder(plan.Query_SELECT, NewMockCompilerContext(true), false, true) vecCtx := &vectorSortContext{ distFnExpr: &plan.Function{ - Func: &ObjectRef{ + Func: &plan.ObjectRef{ ObjName: "l2_distance", }, }, @@ -236,7 +236,7 @@ func TestPrepareIvfIndexContext_ArgsNotFound(t *testing.T) { vecCtx := &vectorSortContext{ distFnExpr: 
&plan.Function{ - Func: &ObjectRef{ + Func: &plan.ObjectRef{ ObjName: "l2_distance", }, Args: []*plan.Expr{ @@ -319,7 +319,7 @@ func TestPrepareIvfIndexContext_ResolveVariableError_IvfThreads(t *testing.T) { vecCtx := &vectorSortContext{ distFnExpr: &plan.Function{ - Func: &ObjectRef{ + Func: &plan.ObjectRef{ ObjName: "l2_distance", }, Args: []*plan.Expr{ @@ -408,7 +408,7 @@ func TestPrepareIvfIndexContext_ResolveVariableError_ProbeLimit(t *testing.T) { vecCtx := &vectorSortContext{ distFnExpr: &plan.Function{ - Func: &ObjectRef{ + Func: &plan.ObjectRef{ ObjName: "l2_distance", }, Args: []*plan.Expr{ @@ -497,7 +497,7 @@ func TestPrepareIvfIndexContext_ProbeLimitNotInt64(t *testing.T) { vecCtx := &vectorSortContext{ distFnExpr: &plan.Function{ - Func: &ObjectRef{ + Func: &plan.ObjectRef{ ObjName: "l2_distance", }, Args: []*plan.Expr{ @@ -586,7 +586,7 @@ func TestPrepareIvfIndexContext_Success(t *testing.T) { vecCtx := &vectorSortContext{ distFnExpr: &plan.Function{ - Func: &ObjectRef{ + Func: &plan.ObjectRef{ ObjName: "l2_distance", }, Args: []*plan.Expr{ diff --git a/pkg/sql/plan/apply_indices_master.go b/pkg/sql/planner/apply_indices_master.go similarity index 82% rename from pkg/sql/plan/apply_indices_master.go rename to pkg/sql/planner/apply_indices_master.go index ee2e9bf077d08..687226f3bcd77 100644 --- a/pkg/sql/plan/apply_indices_master.go +++ b/pkg/sql/planner/apply_indices_master.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/catalog" @@ -20,7 +20,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/pb/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" ) var ( @@ -33,7 +33,7 @@ func (builder *QueryBuilder) applyIndicesForFiltersUsingMasterIndex(nodeID int32 var pkType = scanNode.TableDef.Cols[pkPos].Typ var colDefs = scanNode.TableDef.Cols - var prevIndexPkCol *Expr + var prevIndexPkCol *plan.Expr var prevLastNodeId int32 var lastNodeId int32 @@ -54,7 +54,7 @@ func (builder *QueryBuilder) applyIndicesForFiltersUsingMasterIndex(nodeID int32 // ON idx1.pk = idx2.pk // ... lastNodeId = currScanId - currIndexPkCol := &Expr{ + currIndexPkCol := &plan.Expr{ Typ: pkType, Expr: &plan.Expr_Col{ Col: &plan.ColRef{ @@ -64,7 +64,7 @@ func (builder *QueryBuilder) applyIndicesForFiltersUsingMasterIndex(nodeID int32 }, } if i != 0 { - wherePrevPkEqCurrPk, _ := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*Expr{ + wherePrevPkEqCurrPk, _ := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*plan.Expr{ currIndexPkCol, prevIndexPkCol, }) @@ -72,16 +72,16 @@ func (builder *QueryBuilder) applyIndicesForFiltersUsingMasterIndex(nodeID int32 NodeType: plan.Node_JOIN, JoinType: plan.Node_INNER, Children: []int32{currScanId, prevLastNodeId}, - OnList: []*Expr{wherePrevPkEqCurrPk}, + OnList: []*plan.Expr{wherePrevPkEqCurrPk}, }, builder.ctxByNode[nodeID]) } - prevIndexPkCol = DeepCopyExpr(currIndexPkCol) + prevIndexPkCol = plan.DeepCopyExpr(currIndexPkCol) prevLastNodeId = lastNodeId } lastNodeFromIndexTbl := builder.qry.Nodes[lastNodeId] - lastNodeFromIndexTbl.Limit = DeepCopyExpr(scanNode.Limit) - lastNodeFromIndexTbl.Offset = DeepCopyExpr(scanNode.Offset) + lastNodeFromIndexTbl.Limit = plan.DeepCopyExpr(scanNode.Limit) + 
lastNodeFromIndexTbl.Offset = plan.DeepCopyExpr(scanNode.Offset) scanNode.Limit, scanNode.Offset = nil, nil // 3. SELECT * from tbl INNER JOIN ( @@ -90,7 +90,7 @@ func (builder *QueryBuilder) applyIndicesForFiltersUsingMasterIndex(nodeID int32 // (SELECT pk from idx2 WHERE prefix_eq(`__mo_index_idx_col`,serial_full("1","value2")) ) // ON idx1.pk = idx2.pk // ) ON tbl.pk = idx1.pk - wherePkEqPk, _ := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*Expr{ + wherePkEqPk, _ := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*plan.Expr{ { Typ: pkType, Expr: &plan.Expr_Col{ @@ -114,16 +114,16 @@ func (builder *QueryBuilder) applyIndicesForFiltersUsingMasterIndex(nodeID int32 NodeType: plan.Node_JOIN, JoinType: plan.Node_INDEX, Children: []int32{scanNode.NodeId, lastNodeId}, - OnList: []*Expr{wherePkEqPk}, - Limit: DeepCopyExpr(lastNodeFromIndexTbl.Limit), - Offset: DeepCopyExpr(lastNodeFromIndexTbl.Offset), + OnList: []*plan.Expr{wherePkEqPk}, + Limit: plan.DeepCopyExpr(lastNodeFromIndexTbl.Limit), + Offset: plan.DeepCopyExpr(lastNodeFromIndexTbl.Offset), }, builder.ctxByNode[nodeID]) return lastNodeId } func makeIndexTblScan(builder *QueryBuilder, bindCtx *BindContext, filterExp *plan.Expr, - idxTableDef *TableDef, idxObjRef *ObjectRef, scanSnapshot *Snapshot, colDefs []*plan.ColDef) (int32, int32) { + idxTableDef *plan.TableDef, idxObjRef *plan.ObjectRef, scanSnapshot *plan.Snapshot, colDefs []*plan.ColDef) (int32, int32) { // a. 
Scan * WHERE prefix_eq(`__mo_index_idx_col`,serial_full("0","value")) idxScanTag := builder.genNewBindTag() @@ -148,10 +148,11 @@ func makeIndexTblScan(builder *QueryBuilder, bindCtx *BindContext, filterExp *pl args[1], // value }) - filterList, _ = BindFuncExprImplByPlanExpr(builder.GetContext(), "prefix_eq", []*Expr{ + filterList, _ = BindFuncExprImplByPlanExpr(builder.GetContext(), "prefix_eq", []*plan.Expr{ indexKeyCol, // __mo_index_idx_col serialExpr1, // serial_full("0","value") }) + case "between": serialExpr1, _ := BindFuncExprImplByPlanExpr(builder.GetContext(), "serial_full", []*plan.Expr{ makePlan2StringConstExprWithType(getColSeqFromColDef(colDefs[args[0].GetCol().GetColPos()])), // "0" @@ -161,7 +162,22 @@ func makeIndexTblScan(builder *QueryBuilder, bindCtx *BindContext, filterExp *pl makePlan2StringConstExprWithType(getColSeqFromColDef(colDefs[args[0].GetCol().GetColPos()])), // "0" args[2], // value2 }) - filterList, _ = bindFuncExprAndConstFold(builder.GetContext(), builder.compCtx.GetProcess(), "prefix_between", []*Expr{ + filterList, _ = bindFuncExprAndConstFold(builder.GetContext(), builder.compCtx.GetProcess(), "prefix_between", []*plan.Expr{ + indexKeyCol, // __mo_index_idx_col + serialExpr1, // serial_full("0","value1") + serialExpr2, // serial_full("0","value2") + }) + + case "in_range": + serialExpr1, _ := BindFuncExprImplByPlanExpr(builder.GetContext(), "serial_full", []*plan.Expr{ + makePlan2StringConstExprWithType(getColSeqFromColDef(colDefs[args[0].GetCol().GetColPos()])), // "0" + args[1], // value1 + }) + serialExpr2, _ := BindFuncExprImplByPlanExpr(builder.GetContext(), "serial_full", []*plan.Expr{ + makePlan2StringConstExprWithType(getColSeqFromColDef(colDefs[args[0].GetCol().GetColPos()])), // "0" + args[2], // value2 + }) + filterList, _ = bindFuncExprAndConstFold(builder.GetContext(), builder.compCtx.GetProcess(), "prefix_in_range", []*plan.Expr{ indexKeyCol, // __mo_index_idx_col serialExpr1, // serial_full("0","value1") 
serialExpr2, // serial_full("0","value2") @@ -210,7 +226,7 @@ func makeIndexTblScan(builder *QueryBuilder, bindCtx *BindContext, filterExp *pl arg1ForPrefixInVec.Free(mp) arg0AsColNameVec.Free(mp) - filterList, _ = bindFuncExprAndConstFold(builder.GetContext(), builder.compCtx.GetProcess(), "prefix_in", []*Expr{ + filterList, _ = bindFuncExprAndConstFold(builder.GetContext(), builder.compCtx.GetProcess(), "prefix_in", []*plan.Expr{ indexKeyCol, // __mo_index_idx_col arg1ForPrefixInLitVec, // (serial_full("0","value1"), serial_full("0","value2"), serial_full("0","value3")) }) @@ -223,7 +239,7 @@ func makeIndexTblScan(builder *QueryBuilder, bindCtx *BindContext, filterExp *pl // the Storage Engine layer. Otherwise, we will end up scanning all the rows. builder.addNameByColRef(idxScanTag, idxTableDef) - scanId := builder.appendNode(&Node{ + scanId := builder.appendNode(&plan.Node{ NodeType: plan.Node_TABLE_SCAN, TableDef: idxTableDef, ObjRef: idxObjRef, diff --git a/pkg/sql/plan/apply_indices_test.go b/pkg/sql/planner/apply_indices_test.go similarity index 99% rename from pkg/sql/plan/apply_indices_test.go rename to pkg/sql/planner/apply_indices_test.go index b8507ef7660ed..f6746967c7d8e 100644 --- a/pkg/sql/plan/apply_indices_test.go +++ b/pkg/sql/planner/apply_indices_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "testing" diff --git a/pkg/sql/plan/apply_indices_vector.go b/pkg/sql/planner/apply_indices_vector.go similarity index 99% rename from pkg/sql/plan/apply_indices_vector.go rename to pkg/sql/planner/apply_indices_vector.go index 681ef00fcafa9..adbd0cd739960 100644 --- a/pkg/sql/plan/apply_indices_vector.go +++ b/pkg/sql/planner/apply_indices_vector.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import "github.com/matrixorigin/matrixone/pkg/pb/plan" diff --git a/pkg/sql/plan/associative_law.go b/pkg/sql/planner/associative_law.go similarity index 99% rename from pkg/sql/plan/associative_law.go rename to pkg/sql/planner/associative_law.go index af395e8505858..5746d878888e8 100644 --- a/pkg/sql/plan/associative_law.go +++ b/pkg/sql/planner/associative_law.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "fmt" diff --git a/pkg/sql/plan/base_binder.go b/pkg/sql/planner/base_binder.go similarity index 96% rename from pkg/sql/plan/base_binder.go rename to pkg/sql/planner/base_binder.go index b28002886959b..17b2b11b79b80 100644 --- a/pkg/sql/plan/base_binder.go +++ b/pkg/sql/planner/base_binder.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "context" @@ -27,10 +27,10 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/defines" "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/sql/parsers" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" "github.com/matrixorigin/matrixone/pkg/sql/util" "github.com/matrixorigin/matrixone/pkg/util/errutil" "github.com/matrixorigin/matrixone/pkg/vm/process" @@ -52,7 +52,7 @@ var kAlwaysFalseExpr = &plan.Expr{ }, } -func (b *baseBinder) baseBindExpr(astExpr tree.Expr, depth int32, isRoot bool) (expr *Expr, err error) { +func (b *baseBinder) baseBindExpr(astExpr tree.Expr, depth int32, isRoot bool) (expr *plan.Expr, err error) { switch exprImpl := astExpr.(type) { case *tree.NumVal: if d, ok := 
b.impl.(*DefaultBinder); ok { @@ -112,7 +112,7 @@ func (b *baseBinder) baseBindExpr(astExpr tree.Expr, depth int32, isRoot bool) ( curScope := (*tmpScope)[i] if _, ok := curScope[exprImpl.ColName()]; ok { typ := types.T_text.ToType() - expr = &Expr{ + expr = &plan.Expr{ Typ: makePlan2Type(&typ), Expr: &plan.Expr_V{ V: &plan.VarRef{ @@ -137,7 +137,7 @@ func (b *baseBinder) baseBindExpr(astExpr tree.Expr, depth int32, isRoot bool) ( if err != nil { return } - var typ Type + var typ plan.Type typ, err = getTypeFromAst(b.GetContext(), exprImpl.Type) if err != nil { return @@ -172,8 +172,8 @@ func (b *baseBinder) baseBindExpr(astExpr tree.Expr, depth int32, isRoot bool) ( expr, err = b.bindFuncExprImplByAstExpr("isnotfalse", []tree.Expr{exprImpl.Expr}, depth) case *tree.Tuple: - exprs := make([]*Expr, 0, len(exprImpl.Exprs)) - var planItem *Expr + exprs := make([]*plan.Expr, 0, len(exprImpl.Exprs)) + var planItem *plan.Expr for _, astItem := range exprImpl.Exprs { planItem, err = b.impl.BindExpr(astItem, depth, false) if err != nil { @@ -181,7 +181,7 @@ func (b *baseBinder) baseBindExpr(astExpr tree.Expr, depth int32, isRoot bool) ( } exprs = append(exprs, planItem) } - expr = &Expr{ + expr = &plan.Expr{ Expr: &plan.Expr_List{ List: &plan.ExprList{ List: exprs, @@ -205,13 +205,13 @@ func (b *baseBinder) baseBindExpr(astExpr tree.Expr, depth int32, isRoot bool) ( expr, err = b.impl.BindSubquery(exprImpl, isRoot) case *tree.DefaultVal: - return &Expr{ + return &plan.Expr{ Typ: plan.Type{ Id: int32(types.T_bool), NotNullable: true, }, Expr: &plan.Expr_Lit{ - Lit: &Const{ + Lit: &plan.Literal{ Isnull: false, Value: &plan.Literal_Defaultval{ Defaultval: true, @@ -220,9 +220,9 @@ func (b *baseBinder) baseBindExpr(astExpr tree.Expr, depth int32, isRoot bool) ( }, }, nil case *tree.UpdateVal: - return &Expr{ + return &plan.Expr{ Expr: &plan.Expr_Lit{ - Lit: &Const{ + Lit: &plan.Literal{ Isnull: false, Value: &plan.Literal_UpdateVal{ UpdateVal: true, @@ -231,9 +231,9 @@ func (b 
*baseBinder) baseBindExpr(astExpr tree.Expr, depth int32, isRoot bool) ( }, }, nil case *tree.MaxValue: - return &Expr{ + return &plan.Expr{ Expr: &plan.Expr_Max{ - Max: &MaxValue{ + Max: &plan.MaxValue{ Value: "maxvalue", }, }, @@ -280,7 +280,7 @@ func unwrapParenExpr(astExpr tree.Expr) tree.Expr { func (b *baseBinder) baseBindParam(astExpr *tree.ParamExpr, depth int32, isRoot bool) (expr *plan.Expr, err error) { typ := types.T_text.ToType() - return &Expr{ + return &plan.Expr{ Typ: makePlan2Type(&typ), Expr: &plan.Expr_P{ P: &plan.ParamRef{ @@ -292,7 +292,7 @@ func (b *baseBinder) baseBindParam(astExpr *tree.ParamExpr, depth int32, isRoot func (b *baseBinder) baseBindVar(astExpr *tree.VarExpr, depth int32, isRoot bool) (expr *plan.Expr, err error) { typ := types.T_text.ToType() - return &Expr{ + return &plan.Expr{ Typ: makePlan2Type(&typ), Expr: &plan.Expr_V{ V: &plan.VarRef{ @@ -346,7 +346,7 @@ func (b *baseBinder) baseBindColRef(astExpr *tree.UnresolvedName, depth int32, i if binding != nil { relPos = binding.tag colPos = binding.colIdByName[col] - typ = DeepCopyType(binding.types[colPos]) + typ = plan.DeepCopyType(binding.types[colPos]) table = binding.table } else { return nil, moerr.NewInvalidInputf(b.GetContext(), "ambiguous column reference '%v'", name) @@ -406,7 +406,7 @@ func (b *baseBinder) baseBindColRef(astExpr *tree.UnresolvedName, depth int32, i return nil, moerr.NewInvalidInputf(b.GetContext(), "ambiguous column reference '%v'", name) } if colPos != NotFound { - typ = DeepCopyType(binding.types[colPos]) + typ = plan.DeepCopyType(binding.types[colPos]) relPos = binding.tag } else { err = moerr.NewInvalidInputf(localErrCtx, "column '%s' does not exist", name) @@ -426,7 +426,7 @@ func (b *baseBinder) baseBindColRef(astExpr *tree.UnresolvedName, depth int32, i } // bind ast function's args - args := make([]*Expr, len(astArgs)+1) + args := make([]*plan.Expr, len(astArgs)+1) for idx, arg := range astArgs { if idx == len(args)-1 { continue @@ -437,7 
+437,7 @@ func (b *baseBinder) baseBindColRef(astExpr *tree.UnresolvedName, depth int32, i } args[idx] = expr } - args[len(args)-1] = &Expr{ + args[len(args)-1] = &plan.Expr{ Typ: *typ, Expr: &plan.Expr_Col{ Col: &plan.ColRef{ @@ -502,7 +502,7 @@ func (b *baseBinder) baseBindColRef(astExpr *tree.UnresolvedName, depth int32, i return } -func (b *baseBinder) baseBindSubquery(astExpr *tree.Subquery, isRoot bool) (*Expr, error) { +func (b *baseBinder) baseBindSubquery(astExpr *tree.Subquery, isRoot bool) (*plan.Expr, error) { if b.ctx == nil { return nil, moerr.NewInvalidInput(b.GetContext(), "field reference doesn't support SUBQUERY") } @@ -553,7 +553,7 @@ func (b *baseBinder) baseBindSubquery(astExpr *tree.Subquery, isRoot bool) (*Exp return returnExpr, nil } -func (b *baseBinder) bindCaseExpr(astExpr *tree.CaseExpr, depth int32, isRoot bool) (*Expr, error) { +func (b *baseBinder) bindCaseExpr(astExpr *tree.CaseExpr, depth int32, isRoot bool) (*plan.Expr, error) { args := make([]tree.Expr, 0, len(astExpr.Whens)+1) caseExist := astExpr.Expr != nil @@ -576,7 +576,7 @@ func (b *baseBinder) bindCaseExpr(astExpr *tree.CaseExpr, depth int32, isRoot bo return b.bindFuncExprImplByAstExpr("case", args, depth) } -func (b *baseBinder) bindRangeCond(astExpr *tree.RangeCond, depth int32, isRoot bool) (*Expr, error) { +func (b *baseBinder) bindRangeCond(astExpr *tree.RangeCond, depth int32, isRoot bool) (*plan.Expr, error) { if astExpr.Not { // rewrite 'col not between 1, 20' to 'col < 1 or col > 20' newLeftExpr := tree.NewComparisonExpr(tree.LESS_THAN, astExpr.Left, astExpr.From) @@ -593,7 +593,7 @@ func (b *baseBinder) bindRangeCond(astExpr *tree.RangeCond, depth int32, isRoot } } -func (b *baseBinder) bindUnaryExpr(astExpr *tree.UnaryExpr, depth int32, isRoot bool) (*Expr, error) { +func (b *baseBinder) bindUnaryExpr(astExpr *tree.UnaryExpr, depth int32, isRoot bool) (*plan.Expr, error) { switch astExpr.Op { case tree.UNARY_MINUS: return 
b.bindFuncExprImplByAstExpr("unary_minus", []tree.Expr{astExpr.Expr}, depth) @@ -607,7 +607,7 @@ func (b *baseBinder) bindUnaryExpr(astExpr *tree.UnaryExpr, depth int32, isRoot return nil, moerr.NewNYIf(b.GetContext(), "'%v'", astExpr) } -func (b *baseBinder) bindBinaryExpr(astExpr *tree.BinaryExpr, depth int32, isRoot bool) (*Expr, error) { +func (b *baseBinder) bindBinaryExpr(astExpr *tree.BinaryExpr, depth int32, isRoot bool) (*plan.Expr, error) { switch astExpr.Op { case tree.PLUS: return b.bindFuncExprImplByAstExpr("+", []tree.Expr{astExpr.Left, astExpr.Right}, depth) @@ -635,7 +635,7 @@ func (b *baseBinder) bindBinaryExpr(astExpr *tree.BinaryExpr, depth int32, isRoo return nil, moerr.NewNYIf(b.GetContext(), "'%v' operator", astExpr.Op.ToString()) } -func (b *baseBinder) bindComparisonExpr(astExpr *tree.ComparisonExpr, depth int32, isRoot bool) (*Expr, error) { +func (b *baseBinder) bindComparisonExpr(astExpr *tree.ComparisonExpr, depth int32, isRoot bool) (*plan.Expr, error) { var op string leftAst := unwrapParenExpr(astExpr.Left) rightAst := unwrapParenExpr(astExpr.Right) @@ -1077,7 +1077,7 @@ func (b *baseBinder) bindTupleInByAst(leftTuple *tree.Tuple, rightTuple *tree.Tu return newExpr, nil } -func (b *baseBinder) bindFuncExpr(astExpr *tree.FuncExpr, depth int32, isRoot bool) (*Expr, error) { +func (b *baseBinder) bindFuncExpr(astExpr *tree.FuncExpr, depth int32, isRoot bool) (*plan.Expr, error) { funcRef, ok := astExpr.Func.FunctionReference.(*tree.UnresolvedName) if !ok { return nil, moerr.NewNYIf(b.GetContext(), "function expr '%v'", astExpr) @@ -1101,9 +1101,9 @@ func (b *baseBinder) bindFuncExpr(astExpr *tree.FuncExpr, depth int32, isRoot bo return b.bindFuncExprImplByAstExpr(funcName, astExpr.Exprs, depth) } -func (b *baseBinder) bindFullTextMatchExpr(astExpr *tree.FullTextMatchExpr, depth int32, isRoot bool) (*Expr, error) { +func (b *baseBinder) bindFullTextMatchExpr(astExpr *tree.FullTextMatchExpr, depth int32, isRoot bool) (*plan.Expr, error) { - 
args := make([]*Expr, 2+len(astExpr.KeyParts)) + args := make([]*plan.Expr, 2+len(astExpr.KeyParts)) mode := int64(astExpr.Mode) args[0] = makePlan2StringConstExprWithType(astExpr.Pattern, false) @@ -1183,7 +1183,7 @@ func (b *baseBinder) bindFuncExprImplByAstExpr(name string, astArgs []tree.Expr, } // bind ast function's args - var args []*Expr + var args []*plan.Expr if name == "bit_cast" { bitCastExpr := astArgs[0].(*tree.BitCastExpr) binExpr, err := b.impl.BindExpr(bitCastExpr.Expr, depth, false) @@ -1195,14 +1195,14 @@ func (b *baseBinder) bindFuncExprImplByAstExpr(name string, astArgs []tree.Expr, if err != nil { return nil, err } - typeExpr := &Expr{ + typeExpr := &plan.Expr{ Typ: typ, Expr: &plan.Expr_T{ T: &plan.TargetType{}, }, } - args = []*Expr{binExpr, typeExpr} + args = []*plan.Expr{binExpr, typeExpr} } else if name == "serial_extract" { serialExtractExpr := astArgs[0].(*tree.SerialExtractExpr) @@ -1223,7 +1223,7 @@ func (b *baseBinder) bindFuncExprImplByAstExpr(name string, astArgs []tree.Expr, if err != nil { return nil, err } - typeExpr := &Expr{ + typeExpr := &plan.Expr{ Typ: typ, Expr: &plan.Expr_T{ T: &plan.TargetType{}, @@ -1231,9 +1231,9 @@ func (b *baseBinder) bindFuncExprImplByAstExpr(name string, astArgs []tree.Expr, } // 4. return [serialExpr, idxExpr, typeExpr]. 
Used in list_builtIn.go - args = []*Expr{serialExpr, idxExpr, typeExpr} + args = []*plan.Expr{serialExpr, idxExpr, typeExpr} } else { - args = make([]*Expr, len(astArgs)) + args = make([]*plan.Expr, len(astArgs)) for idx, arg := range astArgs { expr, err := b.impl.BindExpr(arg, depth, false) if err != nil { @@ -1329,7 +1329,7 @@ func bindFuncExprImplUdf(b *baseBinder, name string, udf *function.Udf, args []t } func (b *baseBinder) bindPythonUdf(udf *function.Udf, astArgs []tree.Expr, depth int32) (*plan.Expr, error) { - args := make([]*Expr, 2*len(astArgs)+2) + args := make([]*plan.Expr, 2*len(astArgs)+2) // python udf self info and query context args[0] = udf.GetPlanExpr() @@ -1346,17 +1346,17 @@ func (b *baseBinder) bindPythonUdf(udf *function.Udf, astArgs []tree.Expr, depth // function args fArgTypes := udf.GetArgsPlanType() for i, t := range fArgTypes { - args[len(astArgs)+i+1] = &Expr{Typ: *t} + args[len(astArgs)+i+1] = &plan.Expr{Typ: *t} } // function ret fRetType := udf.GetRetPlanType() - args[2*len(astArgs)+1] = &Expr{Typ: *fRetType} + args[2*len(astArgs)+1] = &plan.Expr{Typ: *fRetType} return BindFuncExprImplByPlanExpr(b.GetContext(), "python_user_defined_function", args) } -func bindFuncExprAndConstFold(ctx context.Context, proc *process.Process, name string, args []*Expr) (*plan.Expr, error) { +func bindFuncExprAndConstFold(ctx context.Context, proc *process.Process, name string, args []*plan.Expr) (*plan.Expr, error) { retExpr, err := BindFuncExprImplByPlanExpr(ctx, name, args) if err != nil { return nil, err @@ -1365,7 +1365,7 @@ func bindFuncExprAndConstFold(ctx context.Context, proc *process.Process, name s switch retExpr.GetF().GetFunc().GetObjName() { case "+", "-", "*", "/", "div", "%", "mod", "unary_minus", "unary_plus", "unary_tilde", "cast", "serial", "serial_full": if proc != nil { - tmpexpr, _ := ConstantFold(batch.EmptyForConstFoldBatch, DeepCopyExpr(retExpr), proc, false, true) + tmpexpr, _ := ConstantFold(batch.EmptyForConstFoldBatch, 
plan.DeepCopyExpr(retExpr), proc, false, true) if tmpexpr != nil { retExpr = tmpexpr } @@ -1467,7 +1467,7 @@ func bindFuncExprAndConstFold(ctx context.Context, proc *process.Process, name s return nil, moerr.NewInvalidInput(ctx, "2nd and 3rd arguments not comparable") } if !rangeCheckVal.GetBval() { - retExpr = DeepCopyExpr(kAlwaysFalseExpr) + retExpr = plan.DeepCopyExpr(kAlwaysFalseExpr) } else { retExpr, _ = ConstantFold(batch.EmptyForConstFoldBatch, retExpr, proc, false, true) } @@ -1477,7 +1477,7 @@ func bindFuncExprAndConstFold(ctx context.Context, proc *process.Process, name s between_fallback: fnArgs := retExpr.GetF().Args - leftFn, err := BindFuncExprImplByPlanExpr(ctx, ">=", []*plan.Expr{DeepCopyExpr(fnArgs[0]), fnArgs[1]}) + leftFn, err := BindFuncExprImplByPlanExpr(ctx, ">=", []*plan.Expr{plan.DeepCopyExpr(fnArgs[0]), fnArgs[1]}) if err != nil { return nil, err } @@ -1498,7 +1498,7 @@ between_fallback: return retExpr, nil } -func BindFuncExprImplByPlanExpr(ctx context.Context, name string, args []*Expr) (*plan.Expr, error) { +func BindFuncExprImplByPlanExpr(ctx context.Context, name string, args []*plan.Expr) (*plan.Expr, error) { var err error // deal with some special function @@ -1507,7 +1507,7 @@ func BindFuncExprImplByPlanExpr(ctx context.Context, name string, args []*Expr) if len(args) == 1 { if listExpr, ok := args[0].Expr.(*plan.Expr_List); ok { for i, subExpr := range listExpr.List.List { - newSubExpr, err := BindFuncExprImplByPlanExpr(ctx, "serial", []*Expr{subExpr}) + newSubExpr, err := BindFuncExprImplByPlanExpr(ctx, "serial", []*plan.Expr{subExpr}) if err != nil { return nil, err } @@ -1856,7 +1856,7 @@ func BindFuncExprImplByPlanExpr(ctx context.Context, name string, args []*Expr) rightList.List = inExprList exprType := makePlan2Type(&returnType) exprType.NotNullable = function.DeduceNotNullable(funcID, args) - newExpr = &Expr{ + newExpr = &plan.Expr{ Typ: exprType, Expr: &plan.Expr_F{ F: &plan.Function{ @@ -1872,14 +1872,14 @@ func 
BindFuncExprImplByPlanExpr(ctx context.Context, name string, args []*Expr) //expand the in list to col=a or col=b or ...... if name == "in" { for _, expr := range orExprList { - tmpExpr, err := BindFuncExprImplByPlanExpr(ctx, "=", []*Expr{DeepCopyExpr(args[0]), expr}) + tmpExpr, err := BindFuncExprImplByPlanExpr(ctx, "=", []*plan.Expr{plan.DeepCopyExpr(args[0]), expr}) if err != nil { return nil, err } if newExpr == nil { newExpr = tmpExpr } else { - newExpr, err = BindFuncExprImplByPlanExpr(ctx, "or", []*Expr{newExpr, tmpExpr}) + newExpr, err = BindFuncExprImplByPlanExpr(ctx, "or", []*plan.Expr{newExpr, tmpExpr}) if err != nil { return nil, err } @@ -1887,14 +1887,14 @@ func BindFuncExprImplByPlanExpr(ctx context.Context, name string, args []*Expr) } } else { for _, expr := range orExprList { - tmpExpr, err := BindFuncExprImplByPlanExpr(ctx, "!=", []*Expr{DeepCopyExpr(args[0]), expr}) + tmpExpr, err := BindFuncExprImplByPlanExpr(ctx, "!=", []*plan.Expr{plan.DeepCopyExpr(args[0]), expr}) if err != nil { return nil, err } if newExpr == nil { newExpr = tmpExpr } else { - newExpr, err = BindFuncExprImplByPlanExpr(ctx, "and", []*Expr{newExpr, tmpExpr}) + newExpr, err = BindFuncExprImplByPlanExpr(ctx, "and", []*plan.Expr{newExpr, tmpExpr}) if err != nil { return nil, err } @@ -1929,7 +1929,7 @@ func BindFuncExprImplByPlanExpr(ctx context.Context, name string, args []*Expr) fGet, err := function.GetFunctionByName(ctx, name, argsType) if err != nil { if name == "between" { - leftFn, err := BindFuncExprImplByPlanExpr(ctx, ">=", []*plan.Expr{DeepCopyExpr(args[0]), args[1]}) + leftFn, err := BindFuncExprImplByPlanExpr(ctx, ">=", []*plan.Expr{plan.DeepCopyExpr(args[0]), args[1]}) if err != nil { return nil, err } @@ -2179,7 +2179,7 @@ func BindFuncExprImplByPlanExpr(ctx context.Context, name string, args []*Expr) // return new expr Typ := makePlan2Type(&returnType) Typ.NotNullable = function.DeduceNotNullable(funcID, args) - return &Expr{ + return &plan.Expr{ Expr: 
&plan.Expr_F{ F: &plan.Function{ Func: getFunctionObjRef(funcID, name), @@ -2190,19 +2190,19 @@ func BindFuncExprImplByPlanExpr(ctx context.Context, name string, args []*Expr) }, nil } -func (b *baseBinder) bindNumVal(astExpr *tree.NumVal, typ Type) (*Expr, error) { +func (b *baseBinder) bindNumVal(astExpr *tree.NumVal, typ plan.Type) (*plan.Expr, error) { // over_int64_err := moerr.NewInternalError(b.GetContext(), "", "Constants over int64 will support in future version.") // rewrite the hexnum process logic // for float64, if the number is over 1<<53-1,it will lost, so if typ is float64, // don't cast 0xXXXX as float64, use the uint64 - returnDecimalExpr := func(val string) (*Expr, error) { + returnDecimalExpr := func(val string) (*plan.Expr, error) { if !typ.IsEmpty() { return appendCastBeforeExpr(b.GetContext(), makePlan2StringConstExprWithType(val), typ) } return makePlan2DecimalExprWithType(b.GetContext(), val) } - returnHexNumExpr := func(val string, isBin ...bool) (*Expr, error) { + returnHexNumExpr := func(val string, isBin ...bool) (*plan.Expr, error) { if !typ.IsEmpty() { isFloat := typ.Id == int32(types.T_float32) || typ.Id == int32(types.T_float64) return appendCastBeforeExpr(b.GetContext(), makePlan2StringConstExprWithType(val, isBin[0]), typ, isBin[0], isFloat) @@ -2239,9 +2239,9 @@ func (b *baseBinder) bindNumVal(astExpr *tree.NumVal, typ Type) (*Expr, error) { if err != nil { return nil, err } - return &Expr{ + return &plan.Expr{ Expr: &plan.Expr_Lit{ - Lit: &Const{ + Lit: &plan.Literal{ Isnull: false, Value: &plan.Literal_Decimal64Val{ Decimal64Val: &plan.Decimal64{A: int64(d64)}, @@ -2258,9 +2258,9 @@ func (b *baseBinder) bindNumVal(astExpr *tree.NumVal, typ Type) (*Expr, error) { } a := int64(d128.B0_63) b := int64(d128.B64_127) - return &Expr{ + return &plan.Expr{ Expr: &plan.Expr_Lit{ - Lit: &Const{ + Lit: &plan.Literal{ Isnull: false, Value: &plan.Literal_Decimal128Val{ Decimal128Val: &plan.Decimal128{A: a, B: b}, @@ -2286,9 +2286,9 @@ func 
(b *baseBinder) bindNumVal(astExpr *tree.NumVal, typ Type) (*Expr, error) { if useDecimal64 { d64 := types.Decimal64(d128.B0_63) - return &Expr{ + return &plan.Expr{ Expr: &plan.Expr_Lit{ - Lit: &Const{ + Lit: &plan.Literal{ Isnull: false, Value: &plan.Literal_Decimal64Val{ Decimal64Val: &plan.Decimal64{A: int64(d64)}, @@ -2307,9 +2307,9 @@ func (b *baseBinder) bindNumVal(astExpr *tree.NumVal, typ Type) (*Expr, error) { // Use decimal128 for higher precision a := int64(d128.B0_63) b := int64(d128.B64_127) - return &Expr{ + return &plan.Expr{ Expr: &plan.Expr_Lit{ - Lit: &Const{ + Lit: &plan.Literal{ Isnull: false, Value: &plan.Literal_Decimal128Val{ Decimal128Val: &plan.Decimal128{A: a, B: b}, @@ -2367,7 +2367,7 @@ func (b *baseBinder) GetContext() context.Context { return b.sysCtx } // --- util functions ---- -func appendCastBeforeExpr(ctx context.Context, expr *Expr, toType Type, isBin ...bool) (*Expr, error) { +func appendCastBeforeExpr(ctx context.Context, expr *plan.Expr, toType plan.Type, isBin ...bool) (*plan.Expr, error) { toType.NotNullable = expr.Typ.NotNullable argsType := []types.Type{ makeTypeByPlan2Expr(expr), @@ -2382,11 +2382,11 @@ func appendCastBeforeExpr(ctx context.Context, expr *Expr, toType Type, isBin .. if len(isBin) == 2 && isBin[0] && isBin[1] { typ.Id = int32(types.T_uint64) } - return &Expr{ + return &plan.Expr{ Expr: &plan.Expr_F{ F: &plan.Function{ Func: getFunctionObjRef(fGet.GetEncodedOverloadID(), "cast"), - Args: []*Expr{ + Args: []*plan.Expr{ expr, { Typ: typ, @@ -2401,7 +2401,7 @@ func appendCastBeforeExpr(ctx context.Context, expr *Expr, toType Type, isBin .. 
}, nil } -func resetDateFunctionArgs(ctx context.Context, dateExpr *Expr, intervalExpr *Expr) ([]*Expr, error) { +func resetDateFunctionArgs(ctx context.Context, dateExpr *plan.Expr, intervalExpr *plan.Expr) ([]*plan.Expr, error) { firstExpr := intervalExpr.GetList().List[0] secondExpr := intervalExpr.GetList().List[1] @@ -2445,7 +2445,7 @@ func resetDateFunctionArgs(ctx context.Context, dateExpr *Expr, intervalExpr *Ex } } } - return []*Expr{ + return []*plan.Expr{ dateExpr, makePlan2Int64ConstExprWithType(returnNum), makePlan2Int64ConstExprWithType(int64(returnType)), @@ -2551,7 +2551,7 @@ func resetDateFunctionArgs(ctx context.Context, dateExpr *Expr, intervalExpr *Ex default: finalValue = int64(floatVal) } - return []*Expr{ + return []*plan.Expr{ dateExpr, makePlan2Int64ConstExprWithType(finalValue), // Use MicroSecond type since we've converted to microseconds @@ -2565,14 +2565,14 @@ func resetDateFunctionArgs(ctx context.Context, dateExpr *Expr, intervalExpr *Ex return nil, err } - return []*Expr{ + return []*plan.Expr{ dateExpr, numberExpr, makePlan2Int64ConstExprWithType(int64(intervalType)), }, nil } -func resetDateFunction(ctx context.Context, dateExpr *Expr, intervalExpr *Expr) ([]*Expr, error) { +func resetDateFunction(ctx context.Context, dateExpr *plan.Expr, intervalExpr *plan.Expr) ([]*plan.Expr, error) { // MySQL behavior: NULL literal as interval argument should return syntax error if isNullExpr(intervalExpr) { return nil, moerr.NewSyntaxError(ctx, "You have an error in your SQL syntax; check the manual that corresponds to your MySQL server version for the right syntax to use near 'null)' at line 1") @@ -2582,15 +2582,15 @@ func resetDateFunction(ctx context.Context, dateExpr *Expr, intervalExpr *Expr) return resetDateFunctionArgs(ctx, dateExpr, intervalExpr) } list := &plan.ExprList{ - List: make([]*Expr, 2), + List: make([]*plan.Expr, 2), } list.List[0] = intervalExpr strType := &plan.Type{ Id: int32(types.T_char), } - strExpr := &Expr{ + strExpr 
:= &plan.Expr{ Expr: &plan.Expr_Lit{ - Lit: &Const{ + Lit: &plan.Literal{ Value: &plan.Literal_Sval{ Sval: "day", }, @@ -2602,17 +2602,17 @@ func resetDateFunction(ctx context.Context, dateExpr *Expr, intervalExpr *Expr) expr := &plan.Expr_List{ List: list, } - listExpr := &Expr{ + listExpr := &plan.Expr{ Expr: expr, } return resetDateFunctionArgs(ctx, dateExpr, listExpr) } -func resetIntervalFunction(ctx context.Context, intervalExpr *Expr) ([]*Expr, error) { +func resetIntervalFunction(ctx context.Context, intervalExpr *plan.Expr) ([]*plan.Expr, error) { return resetIntervalFunctionArgs(ctx, intervalExpr) } -func resetIntervalFunctionArgs(ctx context.Context, intervalExpr *Expr) ([]*Expr, error) { +func resetIntervalFunctionArgs(ctx context.Context, intervalExpr *plan.Expr) ([]*plan.Expr, error) { firstExpr := intervalExpr.GetList().List[0] secondExpr := intervalExpr.GetList().List[1] @@ -2639,7 +2639,7 @@ func resetIntervalFunctionArgs(ctx context.Context, intervalExpr *Expr) ([]*Expr returnNum = math.MaxInt64 returnType = intervalType } - return []*Expr{ + return []*plan.Expr{ makePlan2Int64ConstExprWithType(returnNum), makePlan2Int64ConstExprWithType(int64(returnType)), }, nil @@ -2707,7 +2707,7 @@ func resetIntervalFunctionArgs(ctx context.Context, intervalExpr *Expr) ([]*Expr default: finalValue = int64(floatVal) } - return []*Expr{ + return []*plan.Expr{ makePlan2Int64ConstExprWithType(finalValue), // Use MicroSecond type since we've converted to microseconds makePlan2Int64ConstExprWithType(int64(types.MicroSecond)), @@ -2720,7 +2720,7 @@ func resetIntervalFunctionArgs(ctx context.Context, intervalExpr *Expr) ([]*Expr return nil, err } - return []*Expr{ + return []*plan.Expr{ numberExpr, makePlan2Int64ConstExprWithType(int64(intervalType)), }, nil diff --git a/pkg/sql/plan/base_binder_date_sub_decimal_test.go b/pkg/sql/planner/base_binder_date_sub_decimal_test.go similarity index 99% rename from pkg/sql/plan/base_binder_date_sub_decimal_test.go rename to 
pkg/sql/planner/base_binder_date_sub_decimal_test.go index 567c78bd52f61..713c3683877c6 100644 --- a/pkg/sql/plan/base_binder_date_sub_decimal_test.go +++ b/pkg/sql/planner/base_binder_date_sub_decimal_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "context" diff --git a/pkg/sql/plan/base_binder_reset_interval_test.go b/pkg/sql/planner/base_binder_reset_interval_test.go similarity index 99% rename from pkg/sql/plan/base_binder_reset_interval_test.go rename to pkg/sql/planner/base_binder_reset_interval_test.go index dc44101840e22..8e035e866db92 100644 --- a/pkg/sql/plan/base_binder_reset_interval_test.go +++ b/pkg/sql/planner/base_binder_reset_interval_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "context" diff --git a/pkg/sql/plan/base_binder_round_precision_test.go b/pkg/sql/planner/base_binder_round_precision_test.go similarity index 99% rename from pkg/sql/plan/base_binder_round_precision_test.go rename to pkg/sql/planner/base_binder_round_precision_test.go index cb63fa77ca783..de1901844d0a9 100644 --- a/pkg/sql/plan/base_binder_round_precision_test.go +++ b/pkg/sql/planner/base_binder_round_precision_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "context" diff --git a/pkg/sql/plan/base_binder_timestampadd_test.go b/pkg/sql/planner/base_binder_timestampadd_test.go similarity index 99% rename from pkg/sql/plan/base_binder_timestampadd_test.go rename to pkg/sql/planner/base_binder_timestampadd_test.go index 0f304a8e9c109..08a1adc872285 100644 --- a/pkg/sql/plan/base_binder_timestampadd_test.go +++ b/pkg/sql/planner/base_binder_timestampadd_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "context" diff --git a/pkg/sql/plan/bind_context.go b/pkg/sql/planner/bind_context.go similarity index 99% rename from pkg/sql/plan/bind_context.go rename to pkg/sql/planner/bind_context.go index de380a640586c..9dd78123df4ac 100644 --- a/pkg/sql/plan/bind_context.go +++ b/pkg/sql/planner/bind_context.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "context" diff --git a/pkg/sql/plan/bind_delete.go b/pkg/sql/planner/bind_delete.go similarity index 96% rename from pkg/sql/plan/bind_delete.go rename to pkg/sql/planner/bind_delete.go index 43e153a9764ff..4103d99def865 100644 --- a/pkg/sql/plan/bind_delete.go +++ b/pkg/sql/planner/bind_delete.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "fmt" @@ -266,15 +266,15 @@ func (builder *QueryBuilder) bindDelete(ctx CompilerContext, stmt *tree.Delete, partitionPos = colName2Idx[i][colName] } updateCtx := &plan.UpdateCtx{ - TableDef: DeepCopyTableDef(tableDef, true), - ObjRef: DeepCopyObjectRef(dmlCtx.objRefs[i]), + TableDef: plan.DeepCopyTableDef(tableDef, true), + ObjRef: plan.DeepCopyObjectRef(dmlCtx.objRefs[i]), } for _, col := range tableDef.Cols { if col.Name == tableDef.Pkey.PkeyColName { lockTarget := &plan.LockTarget{ TableId: tableDef.TblId, - ObjRef: DeepCopyObjectRef(dmlCtx.objRefs[i]), + ObjRef: plan.DeepCopyObjectRef(dmlCtx.objRefs[i]), PrimaryColIdxInBat: pkPos, PrimaryColRelPos: selectNodeTag, PrimaryColTyp: col.Typ, @@ -323,7 +323,7 @@ func (builder *QueryBuilder) bindDelete(ctx CompilerContext, stmt *tree.Delete, if col.Name == idxNode.TableDef.Pkey.PkeyColName { lockTargets = append(lockTargets, &plan.LockTarget{ TableId: idxNode.TableDef.TblId, - ObjRef: DeepCopyObjectRef(idxNode.ObjRef), + ObjRef: plan.DeepCopyObjectRef(idxNode.ObjRef), PrimaryColIdxInBat: pkPos, PrimaryColRelPos: idxNode.BindingTags[0], PrimaryColTyp: col.Typ, @@ -334,8 +334,8 @@ func (builder *QueryBuilder) bindDelete(ctx CompilerContext, stmt *tree.Delete, } dmlNode.UpdateCtxList = append(dmlNode.UpdateCtxList, &plan.UpdateCtx{ - TableDef: DeepCopyTableDef(idxNode.TableDef, true), - ObjRef: DeepCopyObjectRef(idxNode.ObjRef), + TableDef: plan.DeepCopyTableDef(idxNode.TableDef, true), + ObjRef: plan.DeepCopyObjectRef(idxNode.ObjRef), DeleteCols: []plan.ColRef{ { RelPos: idxNode.BindingTags[0], diff --git a/pkg/sql/plan/bind_insert.go b/pkg/sql/planner/bind_insert.go similarity index 98% rename from pkg/sql/plan/bind_insert.go rename to pkg/sql/planner/bind_insert.go index 04bdb26f35049..e95bf7479d19c 100644 --- a/pkg/sql/plan/bind_insert.go +++ b/pkg/sql/planner/bind_insert.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations 
under the License. -package plan +package planner import ( "fmt" @@ -43,7 +43,7 @@ func (builder *QueryBuilder) bindInsert(stmt *tree.Insert, bindCtx *BindContext) builder.isRestoreByTs = true } oldSnapshot := builder.compCtx.GetSnapshot() - builder.compCtx.SetSnapshot(&Snapshot{ + builder.compCtx.SetSnapshot(&plan.Snapshot{ Tenant: &plan.SnapshotTenant{ TenantName: "xxx", TenantID: stmt.FromDataTenantID, @@ -179,7 +179,7 @@ func (builder *QueryBuilder) appendDedupAndMultiUpdateNodesForBindInsert( for _, col := range tableDef.Cols { if col.OnUpdate != nil && col.OnUpdate.Expr != nil && updateExprs[col.Name] == nil { - newDefExpr := DeepCopyExpr(col.OnUpdate.Expr) + newDefExpr := plan.DeepCopyExpr(col.OnUpdate.Expr) err = replaceFuncId(builder.GetContext(), newDefExpr) if err != nil { return 0, err @@ -220,7 +220,7 @@ func (builder *QueryBuilder) appendDedupAndMultiUpdateNodesForBindInsert( if col.Name == pkName && pkName != catalog.FakePrimaryKeyColName { lockTarget := &plan.LockTarget{ TableId: tableDef.TblId, - ObjRef: DeepCopyObjectRef(objRef), + ObjRef: plan.DeepCopyObjectRef(objRef), PrimaryColIdxInBat: colName2Idx[tableDef.Name+"."+col.Name], PrimaryColRelPos: selectTag, PrimaryColTyp: col.Typ, @@ -588,7 +588,7 @@ func (builder *QueryBuilder) appendDedupAndMultiUpdateNodesForBindInsert( }, } - rightExpr := DeepCopyExpr(appendedUniqueProjs[idxTableDefs[i].Name+"."+catalog.IndexTableIndexColName]) + rightExpr := plan.DeepCopyExpr(appendedUniqueProjs[idxTableDefs[i].Name+"."+catalog.IndexTableIndexColName]) joinCond, _ := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*plan.Expr{ leftExpr, @@ -974,7 +974,7 @@ func (builder *QueryBuilder) appendDedupAndMultiUpdateNodesForBindInsert( // If the INSERT statement specifies the columns, it validates the column names against the table definition // and returns an error if any of the column names are invalid. // The function returns the list of insert columns and an error, if any. 
-func (builder *QueryBuilder) getInsertColsFromStmt(astCols tree.IdentifierList, tableDef *TableDef) ([]string, error) { +func (builder *QueryBuilder) getInsertColsFromStmt(astCols tree.IdentifierList, tableDef *plan.TableDef) ([]string, error) { var insertColNames []string colToIdx := make(map[string]int) for i, col := range tableDef.Cols { @@ -1118,9 +1118,9 @@ func (builder *QueryBuilder) initInsertReplaceStmt(bindCtx *BindContext, astRows func (builder *QueryBuilder) appendNodesForInsertStmt( bindCtx *BindContext, lastNodeID int32, - tableDef *TableDef, - objRef *ObjectRef, - insertColToExpr map[string]*Expr, + tableDef *plan.TableDef, + objRef *plan.ObjectRef, + insertColToExpr map[string]*plan.Expr, ) (int32, map[string]int32, []bool, error) { colName2Idx := make(map[string]int32) hasAutoCol := false @@ -1162,7 +1162,7 @@ func (builder *QueryBuilder) appendNodesForInsertStmt( //args := make([]*plan.Expr, len(tableDef.Pkey.Names)) // //for k, part := range tableDef.Pkey.Names { - // args[k] = DeepCopyExpr(insertColToExpr[part]) + // args[k] = plan.DeepCopyExpr(insertColToExpr[part]) //} // //compPkeyExpr, _ = BindFuncExprImplByPlanExpr(builder.GetContext(), "serial", args) @@ -1181,7 +1181,7 @@ func (builder *QueryBuilder) appendNodesForInsertStmt( //args := make([]*plan.Expr, len(names)) // //for k, part := range names { - // args[k] = DeepCopyExpr(insertColToExpr[part]) + // args[k] = plan.DeepCopyExpr(insertColToExpr[part]) //} // //clusterByExpr, _ = BindFuncExprImplByPlanExpr(builder.GetContext(), "serial_full", args) @@ -1281,7 +1281,7 @@ func (builder *QueryBuilder) appendNodesForInsertStmt( func (builder *QueryBuilder) buildValueScan( isAllDefault bool, bindCtx *BindContext, - tableDef *TableDef, + tableDef *plan.TableDef, stmt *tree.ValuesClause, colNames []string, ) (int32, error) { diff --git a/pkg/sql/plan/bind_load.go b/pkg/sql/planner/bind_load.go similarity index 98% rename from pkg/sql/plan/bind_load.go rename to pkg/sql/planner/bind_load.go 
index 3d41757ff5904..07dbc5d680502 100644 --- a/pkg/sql/plan/bind_load.go +++ b/pkg/sql/planner/bind_load.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "encoding/json" @@ -59,7 +59,7 @@ func (builder *QueryBuilder) bindExternalScan( return -1, nil, err } - tableDef := DeepCopyTableDef(dmlCtx.tableDefs[0], true) + tableDef := plan.DeepCopyTableDef(dmlCtx.tableDefs[0], true) objRef := dmlCtx.objRefs[0] // load with columnlist will copy a new tableDef @@ -68,7 +68,7 @@ func (builder *QueryBuilder) bindExternalScan( if stmt.Param.ScanType != tree.INLINE || len(stmt.Param.Tail.ColumnList) > 0 { if len(stmt.Param.Tail.ColumnList) > 0 { colToIndex := make(map[string]int32, 0) - var newCols []*ColDef + var newCols []*plan.ColDef colPos := 0 for i, col := range stmt.Param.Tail.ColumnList { diff --git a/pkg/sql/plan/bind_replace.go b/pkg/sql/planner/bind_replace.go similarity index 98% rename from pkg/sql/plan/bind_replace.go rename to pkg/sql/planner/bind_replace.go index 597d1667353fc..d58d66182e140 100644 --- a/pkg/sql/plan/bind_replace.go +++ b/pkg/sql/planner/bind_replace.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "fmt" @@ -599,9 +599,9 @@ func (builder *QueryBuilder) appendDedupAndMultiUpdateNodesForBindReplace( func (builder *QueryBuilder) appendNodesForReplaceStmt( bindCtx *BindContext, lastNodeID int32, - tableDef *TableDef, - objRef *ObjectRef, - insertColToExpr map[string]*Expr, + tableDef *plan.TableDef, + objRef *plan.ObjectRef, + insertColToExpr map[string]*plan.Expr, ) (int32, map[string]int32, []bool, error) { colName2Idx := make(map[string]int32) hasAutoCol := false @@ -643,7 +643,7 @@ func (builder *QueryBuilder) appendNodesForReplaceStmt( //args := make([]*plan.Expr, len(tableDef.Pkey.Names)) // //for k, part := range tableDef.Pkey.Names { - // args[k] = DeepCopyExpr(insertColToExpr[part]) + // args[k] = plan.DeepCopyExpr(insertColToExpr[part]) //} // //compPkeyExpr, _ = BindFuncExprImplByPlanExpr(builder.GetContext(), "serial", args) @@ -662,7 +662,7 @@ func (builder *QueryBuilder) appendNodesForReplaceStmt( //args := make([]*plan.Expr, len(names)) // //for k, part := range names { - // args[k] = DeepCopyExpr(insertColToExpr[part]) + // args[k] = plan.DeepCopyExpr(insertColToExpr[part]) //} // //clusterByExpr, _ = BindFuncExprImplByPlanExpr(builder.GetContext(), "serial_full", args) @@ -738,7 +738,7 @@ func (builder *QueryBuilder) appendNodesForReplaceStmt( errMsg := fmt.Sprintf("bind insert err, can not find colName = %s", idxDef.Parts[k]) return 0, nil, nil, moerr.NewInternalError(builder.GetContext(), errMsg) } - args[k] = DeepCopyExpr(projList2[colPos]) + args[k] = plan.DeepCopyExpr(projList2[colPos]) } funcName := "serial" diff --git a/pkg/sql/plan/bind_update.go b/pkg/sql/planner/bind_update.go similarity index 99% rename from pkg/sql/plan/bind_update.go rename to pkg/sql/planner/bind_update.go index 3f692fcfbb5f4..e5aa2c11745d3 100644 --- a/pkg/sql/plan/bind_update.go +++ b/pkg/sql/planner/bind_update.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under 
the License. -package plan +package planner import ( "fmt" @@ -221,7 +221,7 @@ func (builder *QueryBuilder) bindUpdate(stmt *tree.Update, bindCtx *BindContext) } } else { if col.OnUpdate != nil && col.OnUpdate.Expr != nil { - newDefExpr := DeepCopyExpr(col.OnUpdate.Expr) + newDefExpr := plan.DeepCopyExpr(col.OnUpdate.Expr) err = replaceFuncId(builder.GetContext(), newDefExpr) oldPos := oldColName2Idx[alias+"."+col.Name] diff --git a/pkg/sql/plan/binding.go b/pkg/sql/planner/binding.go similarity index 98% rename from pkg/sql/plan/binding.go rename to pkg/sql/planner/binding.go index e6855e36c4eac..ab29fe8f6c340 100644 --- a/pkg/sql/plan/binding.go +++ b/pkg/sql/planner/binding.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/pb/plan" diff --git a/pkg/sql/plan/build.go b/pkg/sql/planner/build.go similarity index 93% rename from pkg/sql/plan/build.go rename to pkg/sql/planner/build.go index e8c22c95dc4e1..74a6ee97a5fe2 100644 --- a/pkg/sql/plan/build.go +++ b/pkg/sql/planner/build.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "context" @@ -27,7 +27,7 @@ import ( v2 "github.com/matrixorigin/matrixone/pkg/util/metric/v2" ) -func bindAndOptimizeSelectQuery(stmtType plan.Query_StatementType, ctx CompilerContext, stmt *tree.Select, isPrepareStmt bool, skipStats bool) (*Plan, error) { +func bindAndOptimizeSelectQuery(stmtType plan.Query_StatementType, ctx CompilerContext, stmt *tree.Select, isPrepareStmt bool, skipStats bool) (*plan.Plan, error) { start := time.Now() defer func() { v2.TxnStatementBuildSelectHistogram.Observe(time.Since(start).Seconds()) @@ -51,14 +51,14 @@ func bindAndOptimizeSelectQuery(stmtType plan.Query_StatementType, ctx CompilerC if err != nil { return nil, err } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Query{ Query: query, }, }, err } -func bindAndOptimizeInsertQuery(ctx CompilerContext, stmt *tree.Insert, isPrepareStmt bool, skipStats bool) (*Plan, error) { +func bindAndOptimizeInsertQuery(ctx CompilerContext, stmt *tree.Insert, isPrepareStmt bool, skipStats bool) (*plan.Plan, error) { start := time.Now() defer func() { v2.TxnStatementBuildInsertHistogram.Observe(time.Since(start).Seconds()) @@ -86,14 +86,14 @@ func bindAndOptimizeInsertQuery(ctx CompilerContext, stmt *tree.Insert, isPrepar if err != nil { return nil, err } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Query{ Query: query, }, }, err } -func bindAndOptimizeReplaceQuery(ctx CompilerContext, stmt *tree.Replace, isPrepareStmt bool, skipStats bool) (*Plan, error) { +func bindAndOptimizeReplaceQuery(ctx CompilerContext, stmt *tree.Replace, isPrepareStmt bool, skipStats bool) (*plan.Plan, error) { start := time.Now() defer func() { v2.TxnStatementBuildInsertHistogram.Observe(time.Since(start).Seconds()) @@ -121,14 +121,14 @@ func bindAndOptimizeReplaceQuery(ctx CompilerContext, stmt *tree.Replace, isPrep if err != nil { return nil, err } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Query{ Query: query, }, }, err } -func 
bindAndOptimizeLoadQuery(ctx CompilerContext, stmt *tree.Load, isPrepareStmt bool, skipStats bool) (*Plan, error) { +func bindAndOptimizeLoadQuery(ctx CompilerContext, stmt *tree.Load, isPrepareStmt bool, skipStats bool) (*plan.Plan, error) { // return buildLoad(stmt, ctx, isPrepareStmt) start := time.Now() defer func() { @@ -156,14 +156,14 @@ func bindAndOptimizeLoadQuery(ctx CompilerContext, stmt *tree.Load, isPrepareStm if err != nil { return nil, err } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Query{ Query: query, }, }, err } -func bindAndOptimizeDeleteQuery(ctx CompilerContext, stmt *tree.Delete, isPrepareStmt bool, skipStats bool) (*Plan, error) { +func bindAndOptimizeDeleteQuery(ctx CompilerContext, stmt *tree.Delete, isPrepareStmt bool, skipStats bool) (*plan.Plan, error) { start := time.Now() defer func() { v2.TxnStatementBuildDeleteHistogram.Observe(time.Since(start).Seconds()) @@ -190,14 +190,14 @@ func bindAndOptimizeDeleteQuery(ctx CompilerContext, stmt *tree.Delete, isPrepar if err != nil { return nil, err } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Query{ Query: query, }, }, err } -func bindAndOptimizeUpdateQuery(ctx CompilerContext, stmt *tree.Update, isPrepareStmt bool, skipStats bool) (*Plan, error) { +func bindAndOptimizeUpdateQuery(ctx CompilerContext, stmt *tree.Update, isPrepareStmt bool, skipStats bool) (*plan.Plan, error) { start := time.Now() defer func() { v2.TxnStatementBuildDeleteHistogram.Observe(time.Since(start).Seconds()) @@ -224,14 +224,14 @@ func bindAndOptimizeUpdateQuery(ctx CompilerContext, stmt *tree.Update, isPrepar if err != nil { return nil, err } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Query{ Query: query, }, }, err } -func buildExplainPlan(ctx CompilerContext, stmt tree.Statement, isPrepareStmt bool) (*Plan, error) { +func buildExplainPlan(ctx CompilerContext, stmt tree.Statement, isPrepareStmt bool) (*plan.Plan, error) { start := time.Now() defer func() { 
v2.TxnStatementBuildExplainHistogram.Observe(time.Since(start).Seconds()) @@ -262,15 +262,15 @@ func buildExplainPlan(ctx CompilerContext, stmt tree.Statement, isPrepareStmt bo return plan, nil } -func buildExplainAnalyze(ctx CompilerContext, stmt *tree.ExplainAnalyze, isPrepareStmt bool) (*Plan, error) { +func buildExplainAnalyze(ctx CompilerContext, stmt *tree.ExplainAnalyze, isPrepareStmt bool) (*plan.Plan, error) { return buildExplainPlan(ctx, stmt.Statement, isPrepareStmt) } -func buildExplainPhyPlan(ctx CompilerContext, stmt *tree.ExplainPhyPlan, isPrepareStmt bool) (*Plan, error) { +func buildExplainPhyPlan(ctx CompilerContext, stmt *tree.ExplainPhyPlan, isPrepareStmt bool) (*plan.Plan, error) { return buildExplainPlan(ctx, stmt.Statement, isPrepareStmt) } -func BuildPlan(ctx CompilerContext, stmt tree.Statement, isPrepareStmt bool) (*Plan, error) { +func BuildPlan(ctx CompilerContext, stmt tree.Statement, isPrepareStmt bool) (*plan.Plan, error) { start := time.Now() defer func() { v2.TxnStatementBuildPlanHistogram.Observe(time.Since(start).Seconds()) @@ -424,12 +424,12 @@ func BuildPlan(ctx CompilerContext, stmt tree.Statement, isPrepareStmt bool) (*P } // GetResultColumnsFromPlan -func GetResultColumnsFromPlan(p *Plan) []*ColDef { - getResultColumnsByProjectionlist := func(query *Query) []*ColDef { +func GetResultColumnsFromPlan(p *plan.Plan) []*plan.ColDef { + getResultColumnsByProjectionlist := func(query *plan.Query) []*plan.ColDef { lastNode := query.Nodes[query.Steps[len(query.Steps)-1]] - columns := make([]*ColDef, len(lastNode.ProjectList)) + columns := make([]*plan.ColDef, len(lastNode.ProjectList)) for idx, expr := range lastNode.ProjectList { - columns[idx] = &ColDef{ + columns[idx] = &plan.ColDef{ Name: query.Headings[idx], Typ: expr.Typ, } @@ -465,7 +465,7 @@ func GetResultColumnsFromPlan(p *Plan) []*ColDef { Id: int32(types.T_varchar), Width: 1024, } - return []*ColDef{ + return []*plan.ColDef{ {Typ: typ, Name: "Variable_name"}, {Typ: typ, 
Name: "Value"}, } diff --git a/pkg/sql/plan/build_alter_add_column.go b/pkg/sql/planner/build_alter_add_column.go similarity index 95% rename from pkg/sql/plan/build_alter_add_column.go rename to pkg/sql/planner/build_alter_add_column.go index 881b9a4926a0e..1f822ac87ebf5 100644 --- a/pkg/sql/plan/build_alter_add_column.go +++ b/pkg/sql/planner/build_alter_add_column.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "context" @@ -75,20 +75,20 @@ func AddColumn( } // checkModifyNewColumn Check the position information of the newly formed column and place the new column in the target location -func handleAddColumnPosition(ctx context.Context, tableDef *TableDef, newCol *ColDef, pos *tree.ColumnPosition) error { +func handleAddColumnPosition(ctx context.Context, tableDef *plan.TableDef, newCol *plan.ColDef, pos *tree.ColumnPosition) error { if pos != nil && pos.Typ != tree.ColumnPositionNone { targetPos, err := findPositionRelativeColumn(ctx, tableDef.Cols, pos) if err != nil { return err } - tableDef.Cols = append(tableDef.Cols[:targetPos], append([]*ColDef{newCol}, tableDef.Cols[targetPos:]...)...) + tableDef.Cols = append(tableDef.Cols[:targetPos], append([]*plan.ColDef{newCol}, tableDef.Cols[targetPos:]...)...) 
} else { tableDef.Cols = append(tableDef.Cols, newCol) } return nil } -func buildAddColumnAndConstraint(ctx CompilerContext, alterPlan *plan.AlterTable, specNewColumn *tree.ColumnTableDef, colType plan.Type) (*ColDef, error) { +func buildAddColumnAndConstraint(ctx CompilerContext, alterPlan *plan.AlterTable, specNewColumn *tree.ColumnTableDef, colType plan.Type) (*plan.ColDef, error) { newColName := specNewColumn.Name.ColName() newColNameOrigin := specNewColumn.Name.ColNameOrigin() // Check if the new column name is valid and conflicts with internal hidden columns @@ -97,7 +97,7 @@ func buildAddColumnAndConstraint(ctx CompilerContext, alterPlan *plan.AlterTable return nil, err } - newCol := &ColDef{ + newCol := &plan.ColDef{ ColId: math.MaxUint64, //Primary: originalCol.Primary, //NotNull: originalCol.NotNull, @@ -125,7 +125,7 @@ func buildAddColumnAndConstraint(ctx CompilerContext, alterPlan *plan.AlterTable } else if alterPlan.CopyTableDef.ClusterBy != nil && alterPlan.CopyTableDef.ClusterBy.Name != "" { return nil, moerr.NewNotSupported(ctx.GetContext(), "cluster by with primary key is not support") } else { - alterPlan.CopyTableDef.Pkey = &PrimaryKeyDef{ + alterPlan.CopyTableDef.Pkey = &plan.PrimaryKeyDef{ Names: []string{newColName}, PkeyColName: newColName, } @@ -267,7 +267,7 @@ func checkUniqueKeyPartType(ctx context.Context, colType plan.Type, columnName s return nil } -func checkAddColumWithUniqueKey(ctx context.Context, tableDef *TableDef, uniKey *tree.UniqueIndex) (*plan.IndexDef, error) { +func checkAddColumWithUniqueKey(ctx context.Context, tableDef *plan.TableDef, uniKey *tree.UniqueIndex) (*plan.IndexDef, error) { indexName := uniKey.GetIndexName() if strings.EqualFold(indexName, PrimaryKeyName) { return nil, moerr.NewErrWrongNameForIndex(ctx, indexName) @@ -305,7 +305,7 @@ func checkAddColumWithUniqueKey(ctx context.Context, tableDef *TableDef, uniKey // findPositionRelativeColumn returns a position relative to the position of the add/modify/change 
column. func findPositionRelativeColumn( ctx context.Context, - cols []*ColDef, + cols []*plan.ColDef, pos *tree.ColumnPosition, ) (int, error) { position := len(cols) @@ -378,7 +378,7 @@ func DropColumn( return column.Primary, nil } -func checkVisibleColumnCnt(ctx context.Context, tblInfo *TableDef, addCount, dropCount int) error { +func checkVisibleColumnCnt(ctx context.Context, tblInfo *plan.TableDef, addCount, dropCount int) error { visibleColumCnt := 0 for _, column := range tblInfo.Cols { if !column.Hidden { @@ -395,7 +395,7 @@ func checkVisibleColumnCnt(ctx context.Context, tblInfo *TableDef, addCount, dro return moerr.NewErrCantRemoveAllFields(ctx) } -func handleDropColumnWithIndex(ctx context.Context, colName string, tbInfo *TableDef) error { +func handleDropColumnWithIndex(ctx context.Context, colName string, tbInfo *plan.TableDef) error { for i := 0; i < len(tbInfo.Indexes); i++ { indexInfo := tbInfo.Indexes[i] indexInfo.Parts = RemoveIf[string](indexInfo.Parts, func(t string) bool { @@ -450,7 +450,7 @@ func handleDropColumnWithIndex(ctx context.Context, colName string, tbInfo *Tabl return nil } -func handleDropColumnWithPrimaryKey(ctx context.Context, colName string, tbInfo *TableDef) error { +func handleDropColumnWithPrimaryKey(ctx context.Context, colName string, tbInfo *plan.TableDef) error { if tbInfo.Pkey != nil && tbInfo.Pkey.PkeyColName == catalog.FakePrimaryKeyColName { return nil } else { @@ -474,7 +474,7 @@ func handleDropColumnWithPrimaryKey(ctx context.Context, colName string, tbInfo } } -func checkDropColumnWithForeignKey(ctx CompilerContext, tbInfo *TableDef, targetCol *ColDef) error { +func checkDropColumnWithForeignKey(ctx CompilerContext, tbInfo *plan.TableDef, targetCol *plan.ColDef) error { colName := targetCol.Name for _, fkInfo := range tbInfo.Fkeys { for _, colId := range fkInfo.Cols { @@ -510,15 +510,15 @@ func checkDropColumnWithForeignKey(ctx CompilerContext, tbInfo *TableDef, target } // checkModifyNewColumn Check the position 
information of the newly formed column and place the new column in the target location -func handleDropColumnPosition(ctx context.Context, tableDef *TableDef, col *ColDef) error { - tableDef.Cols = RemoveIf[*ColDef](tableDef.Cols, func(t *ColDef) bool { +func handleDropColumnPosition(ctx context.Context, tableDef *plan.TableDef, col *plan.ColDef) error { + tableDef.Cols = RemoveIf[*plan.ColDef](tableDef.Cols, func(t *plan.ColDef) bool { return t.Name == col.Name }) return nil } // handleDropColumnWithClusterBy Process the cluster by table. If the cluster by key name is deleted, proceed with the process -func handleDropColumnWithClusterBy(ctx context.Context, copyTableDef *TableDef, originCol *ColDef) error { +func handleDropColumnWithClusterBy(ctx context.Context, copyTableDef *plan.TableDef, originCol *plan.ColDef) error { if copyTableDef.ClusterBy != nil && copyTableDef.ClusterBy.Name != "" { clusterBy := copyTableDef.ClusterBy var clNames []string diff --git a/pkg/sql/plan/build_alter_add_column_test.go b/pkg/sql/planner/build_alter_add_column_test.go similarity index 89% rename from pkg/sql/plan/build_alter_add_column_test.go rename to pkg/sql/planner/build_alter_add_column_test.go index 944edc8f57025..ab9b4930bd234 100644 --- a/pkg/sql/plan/build_alter_add_column_test.go +++ b/pkg/sql/planner/build_alter_add_column_test.go @@ -12,18 +12,19 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "context" "testing" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/stretchr/testify/require" ) func TestDropColumnWithIndex(t *testing.T) { - var def TableDef - def.Indexes = []*IndexDef{ + var def plan.TableDef + def.Indexes = []*plan.IndexDef{ {IndexName: "idx", IndexAlgo: "fulltext", TableExist: true, diff --git a/pkg/sql/plan/build_alter_add_primarykey.go b/pkg/sql/planner/build_alter_add_primarykey.go similarity index 95% rename from pkg/sql/plan/build_alter_add_primarykey.go rename to pkg/sql/planner/build_alter_add_primarykey.go index e23ca40eb2110..2500d4c752922 100644 --- a/pkg/sql/plan/build_alter_add_primarykey.go +++ b/pkg/sql/planner/build_alter_add_primarykey.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/catalog" @@ -62,7 +62,7 @@ func AddPrimaryKey(ctx CompilerContext, alterPlan *plan.AlterTable, spec *tree.P col.Primary = true col.NotNull = true col.Default.NullAbility = false - tableDef.Pkey = &PrimaryKeyDef{ + tableDef.Pkey = &plan.PrimaryKeyDef{ Names: primaryKeys, PkeyColName: pkeyName, } @@ -87,7 +87,7 @@ func AddPrimaryKey(ctx CompilerContext, alterPlan *plan.AlterTable, spec *tree.P colDef.Primary = true tableDef.Cols = append(tableDef.Cols, colDef) - pkeyDef := &PrimaryKeyDef{ + pkeyDef := &plan.PrimaryKeyDef{ Names: primaryKeys, PkeyColName: pkeyName, CompPkeyCol: colDef, @@ -110,7 +110,7 @@ func DropPrimaryKey(ctx CompilerContext, alterPlan *plan.AlterTable, alterCtx *A } } } else { - tableDef.Cols = RemoveIf[*ColDef](tableDef.Cols, func(coldef *ColDef) bool { + tableDef.Cols = RemoveIf[*plan.ColDef](tableDef.Cols, func(coldef *plan.ColDef) bool { return coldef.Hidden && pkey.PkeyColName == coldef.Name }) } diff --git a/pkg/sql/plan/build_alter_change_column.go b/pkg/sql/planner/build_alter_change_column.go similarity index 98% 
rename from pkg/sql/plan/build_alter_change_column.go rename to pkg/sql/planner/build_alter_change_column.go index 44210f5a67d31..844ee1a9e7082 100644 --- a/pkg/sql/plan/build_alter_change_column.go +++ b/pkg/sql/planner/build_alter_change_column.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "context" @@ -100,10 +100,10 @@ func ChangeColumn( func buildColumnAndConstraint( ctx CompilerContext, targetTableDef *plan.TableDef, - oldCol *ColDef, + oldCol *plan.ColDef, specNewColumn *tree.ColumnTableDef, colType plan.Type, -) (*ColDef, error) { +) (*plan.ColDef, error) { newColName := specNewColumn.Name.ColName() newColNameOrigin := specNewColumn.Name.ColNameOrigin() // Check if the new column name is valid and conflicts with internal hidden columns @@ -112,7 +112,7 @@ func buildColumnAndConstraint( return nil, err } - newCol := &ColDef{ + newCol := &plan.ColDef{ ColId: oldCol.ColId, Primary: oldCol.Primary, ClusterBy: oldCol.ClusterBy, @@ -139,7 +139,7 @@ func buildColumnAndConstraint( } else if targetTableDef.ClusterBy != nil && targetTableDef.ClusterBy.Name != "" { return nil, moerr.NewNotSupported(ctx.GetContext(), "cluster by with primary key is not support") } else { - targetTableDef.Pkey = &PrimaryKeyDef{ + targetTableDef.Pkey = &plan.PrimaryKeyDef{ Names: []string{newColName}, PkeyColName: newColName, } @@ -268,7 +268,7 @@ func checkColumnNameValid(ctx context.Context, colName string) error { // updateClusterByInTableDef Process the cluster by table. 
If the cluster by key name is modified, proceed with the process func updateClusterByInTableDef( ctx context.Context, - tableDef *TableDef, + tableDef *plan.TableDef, newColName string, originalColName string, ) { diff --git a/pkg/sql/plan/build_alter_modify_column.go b/pkg/sql/planner/build_alter_modify_column.go similarity index 93% rename from pkg/sql/plan/build_alter_modify_column.go rename to pkg/sql/planner/build_alter_modify_column.go index 955e85f668dfa..0c96282b5e342 100644 --- a/pkg/sql/plan/build_alter_modify_column.go +++ b/pkg/sql/planner/build_alter_modify_column.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "context" @@ -21,14 +21,14 @@ import ( "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" ) func updateNewColumnInTableDef( cctx CompilerContext, - tableDef *TableDef, - oCol *ColDef, + tableDef *plan.TableDef, + oCol *plan.ColDef, nColSpec *tree.ColumnTableDef, nPos *tree.ColumnPosition, ) (bool, error) { @@ -107,13 +107,13 @@ func ModifyColumn( // and place the new column in the target location func modifyColPosition( ctx context.Context, - tableDef *TableDef, - oCol, nCol *ColDef, + tableDef *plan.TableDef, + oCol, nCol *plan.ColDef, pos *tree.ColumnPosition, ) error { if pos != nil && pos.Typ != tree.ColumnPositionNone { // detete old column - tableDef.Cols = RemoveIf[*ColDef](tableDef.Cols, func(col *ColDef) bool { + tableDef.Cols = RemoveIf[*plan.ColDef](tableDef.Cols, func(col *plan.ColDef) bool { return strings.EqualFold(col.Name, oCol.Name) }) @@ -123,7 +123,7 @@ func modifyColPosition( } tableDef.Cols = append( tableDef.Cols[:targetPos], - 
append([]*ColDef{nCol}, tableDef.Cols[targetPos:]...)..., + append([]*plan.ColDef{nCol}, tableDef.Cols[targetPos:]...)..., ) } else { for i, col := range tableDef.Cols { @@ -165,7 +165,7 @@ func checkChangeTypeCompatible( // checkColumnForeignkeyConstraint check for table column foreign key dependencies, including // the foreign keys of the table itself and being dependent on foreign keys of other tables -func checkColumnForeignkeyConstraint(ctx CompilerContext, tbInfo *TableDef, originalCol, newCol *ColDef) error { +func checkColumnForeignkeyConstraint(ctx CompilerContext, tbInfo *plan.TableDef, originalCol, newCol *plan.ColDef) error { if newCol.Typ.GetId() == originalCol.Typ.GetId() && newCol.Typ.GetWidth() == originalCol.Typ.GetWidth() && newCol.Typ.GetAutoIncr() == originalCol.Typ.GetAutoIncr() { @@ -208,7 +208,7 @@ func checkColumnForeignkeyConstraint(ctx CompilerContext, tbInfo *TableDef, orig if refTableDef == nil { return moerr.NewInternalErrorf(ctx.GetContext(), "The reference foreign key table %d does not exist", referredTblId) } - var referredFK *ForeignKeyDef + var referredFK *plan.ForeignKeyDef for _, fkInfo := range refTableDef.Fkeys { if fkInfo.ForeignTbl == tbInfo.TblId { referredFK = fkInfo @@ -244,7 +244,7 @@ func checkColumnForeignkeyConstraint(ctx CompilerContext, tbInfo *TableDef, orig } // checkPriKeyConstraint check all parts of a PRIMARY KEY must be NOT NULL -func checkPriKeyConstraint(ctx context.Context, col *ColDef, hasDefaultValue, hasNullFlag bool, priKeyDef *plan.PrimaryKeyDef) error { +func checkPriKeyConstraint(ctx context.Context, col *plan.ColDef, hasDefaultValue, hasNullFlag bool, priKeyDef *plan.PrimaryKeyDef) error { hasPriKeyFlag := false if col.Primary { hasPriKeyFlag = true diff --git a/pkg/sql/plan/build_alter_rename_column.go b/pkg/sql/planner/build_alter_rename_column.go similarity index 98% rename from pkg/sql/plan/build_alter_rename_column.go rename to pkg/sql/planner/build_alter_rename_column.go index 
62bae922e9e15..a73edf4fdbe0d 100644 --- a/pkg/sql/plan/build_alter_rename_column.go +++ b/pkg/sql/planner/build_alter_rename_column.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "context" @@ -69,7 +69,7 @@ func RenameColumn( func updateRenameColumnInTableDef( ctx CompilerContext, - oldCol *ColDef, + oldCol *plan.ColDef, tableDef *plan.TableDef, spec *tree.AlterTableRenameColumnClause, ) (sqls []string, err error) { @@ -151,7 +151,7 @@ func updateRenameColumnInTableDef( // update column name itself for i, col := range tableDef.Cols { if strings.EqualFold(col.Name, oldColName) { - colDef := DeepCopyColDef(col) + colDef := plan.DeepCopyColDef(col) colDef.Name = newColName colDef.OriginName = newColNameOrigin tableDef.Cols[i] = colDef @@ -165,7 +165,7 @@ func updateRenameColumnInTableDef( func addRenameContextToAlterCtx( _ context.Context, alterCtx *AlterTableContext, - oldCol *ColDef, + oldCol *plan.ColDef, originTblName string, tableDef *plan.TableDef, spec *tree.AlterTableRenameColumnClause) error { @@ -224,7 +224,7 @@ func AlterColumn( for i, col := range tableDef.Cols { if strings.EqualFold(col.Name, originalCol.Name) { - colDef := DeepCopyColDef(col) + colDef := plan.DeepCopyColDef(col) if spec.OptionType == tree.AlterColumnOptionSetDefault { tmpColumnDef := tree.NewColumnTableDef(spec.ColumnName, nil, []tree.ColumnAttribute{spec.DefaultExpr}) defer func() { diff --git a/pkg/sql/plan/build_alter_table.go b/pkg/sql/planner/build_alter_table.go similarity index 96% rename from pkg/sql/plan/build_alter_table.go rename to pkg/sql/planner/build_alter_table.go index 072ab876f27be..0ff4b5295c6e1 100644 --- a/pkg/sql/plan/build_alter_table.go +++ b/pkg/sql/planner/build_alter_table.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "bytes" @@ -35,7 +35,7 @@ import ( "go.uber.org/zap" ) -func skipPkDedup(old, new *TableDef) bool { +func skipPkDedup(old, new *plan.TableDef) bool { oldPk := old.Pkey newPk := new.Pkey @@ -53,7 +53,7 @@ func skipPkDedup(old, new *TableDef) bool { return slices.Equal(oldPk.Names, newPk.Names) } -func skipUniqueIdxDedup(old, new *TableDef) map[string]bool { +func skipUniqueIdxDedup(old, new *plan.TableDef) map[string]bool { var skip map[string]bool // In spite of the O(n^2) complexity, // it's rare for a table to have enough indexes to cause @@ -79,7 +79,7 @@ func skipUniqueIdxDedup(old, new *TableDef) map[string]bool { return skip } -func buildAlterTableCopy(stmt *tree.AlterTable, cctx CompilerContext) (*Plan, error) { +func buildAlterTableCopy(stmt *tree.AlterTable, cctx CompilerContext) (*plan.Plan, error) { ctx := cctx.GetContext() // 1. get origin table name and Schema name schemaName, tableName := string(stmt.Table.Schema()), string(stmt.Table.Name()) @@ -87,7 +87,7 @@ func buildAlterTableCopy(stmt *tree.AlterTable, cctx CompilerContext) (*Plan, er schemaName = cctx.DefaultDatabase() } - var snapshot *Snapshot + var snapshot *plan.Snapshot _, tableDef, err := cctx.Resolve(schemaName, tableName, snapshot) if err != nil { return nil, err @@ -262,7 +262,7 @@ func buildAlterTableCopy(stmt *tree.AlterTable, cctx CompilerContext) (*Plan, er alterTablePlan.UpdateFkSqls = append(alterTablePlan.UpdateFkSqls, alterTableCtx.UpdateSqls...) 
//delete copy table records from mo_catalog.mo_foreign_keys alterTablePlan.UpdateFkSqls = append(alterTablePlan.UpdateFkSqls, getSqlForDeleteTable(schemaName, alterTableCtx.copyTableName)) - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Ddl{ Ddl: &plan.DataDefinition{ DdlType: plan.DataDefinition_ALTER_TABLE, @@ -347,7 +347,7 @@ type AlterTableContext struct { originTableName string copyTableName string // key oldColId -> new ColDef - changColDefMap map[uint64]*ColDef + changColDefMap map[uint64]*plan.ColDef UpdateSqls []string } @@ -363,9 +363,9 @@ type selectExpr struct { sexprStr string } -func initAlterTableContext(originTableDef *TableDef, copyTableDef *TableDef, schemaName string) *AlterTableContext { +func initAlterTableContext(originTableDef *plan.TableDef, copyTableDef *plan.TableDef, schemaName string) *AlterTableContext { alterTblColMap := make(map[string]selectExpr) - changTblColIdMap := make(map[uint64]*ColDef) + changTblColIdMap := make(map[uint64]*plan.ColDef) for _, coldef := range originTableDef.Cols { if coldef.Hidden { continue @@ -391,8 +391,8 @@ func initAlterTableContext(originTableDef *TableDef, copyTableDef *TableDef, sch } } -func buildCopyTableDef(ctx context.Context, tableDef *TableDef) (*TableDef, error) { - replicaTableDef := DeepCopyTableDef(tableDef, true) +func buildCopyTableDef(ctx context.Context, tableDef *plan.TableDef) (*plan.TableDef, error) { + replicaTableDef := plan.DeepCopyTableDef(tableDef, true) id, err := uuid.NewV7() if err != nil { @@ -402,7 +402,7 @@ func buildCopyTableDef(ctx context.Context, tableDef *TableDef) (*TableDef, erro return replicaTableDef, nil } -func buildAlterTable(stmt *tree.AlterTable, ctx CompilerContext) (*Plan, error) { +func buildAlterTable(stmt *tree.AlterTable, ctx CompilerContext) (*plan.Plan, error) { // ALTER TABLE tbl_name // [alter_option [, alter_option] ...] 
// [partition_options] @@ -492,7 +492,7 @@ func allowTempTableAlterForIndex(stmt *tree.AlterTable) bool { func ResolveAlterTableAlgorithm( ctx context.Context, validAlterSpecs []tree.AlterTableOption, - tableDef *TableDef, + tableDef *plan.TableDef, ) (algorithm plan.AlterTable_AlgorithmType, err error) { algorithm = plan.AlterTable_COPY for _, spec := range validAlterSpecs { @@ -570,7 +570,7 @@ func ResolveAlterTableAlgorithm( func isInplaceModifyColumn( ctx context.Context, clause *tree.AlterTableModifyColumnClause, - tableDef *TableDef, + tableDef *plan.TableDef, ) (ok bool, err error) { oCol := FindColumn(tableDef.Cols, clause.NewColumn.Name.ColName()) if oCol == nil { @@ -609,8 +609,8 @@ func isInplaceModifyColumn( func positionMatched( ctx context.Context, nPos *tree.ColumnPosition, - tableDef *TableDef, - oCol *ColDef, + tableDef *plan.TableDef, + oCol *plan.ColDef, ) (ok bool, err error) { ok = true if nPos != nil && nPos.Typ != tree.ColumnPositionNone { @@ -631,7 +631,7 @@ func positionMatched( func storageAgnosticType( ctx context.Context, nCol *tree.ColumnTableDef, - oCol *ColDef, + oCol *plan.ColDef, ) (ok bool, err error) { nTy, err := getTypeFromAst(ctx, nCol.Type) @@ -671,7 +671,7 @@ func storageAgnosticType( func storageAgnosticAttrs( _ context.Context, nCol *tree.ColumnTableDef, - oCol *ColDef, + oCol *plan.ColDef, ) (ok bool, err error) { ok = true for _, attr := range nCol.Attributes { @@ -711,7 +711,7 @@ func storageAgnosticAttrs( return } -func buildNotNullColumnVal(col *ColDef) string { +func buildNotNullColumnVal(col *plan.ColDef) string { var defaultValue string if col.Typ.Id == int32(types.T_int8) || col.Typ.Id == int32(types.T_int16) || diff --git a/pkg/sql/plan/build_alter_table_test.go b/pkg/sql/planner/build_alter_table_test.go similarity index 94% rename from pkg/sql/plan/build_alter_table_test.go rename to pkg/sql/planner/build_alter_table_test.go index 8519e639bcbf9..3e43b722f919c 100644 --- a/pkg/sql/plan/build_alter_table_test.go 
+++ b/pkg/sql/planner/build_alter_table_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "context" @@ -109,7 +109,7 @@ func Test_checkChangeTypeCompatible(t *testing.T) { } } -func buildSingleStmt(opt Optimizer, t *testing.T, sql string) (*Plan, error) { +func buildSingleStmt(opt Optimizer, t *testing.T, sql string) (*plan.Plan, error) { statements, err := mysql.Parse(opt.CurrentContext().GetContext(), sql, 1) if err != nil { return nil, err @@ -128,7 +128,7 @@ func TestAlterTableVarcharLengthBumped(t *testing.T) { tests := []struct { name string clause *tree.AlterTableModifyColumnClause - tableDef *TableDef + tableDef *plan.TableDef wantOk bool wantErr bool }{ @@ -146,8 +146,8 @@ func TestAlterTableVarcharLengthBumped(t *testing.T) { }, }, }, - tableDef: &TableDef{ - Cols: []*ColDef{ + tableDef: &plan.TableDef{ + Cols: []*plan.ColDef{ { Name: "col1", ColId: 1, @@ -175,8 +175,8 @@ func TestAlterTableVarcharLengthBumped(t *testing.T) { }, }, }, - tableDef: &TableDef{ - Cols: []*ColDef{ + tableDef: &plan.TableDef{ + Cols: []*plan.ColDef{ { Name: "col1", ColId: 1, @@ -204,8 +204,8 @@ func TestAlterTableVarcharLengthBumped(t *testing.T) { }, }, }, - tableDef: &TableDef{ - Cols: []*ColDef{ + tableDef: &plan.TableDef{ + Cols: []*plan.ColDef{ { Name: "col1", ColId: 1, @@ -233,8 +233,8 @@ func TestAlterTableVarcharLengthBumped(t *testing.T) { }, }, }, - tableDef: &TableDef{ - Cols: []*ColDef{ + tableDef: &plan.TableDef{ + Cols: []*plan.ColDef{ { Name: "col1", ColId: 1, @@ -266,8 +266,8 @@ func TestAlterTableVarcharLengthBumped(t *testing.T) { RelativeColumn: tree.NewUnresolvedColName("col2"), }, }, - tableDef: &TableDef{ - Cols: []*ColDef{ + tableDef: &plan.TableDef{ + Cols: []*plan.ColDef{ { Name: "col1", ColId: 1, @@ -306,8 +306,8 @@ func TestAlterTableVarcharLengthBumped(t *testing.T) { }, }, }, - tableDef: &TableDef{ - Cols: []*ColDef{ + tableDef: 
&plan.TableDef{ + Cols: []*plan.ColDef{ { Name: "col1", ColId: 1, @@ -341,8 +341,8 @@ func TestAlterTableVarcharLengthBumped(t *testing.T) { }, }, }, - tableDef: &TableDef{ - Cols: []*ColDef{ + tableDef: &plan.TableDef{ + Cols: []*plan.ColDef{ { Name: "col1", ColId: 1, @@ -377,8 +377,8 @@ func TestAlterTableVarcharLengthBumped(t *testing.T) { }, }, }, - tableDef: &TableDef{ - Cols: []*ColDef{ + tableDef: &plan.TableDef{ + Cols: []*plan.ColDef{ { Name: "col1", ColId: 1, @@ -418,8 +418,8 @@ func TestAlterTableVarcharLengthBumped(t *testing.T) { }, }, }, - tableDef: &TableDef{ - Cols: []*ColDef{ + tableDef: &plan.TableDef{ + Cols: []*plan.ColDef{ { Name: "col1", ColId: 1, @@ -456,8 +456,8 @@ func TestAlterTableVarcharLengthBumped(t *testing.T) { }, }, }, - tableDef: &TableDef{ - Cols: []*ColDef{ + tableDef: &plan.TableDef{ + Cols: []*plan.ColDef{ { Name: "col1", ColId: 1, @@ -496,8 +496,8 @@ func TestAlterTableVarcharLengthBumped(t *testing.T) { }, }, }, - tableDef: &TableDef{ - Cols: []*ColDef{ + tableDef: &plan.TableDef{ + Cols: []*plan.ColDef{ { Name: "col1", ColId: 1, diff --git a/pkg/sql/plan/build_constraint_util.go b/pkg/sql/planner/build_constraint_util.go similarity index 92% rename from pkg/sql/plan/build_constraint_util.go rename to pkg/sql/planner/build_constraint_util.go index c17e38451344e..a5e90c08ee443 100644 --- a/pkg/sql/plan/build_constraint_util.go +++ b/pkg/sql/planner/build_constraint_util.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "context" @@ -25,8 +25,8 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/defines" "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" "github.com/matrixorigin/matrixone/pkg/sql/util" "github.com/matrixorigin/matrixone/pkg/vm/process" ) @@ -39,22 +39,22 @@ const ( type dmlSelectInfo struct { typ string - projectList []*Expr + projectList []*plan.Expr tblInfo *dmlTableInfo idx int32 rootId int32 derivedTableId int32 onDuplicateIdx []int32 - onDuplicateExpr map[string]*Expr + onDuplicateExpr map[string]*plan.Expr onDuplicateNeedAgg bool // if table have pk & unique key, that will be true. onDuplicateIsIgnore bool } type dmlTableInfo struct { typ string - objRef []*ObjectRef - tableDefs []*TableDef + objRef []*plan.ObjectRef + tableDefs []*plan.TableDef isClusterTable []bool haveConstraint bool isMulti bool @@ -204,7 +204,7 @@ func getUpdateTableInfo(ctx CompilerContext, stmt *tree.Update) (*dmlTableInfo, return newTblInfo, nil } -func checkTableType(ctx context.Context, tableDef *TableDef) error { +func checkTableType(ctx context.Context, tableDef *plan.TableDef) error { if tableDef.TableType == catalog.SystemSourceRel { return moerr.NewInvalidInput(ctx, "cannot insert/update/delete from source") } else if tableDef.TableType == catalog.SystemExternalRel { @@ -272,7 +272,7 @@ func setTableExprToDmlTableInfo(ctx CompilerContext, tbl tree.TableExpr, tblInfo return err } - var newCols []*ColDef + var newCols []*plan.ColDef for _, col := range tableDef.Cols { if col.Hidden && tblInfo.typ == "insert" { if col.Name == catalog.FakePrimaryKeyColName { @@ -322,7 +322,7 @@ func setTableExprToDmlTableInfo(ctx CompilerContext, tbl tree.TableExpr, tblInfo nowIdx := len(tblInfo.tableDefs) tblInfo.isClusterTable = 
append(tblInfo.isClusterTable, isClusterTable) - tblInfo.objRef = append(tblInfo.objRef, &ObjectRef{ + tblInfo.objRef = append(tblInfo.objRef, &plan.ObjectRef{ Obj: int64(tableDef.TblId), SchemaName: dbName, ObjName: tblName, @@ -513,9 +513,9 @@ func initInsertStmt(builder *QueryBuilder, bindCtx *BindContext, stmt *tree.Inse tag := builder.qry.Nodes[info.rootId].BindingTags[0] info.derivedTableId = info.rootId - oldProject := append([]*Expr{}, lastNode.ProjectList...) + oldProject := append([]*plan.Expr{}, lastNode.ProjectList...) - insertColToExpr := make(map[string]*Expr) + insertColToExpr := make(map[string]*plan.Expr) for i, column := range insertColumns { colIdx := colToIdx[column] projExpr := &plan.Expr{ @@ -581,7 +581,7 @@ func initInsertStmt(builder *QueryBuilder, bindCtx *BindContext, stmt *tree.Inse // -------- // rewrite 'insert into t1(b) values (1)' to // select 'select 0, _t.column_0 from (select * from values (1)) _t(column_0) - projectList := make([]*Expr, 0, len(tableDef.Cols)) + projectList := make([]*plan.Expr, 0, len(tableDef.Cols)) pkCols := make(map[string]struct{}) for _, name := range tableDef.Pkey.Names { pkCols[name] = struct{}{} @@ -626,7 +626,7 @@ func initInsertStmt(builder *QueryBuilder, bindCtx *BindContext, stmt *tree.Inse BindingTags: []int32{lastTag}, }, projectCtx) - info.projectList = make([]*Expr, 0, len(projectList)) + info.projectList = make([]*plan.Expr, 0, len(projectList)) info.derivedTableId = info.rootId for i, e := range projectList { info.projectList = append(info.projectList, &plan.Expr{ @@ -652,8 +652,8 @@ func initInsertStmt(builder *QueryBuilder, bindCtx *BindContext, stmt *tree.Inse stmt.OnDuplicateUpdate = nil } - rightTableDef := DeepCopyTableDef(tableDef, true) - rightObjRef := DeepCopyObjectRef(tableObjRef) + rightTableDef := plan.DeepCopyTableDef(tableDef, true) + rightObjRef := plan.DeepCopyObjectRef(tableObjRef) uniqueCols, uniqueColNames := GetUniqueColAndIdxFromTableDef(rightTableDef) if rightTableDef.Pkey 
!= nil && rightTableDef.Pkey.PkeyColName == catalog.CPrimaryKeyColName { // rightTableDef.Cols = append(rightTableDef.Cols, MakeHiddenColDefByName(catalog.CPrimaryKeyColName)) @@ -693,9 +693,9 @@ func initInsertStmt(builder *QueryBuilder, bindCtx *BindContext, stmt *tree.Inse } } - var defExpr *Expr + var defExpr *plan.Expr idxs := make([]int32, len(rightTableDef.Cols)) - updateExprs := make(map[string]*Expr) + updateExprs := make(map[string]*plan.Expr) binder := NewUpdateBinder(builder.GetContext(), builder, nil, rightTableDef.Cols) for i, col := range rightTableDef.Cols { info.idx = info.idx + 1 @@ -730,14 +730,14 @@ func initInsertStmt(builder *QueryBuilder, bindCtx *BindContext, stmt *tree.Inse } // get join condition - var joinConds *Expr + var joinConds *plan.Expr joinIdx := 0 for _, uniqueColMap := range uniqueCols { - var condExpr *Expr + var condExpr *plan.Expr condIdx := int(0) for _, colIdx := range uniqueColMap { col := rightTableDef.Cols[colIdx] - leftExpr := &Expr{ + leftExpr := &plan.Expr{ Typ: col.Typ, Expr: &plan.Expr_Col{ Col: &plan.ColRef{ @@ -755,14 +755,14 @@ func initInsertStmt(builder *QueryBuilder, bindCtx *BindContext, stmt *tree.Inse }, }, } - eqExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*Expr{leftExpr, rightExpr}) + eqExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*plan.Expr{leftExpr, rightExpr}) if err != nil { return false, nil, nil, err } if condIdx == 0 { condExpr = eqExpr } else { - condExpr, err = BindFuncExprImplByPlanExpr(builder.GetContext(), "and", []*Expr{condExpr, eqExpr}) + condExpr, err = BindFuncExprImplByPlanExpr(builder.GetContext(), "and", []*plan.Expr{condExpr, eqExpr}) if err != nil { return false, nil, nil, err } @@ -773,7 +773,7 @@ func initInsertStmt(builder *QueryBuilder, bindCtx *BindContext, stmt *tree.Inse if joinIdx == 0 { joinConds = condExpr } else { - joinConds, err = BindFuncExprImplByPlanExpr(builder.GetContext(), "or", []*Expr{joinConds, condExpr}) + 
joinConds, err = BindFuncExprImplByPlanExpr(builder.GetContext(), "or", []*plan.Expr{joinConds, condExpr}) if err != nil { return false, nil, nil, err } @@ -791,7 +791,7 @@ func initInsertStmt(builder *QueryBuilder, bindCtx *BindContext, stmt *tree.Inse NodeType: plan.Node_JOIN, Children: []int32{info.rootId, rightId}, JoinType: plan.Node_LEFT, - OnList: []*Expr{joinConds}, + OnList: []*plan.Expr{joinConds}, }, joinCtx) bindCtx.binder = NewTableBinder(builder, bindCtx) info.rootId = newRootId @@ -883,7 +883,7 @@ func deleteToSelect(builder *QueryBuilder, bindCtx *BindContext, node *tree.Dele return builder.bindSelect(astSelect, bindCtx, false) } -func checkNotNull(ctx context.Context, expr *Expr, tableDef *TableDef, col *ColDef) error { +func checkNotNull(ctx context.Context, expr *plan.Expr, tableDef *plan.TableDef, col *plan.ColDef) error { isConstantNull := false if ef, ok := expr.Expr.(*plan.Expr_Lit); ok { isConstantNull = ef.Lit.Isnull @@ -918,7 +918,7 @@ func checkNotNull(ctx context.Context, expr *Expr, tableDef *TableDef, col *ColD var ForceCastExpr = forceCastExpr -func forceCastExpr2(ctx context.Context, expr *Expr, t2 types.Type, targetType *plan.Expr) (*Expr, error) { +func forceCastExpr2(ctx context.Context, expr *plan.Expr, t2 types.Type, targetType *plan.Expr) (*plan.Expr, error) { if targetType.Typ.Id == 0 { return expr, nil } @@ -935,15 +935,15 @@ func forceCastExpr2(ctx context.Context, expr *Expr, t2 types.Type, targetType * return &plan.Expr{ Expr: &plan.Expr_F{ F: &plan.Function{ - Func: &ObjectRef{Obj: fGet.GetEncodedOverloadID(), ObjName: "cast"}, - Args: []*Expr{expr, targetType}, + Func: &plan.ObjectRef{Obj: fGet.GetEncodedOverloadID(), ObjName: "cast"}, + Args: []*plan.Expr{expr, targetType}, }, }, Typ: targetType.Typ, }, nil } -func forceCastExpr(ctx context.Context, expr *Expr, targetType Type) (*Expr, error) { +func forceCastExpr(ctx context.Context, expr *plan.Expr, targetType plan.Type) (*plan.Expr, error) { if targetType.Id == 0 { 
return expr, nil } @@ -966,8 +966,8 @@ func forceCastExpr(ctx context.Context, expr *Expr, targetType Type) (*Expr, err return &plan.Expr{ Expr: &plan.Expr_F{ F: &plan.Function{ - Func: &ObjectRef{Obj: fGet.GetEncodedOverloadID(), ObjName: "cast"}, - Args: []*Expr{expr, t}, + Func: &plan.ObjectRef{Obj: fGet.GetEncodedOverloadID(), ObjName: "cast"}, + Args: []*plan.Expr{expr, t}, }, }, Typ: targetType, @@ -1131,7 +1131,7 @@ func buildValueScan( info *dmlSelectInfo, builder *QueryBuilder, bindCtx *BindContext, - tableDef *TableDef, + tableDef *plan.TableDef, slt *tree.ValuesClause, updateColumns []string, colToIdx map[string]int, @@ -1153,7 +1153,7 @@ func buildValueScan( Name: "", Cols: make([]*plan.ColDef, colCount), } - projectList := make([]*Expr, colCount) + projectList := make([]*plan.Expr, colCount) for i, colName := range updateColumns { col := tableDef.Cols[colToIdx[colName]] @@ -1164,7 +1164,7 @@ func buildValueScan( T: &plan.TargetType{}, }, } - var defExpr *Expr + var defExpr *plan.Expr if isAllDefault { defExpr, err := getDefaultExpr(builder.GetContext(), col) if err != nil { @@ -1312,8 +1312,8 @@ func buildValueScan( func appendForeignConstrantPlan( builder *QueryBuilder, bindCtx *BindContext, - tableDef *TableDef, - objRef *ObjectRef, + tableDef *plan.TableDef, + objRef *plan.ObjectRef, sourceStep int32, isFkRecursionCall bool, ) error { @@ -1338,7 +1338,7 @@ func appendForeignConstrantPlan( // get filter exprs rowIdTyp := types.T_Rowid.ToType() - filters := make([]*Expr, len(tableDef.Fkeys)) + filters := make([]*plan.Expr, len(tableDef.Fkeys)) errExpr := makePlan2StringConstExprWithType("Cannot add or update a child row: a foreign key constraint fails") for i := range tableDef.Fkeys { colExpr := &plan.Expr{ @@ -1350,11 +1350,11 @@ func appendForeignConstrantPlan( }, }, } - nullCheckExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "isnotnull", []*Expr{colExpr}) + nullCheckExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), 
"isnotnull", []*plan.Expr{colExpr}) if err != nil { return err } - filterExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "assert", []*Expr{nullCheckExpr, errExpr}) + filterExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "assert", []*plan.Expr{nullCheckExpr, errExpr}) if err != nil { return err } @@ -1362,7 +1362,7 @@ func appendForeignConstrantPlan( } // append filter node - filterNode := &Node{ + filterNode := &plan.Node{ NodeType: plan.Node_FILTER, Children: []int32{lastNodeId}, FilterList: filters, @@ -1385,10 +1385,10 @@ func appendForeignConstrantPlan( func appendPrimaryConstraintPlan( builder *QueryBuilder, bindCtx *BindContext, - tableDef *TableDef, - objRef *ObjectRef, - partitionExpr *Expr, - pkFilterExprs []*Expr, + tableDef *plan.TableDef, + objRef *plan.ObjectRef, + partitionExpr *plan.Expr, + pkFilterExprs []*plan.Expr, indexSourceColTypes []*plan.Type, sourceStep int32, isUpdate bool, @@ -1440,12 +1440,12 @@ func appendPrimaryConstraintPlan( }, } - eqCheckExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*Expr{MakePlan2Int64ConstExprWithType(1), countColExpr}) + eqCheckExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*plan.Expr{MakePlan2Int64ConstExprWithType(1), countColExpr}) if err != nil { return err } varcharType := types.T_varchar.ToType() - varcharExpr, err := makePlan2CastExpr(builder.GetContext(), &Expr{ + varcharExpr, err := makePlan2CastExpr(builder.GetContext(), &plan.Expr{ Typ: tableDef.Cols[pkPos].Typ, Expr: &plan.Expr_Col{ Col: &plan.ColRef{ColPos: 1, Name: tableDef.Cols[pkPos].Name}, @@ -1462,14 +1462,14 @@ func appendPrimaryConstraintPlan( colTypes = colTypes + "0" } } - filterExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "assert", []*Expr{eqCheckExpr, varcharExpr, makePlan2StringConstExprWithType(tableDef.Cols[pkPos].Name), makePlan2StringConstExprWithType(colTypes)}) + filterExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "assert", 
[]*plan.Expr{eqCheckExpr, varcharExpr, makePlan2StringConstExprWithType(tableDef.Cols[pkPos].Name), makePlan2StringConstExprWithType(colTypes)}) if err != nil { return err } - filterNode := &Node{ + filterNode := &plan.Node{ NodeType: plan.Node_FILTER, Children: []int32{lastNodeId}, - FilterList: []*Expr{filterExpr}, + FilterList: []*plan.Expr{filterExpr}, IsEnd: true, } lastNodeId = builder.appendNode(filterNode, bindCtx) @@ -1488,11 +1488,11 @@ func appendPrimaryConstraintPlan( }, } // sink_scan - sinkScanNode := &Node{ + sinkScanNode := &plan.Node{ NodeType: plan.Node_SINK_SCAN, Stats: &plan.Stats{}, SourceStep: []int32{sourceStep}, - ProjectList: []*Expr{ + ProjectList: []*plan.Expr{ &plan.Expr{ Typ: pkTyp, Expr: &plan.Expr_Col{ @@ -1514,13 +1514,13 @@ func appendPrimaryConstraintPlan( if pkSize > 1 { pkSize++ } - scanTableDef := DeepCopyTableDef(tableDef, false) - scanTableDef.Cols = make([]*ColDef, pkSize) + scanTableDef := plan.DeepCopyTableDef(tableDef, false) + scanTableDef.Cols = make([]*plan.ColDef, pkSize) for _, col := range tableDef.Cols { if i, ok := pkNameMap[col.Name]; ok { - scanTableDef.Cols[i] = DeepCopyColDef(col) + scanTableDef.Cols[i] = plan.DeepCopyColDef(col) } else if col.Name == scanTableDef.Pkey.PkeyColName { - scanTableDef.Cols[pkSize-1] = DeepCopyColDef(col) + scanTableDef.Cols[pkSize-1] = plan.DeepCopyColDef(col) break } } @@ -1530,10 +1530,10 @@ func appendPrimaryConstraintPlan( Stats: &plan.Stats{}, ObjRef: objRef, TableDef: scanTableDef, - ProjectList: []*Expr{{ + ProjectList: []*plan.Expr{{ Typ: pkTyp, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ ColPos: int32(len(scanTableDef.Cols) - 1), Name: tableDef.Pkey.PkeyColName, }, @@ -1549,11 +1549,11 @@ func appendPrimaryConstraintPlan( var tableScanId int32 if len(pkFilterExprs) > 0 { - var blockFilterList []*Expr + var blockFilterList []*plan.Expr scanNode.FilterList = pkFilterExprs - blockFilterList = make([]*Expr, len(pkFilterExprs)) + blockFilterList = 
make([]*plan.Expr, len(pkFilterExprs)) for i, e := range pkFilterExprs { - blockFilterList[i] = DeepCopyExpr(e) + blockFilterList[i] = plan.DeepCopyExpr(e) } tableScanId = builder.appendNode(scanNode, bindCtx) scanNode.BlockFilterList = blockFilterList @@ -1564,7 +1564,7 @@ func appendPrimaryConstraintPlan( } // fuzzy_filter - fuzzyFilterNode := &Node{ + fuzzyFilterNode := &plan.Node{ NodeType: plan.Node_FUZZY_FILTER, Children: []int32{tableScanId, lastNodeId}, // trying to build on smaller child TableDef: tableDef, @@ -1607,26 +1607,26 @@ func appendPrimaryConstraintPlan( if isUpdate && updatePkCol { // update stmt && pk included in update cols lastNodeId = appendSinkScanNode(builder, bindCtx, sourceStep) - scanTableDef := DeepCopyTableDef(tableDef, false) + scanTableDef := plan.DeepCopyTableDef(tableDef, false) rowIdIdx := len(tableDef.Cols) rowIdDef := MakeRowIdColDef() tableDef.Cols = append(tableDef.Cols, rowIdDef) - scanTableDef.Cols = []*plan.ColDef{DeepCopyColDef(tableDef.Cols[pkPos]), DeepCopyColDef(rowIdDef)} + scanTableDef.Cols = []*plan.ColDef{plan.DeepCopyColDef(tableDef.Cols[pkPos]), plan.DeepCopyColDef(rowIdDef)} - scanPkExpr := &Expr{ + scanPkExpr := &plan.Expr{ Typ: pkTyp, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ Name: tableDef.Pkey.PkeyColName, }, }, } - scanRowIdExpr := &Expr{ + scanRowIdExpr := &plan.Expr{ Typ: rowIdDef.Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ ColPos: 1, Name: rowIdDef.Name, }, @@ -1641,27 +1641,27 @@ func appendPrimaryConstraintPlan( }, }, } - scanNode := &Node{ + scanNode := &plan.Node{ NodeType: plan.Node_TABLE_SCAN, Stats: &plan.Stats{}, ObjRef: objRef, TableDef: scanTableDef, - ProjectList: []*Expr{scanPkExpr, scanRowIdExpr}, + ProjectList: []*plan.Expr{scanPkExpr, scanRowIdExpr}, RuntimeFilterProbeList: []*plan.RuntimeFilterSpec{MakeRuntimeFilter(rfTag, false, 0, probeExpr, false)}, } rightId := builder.appendNode(scanNode, bindCtx) - pkColExpr := &Expr{ + pkColExpr := &plan.Expr{ 
Typ: pkTyp, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ RelPos: 1, ColPos: int32(pkPos), Name: tableDef.Pkey.PkeyColName, }, }, } - rightExpr := &Expr{ + rightExpr := &plan.Expr{ Typ: pkTyp, Expr: &plan.Expr_Col{ Col: &plan.ColRef{ @@ -1669,23 +1669,23 @@ func appendPrimaryConstraintPlan( }, }, } - condExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*Expr{pkColExpr, rightExpr}) + condExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*plan.Expr{pkColExpr, rightExpr}) if err != nil { return err } - rightRowIdExpr := &Expr{ + rightRowIdExpr := &plan.Expr{ Typ: rowIdDef.Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ ColPos: 1, Name: rowIdDef.Name, }, }, } - rowIdExpr := &Expr{ + rowIdExpr := &plan.Expr{ Typ: rowIdDef.Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ RelPos: 1, ColPos: int32(rowIdIdx), Name: rowIdDef.Name, @@ -1707,19 +1707,19 @@ func appendPrimaryConstraintPlan( Children: []int32{rightId, lastNodeId}, JoinType: plan.Node_RIGHT, IsRightJoin: true, - OnList: []*Expr{condExpr}, - ProjectList: []*Expr{rowIdExpr, rightRowIdExpr, pkColExpr}, + OnList: []*plan.Expr{condExpr}, + ProjectList: []*plan.Expr{rowIdExpr, rightRowIdExpr, pkColExpr}, RuntimeFilterBuildList: []*plan.RuntimeFilterSpec{MakeRuntimeFilter(rfTag, false, GetInFilterCardLimitOnPK(sid, scanNode.Stats.TableCnt), buildExpr, false)}, } lastNodeId = builder.appendNode(joinNode, bindCtx) recalcStatsByRuntimeFilter(scanNode, joinNode, builder) // append agg node. 
- aggGroupBy := []*Expr{ + aggGroupBy := []*plan.Expr{ { Typ: rowIdExpr.Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ ColPos: 0, Name: catalog.Row_ID, }, @@ -1727,7 +1727,7 @@ func appendPrimaryConstraintPlan( { Typ: rowIdExpr.Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ ColPos: 1, Name: catalog.Row_ID, }, @@ -1735,17 +1735,17 @@ func appendPrimaryConstraintPlan( { Typ: pkColExpr.Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ ColPos: 2, Name: tableDef.Pkey.PkeyColName, }, }}, } - aggProject := []*Expr{ + aggProject := []*plan.Expr{ { Typ: rowIdExpr.Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ RelPos: -1, ColPos: 0, Name: catalog.Row_ID, @@ -1754,7 +1754,7 @@ func appendPrimaryConstraintPlan( { Typ: rowIdExpr.Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ RelPos: -1, ColPos: 1, Name: catalog.Row_ID, @@ -1763,14 +1763,14 @@ func appendPrimaryConstraintPlan( { Typ: pkColExpr.Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ RelPos: -1, ColPos: 2, Name: tableDef.Pkey.PkeyColName, }, }}, } - aggNode := &Node{ + aggNode := &plan.Node{ NodeType: plan.Node_AGG, Children: []int32{lastNodeId}, GroupBy: aggGroupBy, @@ -1780,24 +1780,24 @@ func appendPrimaryConstraintPlan( lastNodeId = builder.appendNode(aggNode, bindCtx) // append filter node - filterExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "not_in_rows", []*Expr{ + filterExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "not_in_rows", []*plan.Expr{ { Typ: rowIdExpr.Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ColPos: 1, Name: catalog.Row_ID}, + Col: &plan.ColRef{ColPos: 1, Name: catalog.Row_ID}, }, }, { Typ: rowIdExpr.Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ColPos: 0, Name: catalog.Row_ID}, + Col: &plan.ColRef{ColPos: 0, Name: catalog.Row_ID}, }, }, }) if err != nil { return err } - colExpr := &Expr{ + colExpr := &plan.Expr{ Typ: rowIdDef.Typ, Expr: &plan.Expr_Col{ Col: &plan.ColRef{ @@ 
-1806,11 +1806,11 @@ func appendPrimaryConstraintPlan( }, } - lastNodeId = builder.appendNode(&Node{ + lastNodeId = builder.appendNode(&plan.Node{ NodeType: plan.Node_FILTER, Children: []int32{lastNodeId}, - FilterList: []*Expr{filterExpr}, - ProjectList: []*Expr{ + FilterList: []*plan.Expr{filterExpr}, + ProjectList: []*plan.Expr{ colExpr, { Typ: tableDef.Cols[pkPos].Typ, @@ -1822,13 +1822,13 @@ func appendPrimaryConstraintPlan( }, bindCtx) // append assert node - isEmptyExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "isempty", []*Expr{colExpr}) + isEmptyExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "isempty", []*plan.Expr{colExpr}) if err != nil { return err } varcharType := types.T_varchar.ToType() - varcharExpr, err := makePlan2CastExpr(builder.GetContext(), &Expr{ + varcharExpr, err := makePlan2CastExpr(builder.GetContext(), &plan.Expr{ Typ: tableDef.Cols[pkPos].Typ, Expr: &plan.Expr_Col{ Col: &plan.ColRef{ColPos: 1, Name: tableDef.Cols[pkPos].Name}, @@ -1845,14 +1845,14 @@ func appendPrimaryConstraintPlan( colTypes = colTypes + "0" } } - assertExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "assert", []*Expr{isEmptyExpr, varcharExpr, makePlan2StringConstExprWithType(tableDef.Cols[pkPos].Name), makePlan2StringConstExprWithType(colTypes)}) + assertExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "assert", []*plan.Expr{isEmptyExpr, varcharExpr, makePlan2StringConstExprWithType(tableDef.Cols[pkPos].Name), makePlan2StringConstExprWithType(colTypes)}) if err != nil { return err } - lastNodeId = builder.appendNode(&Node{ + lastNodeId = builder.appendNode(&plan.Node{ NodeType: plan.Node_FILTER, Children: []int32{lastNodeId}, - FilterList: []*Expr{assertExpr}, + FilterList: []*plan.Expr{assertExpr}, IsEnd: true, }, bindCtx) builder.appendStep(lastNodeId) diff --git a/pkg/sql/plan/build_dcl.go b/pkg/sql/planner/build_dcl.go similarity index 90% rename from pkg/sql/plan/build_dcl.go rename to 
pkg/sql/planner/build_dcl.go index 7876df2e9d951..32dee12848005 100644 --- a/pkg/sql/plan/build_dcl.go +++ b/pkg/sql/planner/build_dcl.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "math" @@ -23,7 +23,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" ) -func getPreparePlan(ctx CompilerContext, stmt tree.Statement) (*Plan, error) { +func getPreparePlan(ctx CompilerContext, stmt tree.Statement) (*plan.Plan, error) { if s, ok := stmt.(*tree.Insert); ok { if _, ok := s.Rows.Select.(*tree.ValuesClause); ok { return BuildPlan(ctx, stmt, true) @@ -44,8 +44,8 @@ func getPreparePlan(ctx CompilerContext, stmt tree.Statement) (*Plan, error) { if err != nil { return nil, err } - return &Plan{ - Plan: &Plan_Query{ + return &plan.Plan{ + Plan: &plan.Plan_Query{ Query: optimized, }, }, nil @@ -54,8 +54,8 @@ func getPreparePlan(ctx CompilerContext, stmt tree.Statement) (*Plan, error) { } } -func buildPrepare(stmt tree.Prepare, ctx CompilerContext) (*Plan, error) { - var preparePlan *Plan +func buildPrepare(stmt tree.Prepare, ctx CompilerContext) (*plan.Plan, error) { + var preparePlan *plan.Plan var err error var stmtName string @@ -109,7 +109,7 @@ func buildPrepare(stmt tree.Prepare, ctx CompilerContext) (*Plan, error) { ParamTypes: paramTypes, } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Dcl{ Dcl: &plan.DataControl{ DclType: plan.DataControl_PREPARE, @@ -121,11 +121,11 @@ func buildPrepare(stmt tree.Prepare, ctx CompilerContext) (*Plan, error) { }, nil } -func buildExecute(stmt *tree.Execute, ctx CompilerContext) (*Plan, error) { +func buildExecute(stmt *tree.Execute, ctx CompilerContext) (*plan.Plan, error) { builder := NewQueryBuilder(plan.Query_SELECT, ctx, false, false) binder := NewWhereBinder(builder, &BindContext{}) - args := make([]*Expr, len(stmt.Variables)) + args := make([]*plan.Expr, len(stmt.Variables)) for idx, 
variable := range stmt.Variables { arg, err := binder.baseBindExpr(variable, 0, true) if err != nil { @@ -139,7 +139,7 @@ func buildExecute(stmt *tree.Execute, ctx CompilerContext) (*Plan, error) { Args: args, } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Dcl{ Dcl: &plan.DataControl{ DclType: plan.DataControl_EXECUTE, @@ -151,12 +151,12 @@ func buildExecute(stmt *tree.Execute, ctx CompilerContext) (*Plan, error) { }, nil } -func buildDeallocate(stmt *tree.Deallocate, _ CompilerContext) (*Plan, error) { +func buildDeallocate(stmt *tree.Deallocate, _ CompilerContext) (*plan.Plan, error) { deallocate := &plan.Deallocate{ Name: string(stmt.Name), } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Dcl{ Dcl: &plan.DataControl{ DclType: plan.DataControl_DEALLOCATE, @@ -168,7 +168,7 @@ func buildDeallocate(stmt *tree.Deallocate, _ CompilerContext) (*Plan, error) { }, nil } -func buildSetVariables(stmt *tree.SetVar, ctx CompilerContext) (*Plan, error) { +func buildSetVariables(stmt *tree.SetVar, ctx CompilerContext) (*plan.Plan, error) { var err error items := make([]*plan.SetVariablesItem, len(stmt.Assignments)) @@ -201,7 +201,7 @@ func buildSetVariables(stmt *tree.SetVar, ctx CompilerContext) (*Plan, error) { Items: items, } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Dcl{ Dcl: &plan.DataControl{ DclType: plan.DataControl_SET_VARIABLES, @@ -213,7 +213,7 @@ func buildSetVariables(stmt *tree.SetVar, ctx CompilerContext) (*Plan, error) { }, nil } -func buildCreateAccount(stmt *tree.CreateAccount, ctx CompilerContext, isPrepareStmt bool) (*Plan, error) { +func buildCreateAccount(stmt *tree.CreateAccount, ctx CompilerContext, isPrepareStmt bool) (*plan.Plan, error) { params := []tree.Expr{ stmt.Name, stmt.AuthOption.AdminName, @@ -224,7 +224,7 @@ func buildCreateAccount(stmt *tree.CreateAccount, ctx CompilerContext, isPrepare return nil, err } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Dcl{ Dcl: &plan.DataControl{ DclType: 
plan.DataControl_CREATE_ACCOUNT, @@ -238,7 +238,7 @@ func buildCreateAccount(stmt *tree.CreateAccount, ctx CompilerContext, isPrepare }, nil } -func buildAlterAccount(stmt *tree.AlterAccount, ctx CompilerContext, isPrepareStmt bool) (*Plan, error) { +func buildAlterAccount(stmt *tree.AlterAccount, ctx CompilerContext, isPrepareStmt bool) (*plan.Plan, error) { params := []tree.Expr{ stmt.Name, stmt.AuthOption.AdminName, @@ -249,7 +249,7 @@ func buildAlterAccount(stmt *tree.AlterAccount, ctx CompilerContext, isPrepareSt return nil, err } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Dcl{ Dcl: &plan.DataControl{ DclType: plan.DataControl_ALTER_ACCOUNT, @@ -263,7 +263,7 @@ func buildAlterAccount(stmt *tree.AlterAccount, ctx CompilerContext, isPrepareSt }, nil } -func buildDropAccount(stmt *tree.DropAccount, ctx CompilerContext, isPrepareStmt bool) (*Plan, error) { +func buildDropAccount(stmt *tree.DropAccount, ctx CompilerContext, isPrepareStmt bool) (*plan.Plan, error) { params := []tree.Expr{ stmt.Name, } @@ -272,7 +272,7 @@ func buildDropAccount(stmt *tree.DropAccount, ctx CompilerContext, isPrepareStmt return nil, err } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Dcl{ Dcl: &plan.DataControl{ DclType: plan.DataControl_DROP_ACCOUNT, diff --git a/pkg/sql/plan/build_ddl.go b/pkg/sql/planner/build_ddl.go similarity index 95% rename from pkg/sql/plan/build_ddl.go rename to pkg/sql/planner/build_ddl.go index d5babe6dc4152..2b409231f7cad 100644 --- a/pkg/sql/plan/build_ddl.go +++ b/pkg/sql/planner/build_ddl.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "context" @@ -42,7 +42,7 @@ func genDynamicTableDef(ctx CompilerContext, stmt *tree.Select) (*plan.TableDef, var tableDef plan.TableDef // check view statement - var stmtPlan *Plan + var stmtPlan *plan.Plan var err error switch s := stmt.Select.(type) { case *tree.ParenSelect: @@ -104,7 +104,7 @@ func genViewTableDef(ctx CompilerContext, stmt *tree.Select) (*plan.TableDef, er var tableDef plan.TableDef // check view statement - var stmtPlan *Plan + var stmtPlan *plan.Plan var err error switch s := stmt.Select.(type) { case *tree.ParenSelect: @@ -179,7 +179,7 @@ func genViewTableDef(ctx CompilerContext, stmt *tree.Select) (*plan.TableDef, er return &tableDef, nil } -func genAsSelectCols(ctx CompilerContext, stmt *tree.Select) ([]*ColDef, error) { +func genAsSelectCols(ctx CompilerContext, stmt *tree.Select) ([]*plan.ColDef, error) { var err error var rootId int32 builder := NewQueryBuilder(plan.Query_SELECT, ctx, false, false) @@ -239,11 +239,11 @@ func genAsSelectCols(ctx CompilerContext, stmt *tree.Select) ([]*ColDef, error) return cols, nil } -func buildCreateSource(stmt *tree.CreateSource, ctx CompilerContext) (*Plan, error) { +func buildCreateSource(stmt *tree.CreateSource, ctx CompilerContext) (*plan.Plan, error) { streamName := string(stmt.SourceName.ObjectName) createStream := &plan.CreateTable{ IfNotExists: stmt.IfNotExists, - TableDef: &TableDef{ + TableDef: &plan.TableDef{ TableType: catalog.SystemSourceRel, Name: streamName, }, @@ -292,7 +292,7 @@ func buildCreateSource(stmt *tree.CreateSource, ctx CompilerContext) (*Plan, err }, }, }) - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Ddl{ Ddl: &plan.DataDefinition{ DdlType: plan.DataDefinition_CREATE_TABLE, @@ -305,7 +305,7 @@ func buildCreateSource(stmt *tree.CreateSource, ctx CompilerContext) (*Plan, err } func buildSourceDefs(stmt *tree.CreateSource, ctx CompilerContext, createStream *plan.CreateTable) error { - colMap := make(map[string]*ColDef) + colMap 
:= make(map[string]*plan.ColDef) for _, item := range stmt.Defs { switch def := item.(type) { case *tree.ColumnTableDef: @@ -324,7 +324,7 @@ func buildSourceDefs(stmt *tree.CreateSource, ctx CompilerContext, createStream return moerr.NewInvalidInputf(ctx.GetContext(), "string width (%d) is too long", colType.GetWidth()) } } - col := &ColDef{ + col := &plan.ColDef{ Name: colName, OriginName: colNameOrigin, Alg: plan.CompressType_Lz4, @@ -350,13 +350,13 @@ func buildSourceDefs(stmt *tree.CreateSource, ctx CompilerContext, createStream return nil } -func buildCreateView(stmt *tree.CreateView, ctx CompilerContext) (*Plan, error) { +func buildCreateView(stmt *tree.CreateView, ctx CompilerContext) (*plan.Plan, error) { viewName := stmt.Name.ObjectName createView := &plan.CreateView{ Replace: stmt.Replace, IfNotExists: stmt.IfNotExists, - TableDef: &TableDef{ + TableDef: &plan.TableDef{ Name: string(viewName), }, } @@ -371,7 +371,7 @@ func buildCreateView(stmt *tree.CreateView, ctx CompilerContext) (*Plan, error) createView.Database = ctx.DefaultDatabase() } - snapshot := &Snapshot{TS: ×tamp.Timestamp{}} + snapshot := &plan.Snapshot{TS: ×tamp.Timestamp{}} if IsSnapshotValid(ctx.GetSnapshot()) { snapshot = ctx.GetSnapshot() } @@ -391,7 +391,7 @@ func buildCreateView(stmt *tree.CreateView, ctx CompilerContext) (*Plan, error) createView.TableDef.ViewSql = tableDef.ViewSql createView.TableDef.Defs = tableDef.Defs - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Ddl{ Ddl: &plan.DataDefinition{ DdlType: plan.DataDefinition_CREATE_VIEW, @@ -442,7 +442,7 @@ func buildSequenceTableDef(stmt *tree.CreateSequence, ctx CompilerContext, cs *p OriginString: "", }, } - cs.TableDef.Pkey = &PrimaryKeyDef{ + cs.TableDef.Pkey = &plan.PrimaryKeyDef{ Names: []string{Sequence_cols_name[4]}, PkeyColName: Sequence_cols_name[4], } @@ -540,7 +540,7 @@ func buildAlterSequenceTableDef(stmt *tree.AlterSequence, ctx CompilerContext, a OriginString: "", }, } - as.TableDef.Pkey = &PrimaryKeyDef{ + 
as.TableDef.Pkey = &plan.PrimaryKeyDef{ Names: []string{Sequence_cols_name[4]}, PkeyColName: Sequence_cols_name[4], } @@ -585,7 +585,7 @@ func buildAlterSequenceTableDef(stmt *tree.AlterSequence, ctx CompilerContext, a } -func buildDropSequence(stmt *tree.DropSequence, ctx CompilerContext) (*Plan, error) { +func buildDropSequence(stmt *tree.DropSequence, ctx CompilerContext) (*plan.Plan, error) { dropSequence := &plan.DropSequence{ IfExists: stmt.IfExists, } @@ -612,7 +612,7 @@ func buildDropSequence(stmt *tree.DropSequence, ctx CompilerContext) (*Plan, err return nil, moerr.NewInternalError(ctx.GetContext(), "cannot drop sequence in subscription database") } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Ddl{ Ddl: &plan.DataDefinition{ DdlType: plan.DataDefinition_DROP_SEQUENCE, @@ -624,14 +624,14 @@ func buildDropSequence(stmt *tree.DropSequence, ctx CompilerContext) (*Plan, err }, nil } -func buildAlterSequence(stmt *tree.AlterSequence, ctx CompilerContext) (*Plan, error) { +func buildAlterSequence(stmt *tree.AlterSequence, ctx CompilerContext) (*plan.Plan, error) { if stmt.Type == nil && stmt.IncrementBy == nil && stmt.MaxValue == nil && stmt.MinValue == nil && stmt.StartWith == nil && stmt.Cycle == nil { return nil, moerr.NewSyntaxErrorf(ctx.GetContext(), "synatx error, %s has nothing to alter", string(stmt.Name.ObjectName)) } alterSequence := &plan.AlterSequence{ IfExists: stmt.IfExists, - TableDef: &TableDef{ + TableDef: &plan.TableDef{ Name: string(stmt.Name.ObjectName), }, } @@ -653,7 +653,7 @@ func buildAlterSequence(stmt *tree.AlterSequence, ctx CompilerContext) (*Plan, e return nil, err } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Ddl{ Ddl: &plan.DataDefinition{ DdlType: plan.DataDefinition_ALTER_SEQUENCE, @@ -665,10 +665,10 @@ func buildAlterSequence(stmt *tree.AlterSequence, ctx CompilerContext) (*Plan, e }, nil } -func buildCreateSequence(stmt *tree.CreateSequence, ctx CompilerContext) (*Plan, error) { +func buildCreateSequence(stmt 
*tree.CreateSequence, ctx CompilerContext) (*plan.Plan, error) { createSequence := &plan.CreateSequence{ IfNotExists: stmt.IfNotExists, - TableDef: &TableDef{ + TableDef: &plan.TableDef{ Name: string(stmt.Name.ObjectName), }, } @@ -690,7 +690,7 @@ func buildCreateSequence(stmt *tree.CreateSequence, ctx CompilerContext) (*Plan, return nil, err } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Ddl{ Ddl: &plan.DataDefinition{ DdlType: plan.DataDefinition_CREATE_SEQUENCE, @@ -706,7 +706,7 @@ func buildCreateTable( ctx CompilerContext, stmt *tree.CreateTable, cloneStmt *tree.CloneTable, -) (*Plan, error) { +) (*plan.Plan, error) { if stmt.IsAsLike { var err error @@ -766,7 +766,7 @@ func buildCreateTable( createTable := &plan.CreateTable{ IfNotExists: stmt.IfNotExists, Temporary: stmt.Temporary, - TableDef: &TableDef{ + TableDef: &plan.TableDef{ Name: string(stmt.Table.ObjectName), }, } @@ -811,7 +811,7 @@ func buildCreateTable( //createTable.TableDef.Defs = tableDef.Defs } - var asSelectCols []*ColDef + var asSelectCols []*plan.ColDef if stmt.IsAsSelect { if asSelectCols, err = genAsSelectCols(ctx, stmt.AsSource); err != nil { return nil, err @@ -991,7 +991,7 @@ func buildCreateTable( createTable.TableDef.TableType = catalog.SystemTemporaryTable } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Ddl{ Ddl: &plan.DataDefinition{ DdlType: plan.DataDefinition_CREATE_TABLE, @@ -1003,12 +1003,12 @@ func buildCreateTable( }, nil } -func buildTableDefs(stmt *tree.CreateTable, ctx CompilerContext, createTable *plan.CreateTable, asSelectCols []*ColDef) error { +func buildTableDefs(stmt *tree.CreateTable, ctx CompilerContext, createTable *plan.CreateTable, asSelectCols []*plan.ColDef) error { // all below fields' key is lower case var primaryKeys []string var indexs []string var fulltext_indexs []string - colMap := make(map[string]*ColDef) + colMap := make(map[string]*plan.ColDef) defaultMap := make(map[string]string) uniqueIndexInfos := make([]*tree.UniqueIndex, 0) 
fullTextIndexInfos := make([]*tree.FullTextIndex, 0) @@ -1117,7 +1117,7 @@ func buildTableDefs(stmt *tree.CreateTable, ctx CompilerContext, createTable *pl } colType.AutoIncr = auto_incr - col := &ColDef{ + col := &plan.ColDef{ Name: colName, OriginName: colNameOrigin, Alg: plan.CompressType_Lz4, @@ -1127,7 +1127,7 @@ func buildTableDefs(stmt *tree.CreateTable, ctx CompilerContext, createTable *pl Comment: comment, } // if same name col in asSelectCols, overwrite it; add into colMap && createTable.TableDef.Cols later - if idx := slices.IndexFunc(asSelectCols, func(c *ColDef) bool { return c.Name == col.Name }); idx != -1 { + if idx := slices.IndexFunc(asSelectCols, func(c *plan.ColDef) bool { return c.Name == col.Name }); idx != -1 { asSelectCols[idx] = col } else { colMap[colName] = col @@ -1282,7 +1282,7 @@ func buildTableDefs(stmt *tree.CreateTable, ctx CompilerContext, createTable *pl firstCol := true for i := range cols { // insert default values if col[i] only in create clause - if !slices.ContainsFunc(asSelectCols, func(c *ColDef) bool { return c.Name == cols[i].Name }) { + if !slices.ContainsFunc(asSelectCols, func(c *plan.ColDef) bool { return c.Name == cols[i].Name }) { if !firstCol { insertSqlBuilder.WriteString(", ") } @@ -1321,15 +1321,15 @@ func buildTableDefs(stmt *tree.CreateTable, ctx CompilerContext, createTable *pl if err != nil { return err } - colDef := &ColDef{ + colDef := &plan.ColDef{ Name: util.GetClusterTableAttributeName(), Alg: plan.CompressType_Lz4, Typ: colType, NotNull: true, Default: &plan.Default{ - Expr: &Expr{ + Expr: &plan.Expr{ Expr: &plan.Expr_Lit{ - Lit: &Const{ + Lit: &plan.Literal{ Isnull: false, Value: &plan.Literal_U32Val{U32Val: catalog.System_Account}, }, @@ -1361,7 +1361,7 @@ func buildTableDefs(stmt *tree.CreateTable, ctx CompilerContext, createTable *pl for _, col := range createTable.TableDef.Cols { if col.Name == pkeyName { col.Primary = true - createTable.TableDef.Pkey = &PrimaryKeyDef{ + createTable.TableDef.Pkey 
= &plan.PrimaryKeyDef{ Names: primaryKeys, PkeyColName: pkeyName, } @@ -1376,7 +1376,7 @@ func buildTableDefs(stmt *tree.CreateTable, ctx CompilerContext, createTable *pl createTable.TableDef.Cols = append(createTable.TableDef.Cols, colDef) colMap[pkeyName] = colDef - pkeyDef := &PrimaryKeyDef{ + pkeyDef := &plan.PrimaryKeyDef{ Names: primaryKeys, PkeyColName: pkeyName, CompPkeyCol: colDef, @@ -1393,11 +1393,11 @@ func buildTableDefs(stmt *tree.CreateTable, ctx CompilerContext, createTable *pl // locks to the Lock operator in pessimistic transaction mode. if !createTable.IsSystemExternalRel() { pkeyName = catalog.FakePrimaryKeyColName - colDef := &ColDef{ + colDef := &plan.ColDef{ ColId: uint64(len(createTable.TableDef.Cols)), Name: pkeyName, Hidden: true, - Typ: Type{ + Typ: plan.Type{ Id: int32(types.T_uint64), AutoIncr: true, }, @@ -1413,7 +1413,7 @@ func buildTableDefs(stmt *tree.CreateTable, ctx CompilerContext, createTable *pl createTable.TableDef.Cols = append(createTable.TableDef.Cols, colDef) colMap[pkeyName] = colDef - createTable.TableDef.Pkey = &PrimaryKeyDef{ + createTable.TableDef.Pkey = &plan.PrimaryKeyDef{ Names: []string{pkeyName}, PkeyColName: pkeyName, } @@ -1601,7 +1601,7 @@ func getRefAction(typ tree.ReferenceOptionType) plan.ForeignKeyDef_RefAction { // cluster by (word) // // ) -func buildFullTextIndexTable(createTable *plan.CreateTable, indexInfos []*tree.FullTextIndex, colMap map[string]*ColDef, existedIndexes []*plan.IndexDef, pkeyName string, ctx CompilerContext) error { +func buildFullTextIndexTable(createTable *plan.CreateTable, indexInfos []*tree.FullTextIndex, colMap map[string]*plan.ColDef, existedIndexes []*plan.IndexDef, pkeyName string, ctx CompilerContext) error { if pkeyName == "" || pkeyName == catalog.FakePrimaryKeyColName { return moerr.NewInternalErrorNoCtx("primary key cannot be empty for fulltext index") } @@ -1696,13 +1696,13 @@ func buildFullTextIndexTable(createTable *plan.CreateTable, indexInfos []*tree.F // create 
fulltext index hidden table definition // doc_id, pos, word - tableDef := &TableDef{ + tableDef := &plan.TableDef{ Name: indexTableName, } // foreign primary key column keyName := catalog.FullTextIndex_TabCol_Id - colDef := &ColDef{ + colDef := &plan.ColDef{ Name: keyName, Alg: plan.CompressType_Lz4, Typ: plan.Type{ @@ -1720,7 +1720,7 @@ func buildFullTextIndexTable(createTable *plan.CreateTable, indexInfos []*tree.F // position (int32) keyName = catalog.FullTextIndex_TabCol_Position - colDef = &ColDef{ + colDef = &plan.ColDef{ Name: keyName, Alg: plan.CompressType_Lz4, Typ: plan.Type{ @@ -1738,7 +1738,7 @@ func buildFullTextIndexTable(createTable *plan.CreateTable, indexInfos []*tree.F // word (varchar) keyName = catalog.FullTextIndex_TabCol_Word - colDef = &ColDef{ + colDef = &plan.ColDef{ Name: keyName, Alg: plan.CompressType_Lz4, Typ: plan.Type{ @@ -1754,11 +1754,11 @@ func buildFullTextIndexTable(createTable *plan.CreateTable, indexInfos []*tree.F tableDef.Cols = append(tableDef.Cols, colDef) keyName = catalog.FakePrimaryKeyColName - colDef = &ColDef{ + colDef = &plan.ColDef{ Name: keyName, Hidden: true, Alg: plan.CompressType_Lz4, - Typ: Type{ + Typ: plan.Type{ Id: int32(types.T_uint64), AutoIncr: true, }, @@ -1773,12 +1773,12 @@ func buildFullTextIndexTable(createTable *plan.CreateTable, indexInfos []*tree.F tableDef.Cols = append(tableDef.Cols, colDef) - tableDef.Pkey = &PrimaryKeyDef{ + tableDef.Pkey = &plan.PrimaryKeyDef{ Names: []string{keyName}, PkeyColName: keyName, } - tableDef.ClusterBy = &ClusterByDef{ + tableDef.ClusterBy = &plan.ClusterByDef{ Name: "word", } @@ -1803,7 +1803,7 @@ func buildFullTextIndexTable(createTable *plan.CreateTable, indexInfos []*tree.F return nil } -func buildUniqueIndexTable(createTable *plan.CreateTable, indexInfos []*tree.UniqueIndex, colMap map[string]*ColDef, pkeyName string, ctx CompilerContext) error { +func buildUniqueIndexTable(createTable *plan.CreateTable, indexInfos []*tree.UniqueIndex, colMap 
map[string]*plan.ColDef, pkeyName string, ctx CompilerContext) error { for _, indexInfo := range indexInfos { indexDef := &plan.IndexDef{} indexDef.Unique = true @@ -1813,7 +1813,7 @@ func buildUniqueIndexTable(createTable *plan.CreateTable, indexInfos []*tree.Uni if err != nil { return err } - tableDef := &TableDef{ + tableDef := &plan.TableDef{ Name: indexTableName, } indexParts := make([]string, 0) @@ -1847,10 +1847,10 @@ func buildUniqueIndexTable(createTable *plan.CreateTable, indexInfos []*tree.Uni if len(indexInfo.KeyParts) == 1 { keyName = catalog.IndexTableIndexColName colName := indexInfo.KeyParts[0].ColName.ColName() - colDef := &ColDef{ + colDef := &plan.ColDef{ Name: keyName, Alg: plan.CompressType_Lz4, - Typ: Type{ + Typ: plan.Type{ Id: colMap[colName].Typ.Id, Width: colMap[colName].Typ.Width, Scale: colMap[colName].Typ.Scale, @@ -1862,16 +1862,16 @@ func buildUniqueIndexTable(createTable *plan.CreateTable, indexInfos []*tree.Uni }, } tableDef.Cols = append(tableDef.Cols, colDef) - tableDef.Pkey = &PrimaryKeyDef{ + tableDef.Pkey = &plan.PrimaryKeyDef{ Names: []string{keyName}, PkeyColName: keyName, } } else { keyName = catalog.IndexTableIndexColName - colDef := &ColDef{ + colDef := &plan.ColDef{ Name: keyName, Alg: plan.CompressType_Lz4, - Typ: Type{ + Typ: plan.Type{ Id: int32(types.T_varchar), Width: types.MaxVarcharLen, }, @@ -1882,13 +1882,13 @@ func buildUniqueIndexTable(createTable *plan.CreateTable, indexInfos []*tree.Uni }, } tableDef.Cols = append(tableDef.Cols, colDef) - tableDef.Pkey = &PrimaryKeyDef{ + tableDef.Pkey = &plan.PrimaryKeyDef{ Names: []string{keyName}, PkeyColName: keyName, } } if pkeyName != "" { - colDef := &ColDef{ + colDef := &plan.ColDef{ Name: catalog.IndexTablePrimaryColName, Alg: plan.CompressType_Lz4, Typ: plan.Type{ @@ -1935,7 +1935,7 @@ func buildUniqueIndexTable(createTable *plan.CreateTable, indexInfos []*tree.Uni return nil } -func buildSecondaryIndexDef(createTable *plan.CreateTable, indexInfos []*tree.Index, 
colMap map[string]*ColDef, existedIndexes []*plan.IndexDef, pkeyName string, ctx CompilerContext) (err error) { +func buildSecondaryIndexDef(createTable *plan.CreateTable, indexInfos []*tree.Index, colMap map[string]*plan.ColDef, existedIndexes []*plan.IndexDef, pkeyName string, ctx CompilerContext) (err error) { if len(pkeyName) == 0 { return moerr.NewInternalErrorNoCtx("primary key cannot be empty for secondary index") } @@ -1947,7 +1947,7 @@ func buildSecondaryIndexDef(createTable *plan.CreateTable, indexInfos []*tree.In } var indexDef []*plan.IndexDef - var tableDef []*TableDef + var tableDef []*plan.TableDef switch indexInfo.KeyType { case tree.INDEX_TYPE_BTREE, tree.INDEX_TYPE_INVALID: indexDef, tableDef, err = buildRegularSecondaryIndexDef(ctx, indexInfo, colMap, pkeyName) @@ -1980,7 +1980,7 @@ func buildSecondaryIndexDef(createTable *plan.CreateTable, indexInfos []*tree.In // primary key __mo_index_idx_col, // // ) -func buildMasterSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, colMap map[string]*ColDef, pkeyName string) ([]*plan.IndexDef, []*TableDef, error) { +func buildMasterSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, colMap map[string]*plan.ColDef, pkeyName string) ([]*plan.IndexDef, []*plan.TableDef, error) { // 1. 
indexDef init indexDef := &plan.IndexDef{} indexDef.Unique = false @@ -1990,7 +1990,7 @@ func buildMasterSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, co if err != nil { return nil, nil, err } - tableDef := &TableDef{ + tableDef := &plan.TableDef{ Name: indexTableName, } @@ -2012,10 +2012,10 @@ func buildMasterSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, co } var keyName = catalog.MasterIndexTableIndexColName - colDef := &ColDef{ + colDef := &plan.ColDef{ Name: keyName, Alg: plan.CompressType_Lz4, - Typ: Type{ + Typ: plan.Type{ Id: int32(types.T_varchar), Width: types.MaxVarcharLen, }, @@ -2026,12 +2026,12 @@ func buildMasterSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, co }, } tableDef.Cols = append(tableDef.Cols, colDef) - tableDef.Pkey = &PrimaryKeyDef{ + tableDef.Pkey = &plan.PrimaryKeyDef{ Names: []string{keyName}, PkeyColName: keyName, } if pkeyName != "" { - pkColDef := &ColDef{ + pkColDef := &plan.ColDef{ Name: catalog.MasterIndexTablePrimaryColName, Alg: plan.CompressType_Lz4, Typ: plan.Type{ @@ -2093,7 +2093,7 @@ func buildMasterSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, co indexDef.Comment = "" indexDef.IndexAlgoParams = "" } - return []*plan.IndexDef{indexDef}, []*TableDef{tableDef}, nil + return []*plan.IndexDef{indexDef}, []*plan.TableDef{tableDef}, nil } // buildRegularSecondingIndexDef will create a hidden index table with schema @@ -2117,7 +2117,7 @@ func buildMasterSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, co // primary key __mo_index_idx_col, // // ) -func buildRegularSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, colMap map[string]*ColDef, pkeyName string) ([]*plan.IndexDef, []*TableDef, error) { +func buildRegularSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, colMap map[string]*plan.ColDef, pkeyName string) ([]*plan.IndexDef, []*plan.TableDef, error) { // 1. 
indexDef init indexDef := &plan.IndexDef{} @@ -2128,7 +2128,7 @@ func buildRegularSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, c if err != nil { return nil, nil, err } - tableDef := &TableDef{ + tableDef := &plan.TableDef{ Name: indexTableName, } @@ -2172,7 +2172,7 @@ func buildRegularSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, c if len(indexParts) == 1 { // This means indexParts only contains the primary key column keyName = catalog.IndexTableIndexColName - colDef := &ColDef{ + colDef := &plan.ColDef{ Name: keyName, Alg: plan.CompressType_Lz4, Typ: plan.Type{ @@ -2188,16 +2188,16 @@ func buildRegularSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, c }, } tableDef.Cols = append(tableDef.Cols, colDef) - tableDef.Pkey = &PrimaryKeyDef{ + tableDef.Pkey = &plan.PrimaryKeyDef{ Names: []string{keyName}, PkeyColName: keyName, } } else { keyName = catalog.IndexTableIndexColName - colDef := &ColDef{ + colDef := &plan.ColDef{ Name: keyName, Alg: plan.CompressType_Lz4, - Typ: Type{ + Typ: plan.Type{ Id: int32(types.T_varchar), Width: types.MaxVarcharLen, }, @@ -2208,13 +2208,13 @@ func buildRegularSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, c }, } tableDef.Cols = append(tableDef.Cols, colDef) - tableDef.Pkey = &PrimaryKeyDef{ + tableDef.Pkey = &plan.PrimaryKeyDef{ Names: []string{keyName}, PkeyColName: keyName, } } if pkeyName != "" { - colDef := &ColDef{ + colDef := &plan.ColDef{ Name: catalog.IndexTablePrimaryColName, Alg: plan.CompressType_Lz4, Typ: plan.Type{ @@ -2276,7 +2276,7 @@ func buildRegularSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, c indexDef.Comment = "" indexDef.IndexAlgoParams = "" } - return []*plan.IndexDef{indexDef}, []*TableDef{tableDef}, nil + return []*plan.IndexDef{indexDef}, []*plan.TableDef{tableDef}, nil } // buildIvfFlatSecondIndexDef create three internal tables @@ -2303,7 +2303,7 @@ func buildRegularSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, c // 
primary key (__mo_index_centriod_fk_version, __mo_index_centroid_fk_id, __mo_index_pri_col) // ) -func buildIvfFlatSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, colMap map[string]*ColDef, existedIndexes []*plan.IndexDef, pkeyName string) ([]*plan.IndexDef, []*TableDef, error) { +func buildIvfFlatSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, colMap map[string]*plan.ColDef, existedIndexes []*plan.IndexDef, pkeyName string) ([]*plan.IndexDef, []*plan.TableDef, error) { indexParts := make([]string, 1) @@ -2334,7 +2334,7 @@ func buildIvfFlatSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, c } indexDefs := make([]*plan.IndexDef, 3) - tableDefs := make([]*TableDef, 3) + tableDefs := make([]*plan.TableDef, 3) // 1. create ivf-flat `metadata` table { @@ -2343,10 +2343,10 @@ func buildIvfFlatSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, c if err != nil { return nil, nil, err } - tableDefs[0] = &TableDef{ + tableDefs[0] = &plan.TableDef{ Name: indexTableName, TableType: catalog.SystemSI_IVFFLAT_TblType_Metadata, - Cols: make([]*ColDef, 2), + Cols: make([]*plan.ColDef, 2), } // 1.b indexDef1 init @@ -2356,10 +2356,10 @@ func buildIvfFlatSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, c } // 1.c columns: key (PK), val - tableDefs[0].Cols[0] = &ColDef{ + tableDefs[0].Cols[0] = &plan.ColDef{ Name: catalog.SystemSI_IVFFLAT_TblCol_Metadata_key, Alg: plan.CompressType_Lz4, - Typ: Type{ + Typ: plan.Type{ Id: int32(types.T_varchar), Width: types.MaxVarcharLen, }, @@ -2370,10 +2370,10 @@ func buildIvfFlatSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, c OriginString: "", }, } - tableDefs[0].Cols[1] = &ColDef{ + tableDefs[0].Cols[1] = &plan.ColDef{ Name: catalog.SystemSI_IVFFLAT_TblCol_Metadata_val, Alg: plan.CompressType_Lz4, - Typ: Type{ + Typ: plan.Type{ Id: int32(types.T_varchar), Width: types.MaxVarcharLen, }, @@ -2385,7 +2385,7 @@ func buildIvfFlatSecondaryIndexDef(ctx CompilerContext, 
indexInfo *tree.Index, c } // 1.d PK def - tableDefs[0].Pkey = &PrimaryKeyDef{ + tableDefs[0].Pkey = &plan.PrimaryKeyDef{ Names: []string{catalog.SystemSI_IVFFLAT_TblCol_Metadata_key}, PkeyColName: catalog.SystemSI_IVFFLAT_TblCol_Metadata_key, } @@ -2412,10 +2412,10 @@ func buildIvfFlatSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, c if err != nil { return nil, nil, err } - tableDefs[1] = &TableDef{ + tableDefs[1] = &plan.TableDef{ Name: indexTableName, TableType: catalog.SystemSI_IVFFLAT_TblType_Centroids, - Cols: make([]*ColDef, 4), + Cols: make([]*plan.ColDef, 4), } // 2.b indexDefs[1] init @@ -2425,7 +2425,7 @@ func buildIvfFlatSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, c } // 2.c columns: version, id, centroid, PRIMARY KEY (version,id) - tableDefs[1].Cols[0] = &ColDef{ + tableDefs[1].Cols[0] = &plan.ColDef{ Name: catalog.SystemSI_IVFFLAT_TblCol_Centroids_version, Alg: plan.CompressType_Lz4, Typ: plan.Type{ @@ -2439,7 +2439,7 @@ func buildIvfFlatSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, c OriginString: "", }, } - tableDefs[1].Cols[1] = &ColDef{ + tableDefs[1].Cols[1] = &plan.ColDef{ Name: catalog.SystemSI_IVFFLAT_TblCol_Centroids_id, Alg: plan.CompressType_Lz4, Typ: plan.Type{ @@ -2453,10 +2453,10 @@ func buildIvfFlatSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, c OriginString: "", }, } - tableDefs[1].Cols[2] = &ColDef{ + tableDefs[1].Cols[2] = &plan.ColDef{ Name: catalog.SystemSI_IVFFLAT_TblCol_Centroids_centroid, Alg: plan.CompressType_Lz4, - Typ: Type{ + Typ: plan.Type{ Id: colMap[colName].Typ.Id, Width: colMap[colName].Typ.Width, Scale: colMap[colName].Typ.Scale, @@ -2472,7 +2472,7 @@ func buildIvfFlatSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, c tableDefs[1].Cols[3].Primary = true // 2.d PK def - tableDefs[1].Pkey = &PrimaryKeyDef{ + tableDefs[1].Pkey = &plan.PrimaryKeyDef{ Names: []string{ catalog.SystemSI_IVFFLAT_TblCol_Centroids_version, 
catalog.SystemSI_IVFFLAT_TblCol_Centroids_id, @@ -2502,10 +2502,10 @@ func buildIvfFlatSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, c if err != nil { return nil, nil, err } - tableDefs[2] = &TableDef{ + tableDefs[2] = &plan.TableDef{ Name: indexTableName, TableType: catalog.SystemSI_IVFFLAT_TblType_Entries, - Cols: make([]*ColDef, 5), + Cols: make([]*plan.ColDef, 5), } // 3.b indexDefs[2] init @@ -2515,7 +2515,7 @@ func buildIvfFlatSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, c } // 3.c columns: version, id, origin_pk, PRIMARY KEY (version,origin_pk) - tableDefs[2].Cols[0] = &ColDef{ + tableDefs[2].Cols[0] = &plan.ColDef{ Name: catalog.SystemSI_IVFFLAT_TblCol_Entries_version, Alg: plan.CompressType_Lz4, Typ: plan.Type{ @@ -2529,7 +2529,7 @@ func buildIvfFlatSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, c OriginString: "", }, } - tableDefs[2].Cols[1] = &ColDef{ + tableDefs[2].Cols[1] = &plan.ColDef{ Name: catalog.SystemSI_IVFFLAT_TblCol_Entries_id, Alg: plan.CompressType_Lz4, Typ: plan.Type{ @@ -2544,7 +2544,7 @@ func buildIvfFlatSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, c }, } - tableDefs[2].Cols[2] = &ColDef{ + tableDefs[2].Cols[2] = &plan.ColDef{ Name: catalog.SystemSI_IVFFLAT_TblCol_Entries_pk, Alg: plan.CompressType_Lz4, Typ: plan.Type{ @@ -2561,10 +2561,10 @@ func buildIvfFlatSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, c OriginString: "", }, } - tableDefs[2].Cols[3] = &ColDef{ + tableDefs[2].Cols[3] = &plan.ColDef{ Name: catalog.SystemSI_IVFFLAT_TblCol_Entries_entry, Alg: plan.CompressType_Lz4, - Typ: Type{ + Typ: plan.Type{ Id: colMap[colName].Typ.Id, Width: colMap[colName].Typ.Width, Scale: colMap[colName].Typ.Scale, @@ -2581,7 +2581,7 @@ func buildIvfFlatSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, c tableDefs[2].Cols[4].Primary = true // 3.d PK def - tableDefs[2].Pkey = &PrimaryKeyDef{ + tableDefs[2].Pkey = &plan.PrimaryKeyDef{ Names: []string{ 
catalog.SystemSI_IVFFLAT_TblCol_Entries_version, catalog.SystemSI_IVFFLAT_TblCol_Entries_id, @@ -2631,7 +2631,7 @@ func buildIvfFlatSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, c // primary key (index_id, chunk_id) // ) -func buildHnswSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, colMap map[string]*ColDef, existedIndexes []*plan.IndexDef, pkeyName string) ([]*plan.IndexDef, []*TableDef, error) { +func buildHnswSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, colMap map[string]*plan.ColDef, existedIndexes []*plan.IndexDef, pkeyName string) ([]*plan.IndexDef, []*plan.TableDef, error) { if pkeyName == "" || pkeyName == catalog.FakePrimaryKeyColName { return nil, nil, moerr.NewInternalErrorNoCtx("primary key cannot be empty for hnsw index") @@ -2670,7 +2670,7 @@ func buildHnswSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, colM } indexDefs := make([]*plan.IndexDef, 2) - tableDefs := make([]*TableDef, 2) + tableDefs := make([]*plan.TableDef, 2) // 1. 
create hnsw `metadata` table { @@ -2679,10 +2679,10 @@ func buildHnswSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, colM if err != nil { return nil, nil, err } - tableDefs[0] = &TableDef{ + tableDefs[0] = &plan.TableDef{ Name: indexTableName, TableType: catalog.Hnsw_TblType_Metadata, - Cols: make([]*ColDef, 4), + Cols: make([]*plan.ColDef, 4), } // 1.b indexDef1 init @@ -2692,10 +2692,10 @@ func buildHnswSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, colM } // 1.c columns: key (PK), val - tableDefs[0].Cols[0] = &ColDef{ + tableDefs[0].Cols[0] = &plan.ColDef{ Name: catalog.Hnsw_TblCol_Metadata_Index_Id, Alg: plan.CompressType_Lz4, - Typ: Type{ + Typ: plan.Type{ Id: int32(types.T_varchar), Width: 128, Scale: 0, @@ -2707,10 +2707,10 @@ func buildHnswSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, colM OriginString: "", }, } - tableDefs[0].Cols[1] = &ColDef{ + tableDefs[0].Cols[1] = &plan.ColDef{ Name: catalog.Hnsw_TblCol_Metadata_Checksum, Alg: plan.CompressType_Lz4, - Typ: Type{ + Typ: plan.Type{ Id: int32(types.T_varchar), Width: types.MaxVarcharLen, }, @@ -2720,10 +2720,10 @@ func buildHnswSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, colM OriginString: "", }, } - tableDefs[0].Cols[2] = &ColDef{ + tableDefs[0].Cols[2] = &plan.ColDef{ Name: catalog.Hnsw_TblCol_Metadata_Timestamp, Alg: plan.CompressType_Lz4, - Typ: Type{ + Typ: plan.Type{ Id: int32(types.T_int64), Width: 0, Scale: 0, @@ -2734,10 +2734,10 @@ func buildHnswSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, colM OriginString: "", }, } - tableDefs[0].Cols[3] = &ColDef{ + tableDefs[0].Cols[3] = &plan.ColDef{ Name: catalog.Hnsw_TblCol_Metadata_Filesize, Alg: plan.CompressType_Lz4, - Typ: Type{ + Typ: plan.Type{ Id: int32(types.T_int64), Width: 0, Scale: 0, @@ -2750,7 +2750,7 @@ func buildHnswSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, colM } // 1.d PK def - tableDefs[0].Pkey = &PrimaryKeyDef{ + tableDefs[0].Pkey = 
&plan.PrimaryKeyDef{ Names: []string{catalog.Hnsw_TblCol_Metadata_Index_Id}, PkeyColName: catalog.Hnsw_TblCol_Metadata_Index_Id, } @@ -2777,10 +2777,10 @@ func buildHnswSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, colM if err != nil { return nil, nil, err } - tableDefs[1] = &TableDef{ + tableDefs[1] = &plan.TableDef{ Name: indexTableName, TableType: catalog.Hnsw_TblType_Storage, - Cols: make([]*ColDef, 5), + Cols: make([]*plan.ColDef, 5), } // 1.b indexDef1 init @@ -2790,10 +2790,10 @@ func buildHnswSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, colM } // 1.c columns: key (PK), val - tableDefs[1].Cols[0] = &ColDef{ + tableDefs[1].Cols[0] = &plan.ColDef{ Name: catalog.Hnsw_TblCol_Storage_Index_Id, Alg: plan.CompressType_Lz4, - Typ: Type{ + Typ: plan.Type{ Id: int32(types.T_varchar), Width: 128, Scale: 0, @@ -2804,10 +2804,10 @@ func buildHnswSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, colM OriginString: "", }, } - tableDefs[1].Cols[1] = &ColDef{ + tableDefs[1].Cols[1] = &plan.ColDef{ Name: catalog.Hnsw_TblCol_Storage_Chunk_Id, Alg: plan.CompressType_Lz4, - Typ: Type{ + Typ: plan.Type{ Id: int32(types.T_int64), Width: 0, Scale: 0, @@ -2818,10 +2818,10 @@ func buildHnswSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, colM OriginString: "", }, } - tableDefs[1].Cols[2] = &ColDef{ + tableDefs[1].Cols[2] = &plan.ColDef{ Name: catalog.Hnsw_TblCol_Storage_Data, Alg: plan.CompressType_Lz4, - Typ: Type{ + Typ: plan.Type{ Id: int32(types.T_blob), Width: 65536, Scale: 0, @@ -2832,10 +2832,10 @@ func buildHnswSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, colM OriginString: "", }, } - tableDefs[1].Cols[3] = &ColDef{ + tableDefs[1].Cols[3] = &plan.ColDef{ Name: catalog.Hnsw_TblCol_Storage_Tag, Alg: plan.CompressType_Lz4, - Typ: Type{ + Typ: plan.Type{ Id: int32(types.T_int64), Width: 0, Scale: 0, @@ -2851,7 +2851,7 @@ func buildHnswSecondaryIndexDef(ctx CompilerContext, indexInfo *tree.Index, colM 
tableDefs[1].Cols[4].Alg = plan.CompressType_Lz4 tableDefs[1].Cols[4].Primary = true - tableDefs[1].Pkey = &PrimaryKeyDef{ + tableDefs[1].Pkey = &plan.PrimaryKeyDef{ Names: []string{catalog.Hnsw_TblCol_Storage_Index_Id, catalog.Hnsw_TblCol_Storage_Chunk_Id}, PkeyColName: catalog.CPrimaryKeyColName, @@ -2940,7 +2940,7 @@ func CreateIndexDef(indexInfo *tree.Index, return indexDef, nil } -func buildTruncateTable(stmt *tree.TruncateTable, ctx CompilerContext) (*Plan, error) { +func buildTruncateTable(stmt *tree.TruncateTable, ctx CompilerContext) (*plan.Plan, error) { truncateTable := &plan.TruncateTable{} truncateTable.Database = string(stmt.Name.SchemaName) @@ -3018,7 +3018,7 @@ func buildTruncateTable(stmt *tree.TruncateTable, ctx CompilerContext) (*Plan, e } } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Ddl{ Ddl: &plan.DataDefinition{ DdlType: plan.DataDefinition_TRUNCATE_TABLE, @@ -3030,7 +3030,7 @@ func buildTruncateTable(stmt *tree.TruncateTable, ctx CompilerContext) (*Plan, e }, nil } -func buildDropTable(stmt *tree.DropTable, ctx CompilerContext) (*Plan, error) { +func buildDropTable(stmt *tree.DropTable, ctx CompilerContext) (*plan.Plan, error) { dropTable := &plan.DropTable{ IfExists: stmt.IfExists, } @@ -3148,7 +3148,7 @@ func buildDropTable(stmt *tree.DropTable, ctx CompilerContext) (*Plan, error) { dropTable.TableDef = tableDef dropTable.UpdateFkSqls = []string{getSqlForDeleteTable(dropTable.Database, dropTable.Table)} } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Ddl{ Ddl: &plan.DataDefinition{ DdlType: plan.DataDefinition_DROP_TABLE, @@ -3160,7 +3160,7 @@ func buildDropTable(stmt *tree.DropTable, ctx CompilerContext) (*Plan, error) { }, nil } -func buildDropView(stmt *tree.DropView, ctx CompilerContext) (*Plan, error) { +func buildDropView(stmt *tree.DropView, ctx CompilerContext) (*plan.Plan, error) { dropTable := &plan.DropTable{ IfExists: stmt.IfExists, } @@ -3199,7 +3199,7 @@ func buildDropView(stmt *tree.DropView, ctx 
CompilerContext) (*Plan, error) { } dropTable.IsView = true - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Ddl{ Ddl: &plan.DataDefinition{ DdlType: plan.DataDefinition_DROP_TABLE, @@ -3211,7 +3211,7 @@ func buildDropView(stmt *tree.DropView, ctx CompilerContext) (*Plan, error) { }, nil } -func buildCreateDatabase(stmt *tree.CreateDatabase, ctx CompilerContext) (*Plan, error) { +func buildCreateDatabase(stmt *tree.CreateDatabase, ctx CompilerContext) (*plan.Plan, error) { createDB := &plan.CreateDatabase{ IfNotExists: stmt.IfNotExists, @@ -3232,7 +3232,7 @@ func buildCreateDatabase(stmt *tree.CreateDatabase, ctx CompilerContext) (*Plan, } createDB.Sql = stmt.Sql - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Ddl{ Ddl: &plan.DataDefinition{ DdlType: plan.DataDefinition_CREATE_DATABASE, @@ -3244,7 +3244,7 @@ func buildCreateDatabase(stmt *tree.CreateDatabase, ctx CompilerContext) (*Plan, }, nil } -func buildDropDatabase(stmt *tree.DropDatabase, ctx CompilerContext) (*Plan, error) { +func buildDropDatabase(stmt *tree.DropDatabase, ctx CompilerContext) (*plan.Plan, error) { dropDB := &plan.DropDatabase{ IfExists: stmt.IfExists, Database: string(stmt.Name), @@ -3274,7 +3274,7 @@ func buildDropDatabase(stmt *tree.DropDatabase, ctx CompilerContext) (*Plan, err dropDB.UpdateFkSql = getSqlForDeleteDB(dropDB.Database) - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Ddl{ Ddl: &plan.DataDefinition{ DdlType: plan.DataDefinition_DROP_DATABASE, @@ -3287,7 +3287,7 @@ func buildDropDatabase(stmt *tree.DropDatabase, ctx CompilerContext) (*Plan, err } // In MySQL, the CREATE INDEX syntax can only create one index instance at a time -func buildCreateIndex(stmt *tree.CreateIndex, ctx CompilerContext) (*Plan, error) { +func buildCreateIndex(stmt *tree.CreateIndex, ctx CompilerContext) (*plan.Plan, error) { createIndex := &plan.CreateIndex{} if len(stmt.Table.SchemaName) == 0 { createIndex.Database = ctx.DefaultDatabase() @@ -3340,7 +3340,7 @@ func 
buildCreateIndex(stmt *tree.CreateIndex, ctx CompilerContext) (*Plan, error default: return nil, moerr.NewNotSupportedf(ctx.GetContext(), "statement: '%v'", tree.String(stmt, dialect.MYSQL)) } - colMap := make(map[string]*ColDef) + colMap := make(map[string]*plan.ColDef) for _, col := range tableDef.Cols { colMap[col.Name] = col } @@ -3355,7 +3355,7 @@ func buildCreateIndex(stmt *tree.CreateIndex, ctx CompilerContext) (*Plan, error oriPriKeyName := getTablePriKeyName(tableDef.Pkey) createIndex.OriginTablePrimaryKey = oriPriKeyName - indexInfo := &plan.CreateTable{TableDef: &TableDef{}} + indexInfo := &plan.CreateTable{TableDef: &plan.TableDef{}} if uIdx != nil { if err := buildUniqueIndexTable(indexInfo, []*tree.UniqueIndex{uIdx}, colMap, oriPriKeyName, ctx); err != nil { return nil, err @@ -3378,7 +3378,7 @@ func buildCreateIndex(stmt *tree.CreateIndex, ctx CompilerContext) (*Plan, error createIndex.Table = tableName createIndex.TableDef = tableDef - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Ddl{ Ddl: &plan.DataDefinition{ DdlType: plan.DataDefinition_CREATE_INDEX, @@ -3390,7 +3390,7 @@ func buildCreateIndex(stmt *tree.CreateIndex, ctx CompilerContext) (*Plan, error }, nil } -func buildDropIndex(stmt *tree.DropIndex, ctx CompilerContext) (*Plan, error) { +func buildDropIndex(stmt *tree.DropIndex, ctx CompilerContext) (*plan.Plan, error) { dropIndex := &plan.DropIndex{} if len(stmt.TableName.SchemaName) == 0 { dropIndex.Database = ctx.DefaultDatabase() @@ -3432,7 +3432,7 @@ func buildDropIndex(stmt *tree.DropIndex, ctx CompilerContext) (*Plan, error) { return nil, moerr.NewInternalErrorf(ctx.GetContext(), "not found index: %s", dropIndex.IndexName) } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Ddl{ Ddl: &plan.DataDefinition{ DdlType: plan.DataDefinition_DROP_INDEX, @@ -3445,7 +3445,7 @@ func buildDropIndex(stmt *tree.DropIndex, ctx CompilerContext) (*Plan, error) { } // Get tabledef(col, viewsql, properties) for alterview. 
-func buildAlterView(stmt *tree.AlterView, ctx CompilerContext) (*Plan, error) { +func buildAlterView(stmt *tree.AlterView, ctx CompilerContext) (*plan.Plan, error) { viewName := string(stmt.Name.ObjectName) alterView := &plan.AlterView{ IfExists: stmt.IfExists, @@ -3500,7 +3500,7 @@ func buildAlterView(stmt *tree.AlterView, ctx CompilerContext) (*Plan, error) { alterView.TableDef.ViewSql = tableDef.ViewSql alterView.TableDef.Defs = tableDef.Defs - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Ddl{ Ddl: &plan.DataDefinition{ DdlType: plan.DataDefinition_ALTER_VIEW, @@ -3512,7 +3512,7 @@ func buildAlterView(stmt *tree.AlterView, ctx CompilerContext) (*Plan, error) { }, nil } -func buildRenameTable(stmt *tree.RenameTable, ctx CompilerContext) (*Plan, error) { +func buildRenameTable(stmt *tree.RenameTable, ctx CompilerContext) (*plan.Plan, error) { alterTables := stmt.AlterTables renameTables := make([]*plan.AlterTable, 0) @@ -3590,7 +3590,7 @@ func buildRenameTable(stmt *tree.RenameTable, ctx CompilerContext) (*Plan, error renameTables = append(renameTables, alterTablePlan) } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Ddl{ Ddl: &plan.DataDefinition{ DdlType: plan.DataDefinition_RENAME_TABLE, @@ -3611,7 +3611,7 @@ func formatTreeNode(opt tree.NodeFormatter) string { return ft.String() } -func buildAlterTableInplace(stmt *tree.AlterTable, ctx CompilerContext) (*Plan, error) { +func buildAlterTableInplace(stmt *tree.AlterTable, ctx CompilerContext) (*plan.Plan, error) { tableName := string(stmt.Table.ObjectName) databaseName := string(stmt.Table.SchemaName) if databaseName == "" { @@ -3650,7 +3650,7 @@ func buildAlterTableInplace(stmt *tree.AlterTable, ctx CompilerContext) (*Plan, ) } - colMap := make(map[string]*ColDef) + colMap := make(map[string]*plan.ColDef) for _, col := range tableDef.Cols { colMap[col.Name] = col } @@ -3828,7 +3828,7 @@ func buildAlterTableInplace(stmt *tree.AlterTable, ctx CompilerContext) (*Plan, } oriPriKeyName := 
getTablePriKeyName(tableDef.Pkey) - indexInfo := &plan.CreateTable{TableDef: &TableDef{}} + indexInfo := &plan.CreateTable{TableDef: &plan.TableDef{}} if err := buildUniqueIndexTable( indexInfo, []*tree.UniqueIndex{def}, @@ -3881,7 +3881,7 @@ func buildAlterTableInplace(stmt *tree.AlterTable, ctx CompilerContext) (*Plan, } oriPriKeyName := getTablePriKeyName(tableDef.Pkey) - indexInfo := &plan.CreateTable{TableDef: &TableDef{}} + indexInfo := &plan.CreateTable{TableDef: &plan.TableDef{}} if err := buildFullTextIndexTable( indexInfo, []*tree.FullTextIndex{def}, @@ -3937,7 +3937,7 @@ func buildAlterTableInplace(stmt *tree.AlterTable, ctx CompilerContext) (*Plan, oriPriKeyName := getTablePriKeyName(tableDef.Pkey) - indexInfo := &plan.CreateTable{TableDef: &TableDef{}} + indexInfo := &plan.CreateTable{TableDef: &plan.TableDef{}} if err := buildSecondaryIndexDef( indexInfo, []*tree.Index{def}, @@ -4152,7 +4152,7 @@ func buildAlterTableInplace(stmt *tree.AlterTable, ctx CompilerContext) (*Plan, } if alterTable.CopyTableDef == nil { - alterTable.CopyTableDef = DeepCopyTableDef(tableDef, true) + alterTable.CopyTableDef = plan.DeepCopyTableDef(tableDef, true) } // update new column info to copy_table_def @@ -4172,7 +4172,7 @@ func buildAlterTableInplace(stmt *tree.AlterTable, ctx CompilerContext) (*Plan, } if alterTable.CopyTableDef == nil { - alterTable.CopyTableDef = DeepCopyTableDef(tableDef, true) + alterTable.CopyTableDef = plan.DeepCopyTableDef(tableDef, true) } col := FindColumn( @@ -4278,7 +4278,7 @@ func buildAlterTableInplace(stmt *tree.AlterTable, ctx CompilerContext) (*Plan, alterTable.DetectSqls = detectSqls alterTable.UpdateFkSqls = updateSqls - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Ddl{ Ddl: &plan.DataDefinition{ DdlType: plan.DataDefinition_ALTER_TABLE, @@ -4290,7 +4290,7 @@ func buildAlterTableInplace(stmt *tree.AlterTable, ctx CompilerContext) (*Plan, }, nil } -func buildLockTables(stmt *tree.LockTableStmt, ctx CompilerContext) (*Plan, error) 
{ +func buildLockTables(stmt *tree.LockTableStmt, ctx CompilerContext) (*plan.Plan, error) { lockTables := make([]*plan.TableLockInfo, 0, len(stmt.TableLocks)) uniqueTableName := make(map[string]bool) @@ -4340,7 +4340,7 @@ func buildLockTables(stmt *tree.LockTableStmt, ctx CompilerContext) (*Plan, erro TableLocks: lockTables, } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Ddl{ Ddl: &plan.DataDefinition{ DdlType: plan.DataDefinition_LOCK_TABLES, @@ -4352,9 +4352,9 @@ func buildLockTables(stmt *tree.LockTableStmt, ctx CompilerContext) (*Plan, erro }, nil } -func buildUnLockTables(stmt *tree.UnLockTableStmt, ctx CompilerContext) (*Plan, error) { +func buildUnLockTables(stmt *tree.UnLockTableStmt, ctx CompilerContext) (*plan.Plan, error) { unLockTables := &plan.UnLockTables{} - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Ddl{ Ddl: &plan.DataDefinition{ DdlType: plan.DataDefinition_UNLOCK_TABLES, @@ -4391,7 +4391,7 @@ type FkData struct { // for fk refer except the self refer, it is same as the previous one. // but for fk self refer, it is different in not checking fk self refer instantly. // because it is not ready. It should be checked after the pk,uk has been ready. 
-func getForeignKeyData(ctx CompilerContext, dbName string, tableDef *TableDef, def *tree.ForeignKey) (*FkData, error) { +func getForeignKeyData(ctx CompilerContext, dbName string, tableDef *plan.TableDef, def *tree.ForeignKey) (*FkData, error) { refer := def.Refer fkData := FkData{ Def: &plan.ForeignKeyDef{ @@ -4408,7 +4408,7 @@ func getForeignKeyData(ctx CompilerContext, dbName string, tableDef *TableDef, d Cols: make([]string, len(def.KeyParts)), } fkData.ColTyps = make(map[int]*plan.Type) - name2ColDef := make(map[string]*ColDef) + name2ColDef := make(map[string]*plan.ColDef) for _, colDef := range tableDef.Cols { name2ColDef[colDef.Name] = colDef } @@ -4535,7 +4535,7 @@ Case 3: "a, c" can not be used due to they belong to the different primary key / unique key */ -func checkFkColsAreValid(ctx CompilerContext, fkData *FkData, parentTableDef *TableDef) error { +func checkFkColsAreValid(ctx CompilerContext, fkData *FkData, parentTableDef *plan.TableDef) error { //colId in parent table-> position in parent table columnIdPos := make(map[uint64]int) //columnName in parent table -> position in parent table @@ -4645,7 +4645,7 @@ func buildFkDataOfForwardRefer(ctx CompilerContext, } fkData.ColTyps = make(map[int]*plan.Type) - name2ColDef := make(map[string]*ColDef) + name2ColDef := make(map[string]*plan.ColDef) for _, def := range childTableDef.Cols { name2ColDef[def.Name] = def } @@ -4704,7 +4704,7 @@ func parseDuration(ctx context.Context, period uint64, unit string) (time.Durati return time.Duration(seconds), nil } -func buildCreatePitr(stmt *tree.CreatePitr, ctx CompilerContext) (*Plan, error) { +func buildCreatePitr(stmt *tree.CreatePitr, ctx CompilerContext) (*plan.Plan, error) { // only sys can create cluster level pitr currentAccount := ctx.GetAccountName() currentAccountId, err := ctx.GetAccountId() @@ -4779,7 +4779,7 @@ func buildCreatePitr(stmt *tree.CreatePitr, ctx CompilerContext) (*Plan, error) tableId = tableDef.TblId } - return &Plan{ + return 
&plan.Plan{ Plan: &plan.Plan_Ddl{ Ddl: &plan.DataDefinition{ DdlType: plan.DataDefinition_CREATE_PITR, @@ -4806,7 +4806,7 @@ func buildCreatePitr(stmt *tree.CreatePitr, ctx CompilerContext) (*Plan, error) }, nil } -func buildDropPitr(stmt *tree.DropPitr, ctx CompilerContext) (*Plan, error) { +func buildDropPitr(stmt *tree.DropPitr, ctx CompilerContext) (*plan.Plan, error) { ddlType := plan.DataDefinition_DROP_PITR // Remove privilege check, no account ID validation @@ -4816,7 +4816,7 @@ func buildDropPitr(stmt *tree.DropPitr, ctx CompilerContext) (*Plan, error) { Name: string(stmt.Name), } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Ddl{ Ddl: &plan.DataDefinition{ DdlType: ddlType, @@ -4828,12 +4828,12 @@ func buildDropPitr(stmt *tree.DropPitr, ctx CompilerContext) (*Plan, error) { }, nil } -func buildCreateCDC(stmt *tree.CreateCDC, ctx CompilerContext) (*Plan, error) { +func buildCreateCDC(stmt *tree.CreateCDC, ctx CompilerContext) (*plan.Plan, error) { accountId, err := ctx.GetAccountId() if err != nil { return nil, err } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Ddl{ Ddl: &plan.DataDefinition{ DdlType: plan.DataDefinition_CREATE_CDC, @@ -4856,12 +4856,12 @@ func buildCreateCDC(stmt *tree.CreateCDC, ctx CompilerContext) (*Plan, error) { }, nil } -func buildDropCDC(stmt *tree.DropCDC, ctx CompilerContext) (*Plan, error) { +func buildDropCDC(stmt *tree.DropCDC, ctx CompilerContext) (*plan.Plan, error) { accountId, err := ctx.GetAccountId() if err != nil { return nil, err } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Ddl{ Ddl: &plan.DataDefinition{ DdlType: plan.DataDefinition_DROP_CDC, diff --git a/pkg/sql/plan/build_ddl_test.go b/pkg/sql/planner/build_ddl_test.go similarity index 95% rename from pkg/sql/plan/build_ddl_test.go rename to pkg/sql/planner/build_ddl_test.go index f1b3065040095..89db9f78e661f 100644 --- a/pkg/sql/plan/build_ddl_test.go +++ b/pkg/sql/planner/build_ddl_test.go @@ -12,7 +12,7 @@ // See the License for the 
specific language governing permissions and // limitations under the License. -package plan +package planner import ( "context" @@ -41,8 +41,8 @@ func TestBuildAlterView(t *testing.T) { defer ctrl.Finish() type arg struct { - obj *ObjectRef - table *TableDef + obj *plan.ObjectRef + table *plan.TableDef } sql1 := "alter view v as select a from a" @@ -82,7 +82,7 @@ func TestBuildAlterView(t *testing.T) { &plan.ObjectRef{}, &plan.TableDef{ TableType: catalog.SystemOrdinaryRel, - Cols: []*ColDef{ + Cols: []*plan.ColDef{ { Name: "a", Typ: plan.Type{ @@ -103,7 +103,7 @@ func TestBuildAlterView(t *testing.T) { ctx.EXPECT().GetUserName().Return("sys:dump").AnyTimes() ctx.EXPECT().DefaultDatabase().Return("db").AnyTimes() ctx.EXPECT().Resolve(gomock.Any(), gomock.Any(), gomock.Any()).DoAndReturn( - func(schemaName string, tableName string, snapshot *Snapshot) (*ObjectRef, *TableDef, error) { + func(schemaName string, tableName string, snapshot *plan.Snapshot) (*plan.ObjectRef, *plan.TableDef, error) { if schemaName == "" { schemaName = "db" } @@ -168,8 +168,8 @@ func TestBuildLockTables(t *testing.T) { defer ctrl.Finish() type arg struct { - obj *ObjectRef - table *TableDef + obj *plan.ObjectRef + table *plan.TableDef } store := make(map[string]arg) @@ -182,7 +182,7 @@ func TestBuildLockTables(t *testing.T) { &plan.ObjectRef{}, &plan.TableDef{ TableType: catalog.SystemOrdinaryRel, - Cols: []*ColDef{ + Cols: []*plan.ColDef{ { Name: "a", Typ: plan.Type{ @@ -197,7 +197,7 @@ func TestBuildLockTables(t *testing.T) { ctx := NewMockCompilerContext2(ctrl) ctx.EXPECT().DefaultDatabase().Return("db").AnyTimes() ctx.EXPECT().Resolve(gomock.Any(), gomock.Any(), gomock.Any()).DoAndReturn( - func(schemaName string, tableName string, snapshot *Snapshot) (*ObjectRef, *TableDef, error) { + func(schemaName string, tableName string, snapshot *plan.Snapshot) (*plan.ObjectRef, *plan.TableDef, error) { if schemaName == "" { schemaName = "db" } @@ -226,7 +226,7 @@ func TestBuildLockTables(t 
*testing.T) { &plan.ObjectRef{}, &plan.TableDef{ TableType: catalog.SystemOrdinaryRel, - Cols: []*ColDef{ + Cols: []*plan.ColDef{ { Name: "a", Typ: plan.Type{ @@ -624,7 +624,7 @@ func TestBuildCreatePitr(t *testing.T) { ctx := &MockCompilerContext{} ctx.GetAccountNameFunc = func() string { return "sys" } ctx.GetAccountIdFunc = func() (uint32, error) { return 1, nil } - ctx.DatabaseExistsFunc = func(string, *Snapshot) bool { return false } + ctx.DatabaseExistsFunc = func(string, *plan.Snapshot) bool { return false } stmt := baseStmt() stmt.Level = tree.PITRLEVELDATABASE stmt.DatabaseName = "db1" @@ -637,8 +637,8 @@ func TestBuildCreatePitr(t *testing.T) { ctx := &MockCompilerContext{} ctx.GetAccountNameFunc = func() string { return "sys" } ctx.GetAccountIdFunc = func() (uint32, error) { return 1, nil } - ctx.DatabaseExistsFunc = func(string, *Snapshot) bool { return true } - ctx.GetDatabaseIdFunc = func(string, *Snapshot) (uint64, error) { return 123, nil } + ctx.DatabaseExistsFunc = func(string, *plan.Snapshot) bool { return true } + ctx.GetDatabaseIdFunc = func(string, *plan.Snapshot) (uint64, error) { return 123, nil } stmt := baseStmt() stmt.Level = tree.PITRLEVELDATABASE stmt.DatabaseName = "db1" @@ -651,8 +651,8 @@ func TestBuildCreatePitr(t *testing.T) { ctx := &MockCompilerContext{} ctx.GetAccountNameFunc = func() string { return "sys" } ctx.GetAccountIdFunc = func() (uint32, error) { return 1, nil } - ctx.DatabaseExistsFunc = func(string, *Snapshot) bool { return true } - ctx.ResolveFunc = func(string, string, *Snapshot) (*ObjectRef, *TableDef) { return nil, nil } + ctx.DatabaseExistsFunc = func(string, *plan.Snapshot) bool { return true } + ctx.ResolveFunc = func(string, string, *plan.Snapshot) (*plan.ObjectRef, *plan.TableDef) { return nil, nil } stmt := baseStmt() stmt.Level = tree.PITRLEVELTABLE stmt.DatabaseName = "db1" @@ -666,8 +666,10 @@ func TestBuildCreatePitr(t *testing.T) { ctx := &MockCompilerContext{} ctx.GetAccountNameFunc = func() string { 
return "sys" } ctx.GetAccountIdFunc = func() (uint32, error) { return 1, nil } - ctx.DatabaseExistsFunc = func(string, *Snapshot) bool { return true } - ctx.ResolveFunc = func(string, string, *Snapshot) (*ObjectRef, *TableDef) { return &ObjectRef{}, &TableDef{TblId: 456} } + ctx.DatabaseExistsFunc = func(string, *plan.Snapshot) bool { return true } + ctx.ResolveFunc = func(string, string, *plan.Snapshot) (*plan.ObjectRef, *plan.TableDef) { + return &plan.ObjectRef{}, &plan.TableDef{TblId: 456} + } stmt := baseStmt() stmt.Level = tree.PITRLEVELTABLE stmt.DatabaseName = "db1" diff --git a/pkg/sql/plan/build_delete.go b/pkg/sql/planner/build_delete.go similarity index 98% rename from pkg/sql/plan/build_delete.go rename to pkg/sql/planner/build_delete.go index f00246a831ffb..82a2acb276f77 100644 --- a/pkg/sql/plan/build_delete.go +++ b/pkg/sql/planner/build_delete.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "time" @@ -22,7 +22,7 @@ import ( v2 "github.com/matrixorigin/matrixone/pkg/util/metric/v2" ) -func buildDelete(stmt *tree.Delete, ctx CompilerContext, isPrepareStmt bool) (*Plan, error) { +func buildDelete(stmt *tree.Delete, ctx CompilerContext, isPrepareStmt bool) (*plan.Plan, error) { start := time.Now() defer func() { v2.TxnStatementBuildDeleteHistogram.Observe(time.Since(start).Seconds()) @@ -105,7 +105,7 @@ func buildDelete(stmt *tree.Delete, ctx CompilerContext, isPrepareStmt bool) (*P builder.tempOptimizeForDML() reCheckifNeedLockWholeTable(builder) query.StmtType = plan.Query_DELETE - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Query{ Query: query, }, diff --git a/pkg/sql/plan/build_dml_util.go b/pkg/sql/planner/build_dml_util.go similarity index 92% rename from pkg/sql/plan/build_dml_util.go rename to pkg/sql/planner/build_dml_util.go index f9a7037d6d929..e5e28da608b50 100644 --- a/pkg/sql/plan/build_dml_util.go +++ 
b/pkg/sql/planner/build_dml_util.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "context" @@ -22,9 +22,6 @@ import ( "github.com/bytedance/sonic" "github.com/google/uuid" - "go.uber.org/zap" - "golang.org/x/exp/slices" - "github.com/matrixorigin/matrixone/pkg/catalog" "github.com/matrixorigin/matrixone/pkg/common/moerr" moruntime "github.com/matrixorigin/matrixone/pkg/common/runtime" @@ -32,12 +29,14 @@ import ( "github.com/matrixorigin/matrixone/pkg/defines" "github.com/matrixorigin/matrixone/pkg/logutil" "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" "github.com/matrixorigin/matrixone/pkg/sql/util" "github.com/matrixorigin/matrixone/pkg/txn/trace" "github.com/matrixorigin/matrixone/pkg/util/executor" "github.com/matrixorigin/matrixone/pkg/util/sysview" + "go.uber.org/zap" + "golang.org/x/exp/slices" ) // TODO: choose either PostInsertFullText or PreInsertFullText @@ -81,8 +80,8 @@ func putDeleteNodeInfo(info *deleteNodeInfo) { } type dmlPlanCtx struct { - objRef *ObjectRef - tableDef *TableDef + objRef *plan.ObjectRef + tableDef *plan.TableDef beginIdx int sourceStep int32 isMulti bool @@ -97,14 +96,14 @@ type dmlPlanCtx struct { lockTable bool //we need lock table in stmt: delete from tbl checkInsertPkDup bool //if we need check for duplicate values in insert batch. eg:insert into t values (1). 
load data will not check updatePkCol bool //if update stmt will update the primary key or one of pks - pkFilterExprs []*Expr + pkFilterExprs []*plan.Expr isDeleteWithoutFilters bool } // information of deleteNode, which is about the deleted table type deleteNodeInfo struct { - objRef *ObjectRef - tableDef *TableDef + objRef *plan.ObjectRef + tableDef *plan.TableDef IsClusterTable bool deleteIndex int // The array index position of the rowid column indexTableNames []string @@ -118,15 +117,15 @@ type deleteNodeInfo struct { // buildInsertPlans build insert plan. func buildInsertPlans( ctx CompilerContext, builder *QueryBuilder, bindCtx *BindContext, stmt *tree.Insert, - objRef *ObjectRef, tableDef *TableDef, lastNodeId int32, ifExistAutoPkCol bool, + objRef *plan.ObjectRef, tableDef *plan.TableDef, lastNodeId int32, ifExistAutoPkCol bool, insertWithoutUniqueKeyMap map[string]bool, ifInsertFromUniqueColMap map[string]bool, ) error { var err error var insertColsNameFromStmt []string - var pkFilterExpr []*Expr - var newPartitionExpr *Expr + var pkFilterExpr []*plan.Expr + var newPartitionExpr *plan.Expr if stmt != nil { insertColsNameFromStmt, err = getInsertColsFromStmt(ctx.GetContext(), stmt, tableDef) if err != nil { @@ -229,7 +228,7 @@ func buildUpdatePlans(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindC Children: []int32{preNodeId}, PreInsertCtx: &plan.PreInsertCtx{ Ref: updatePlanCtx.objRef, - TableDef: DeepCopyTableDef(updatePlanCtx.tableDef, true), + TableDef: plan.DeepCopyTableDef(updatePlanCtx.tableDef, true), HasAutoCol: true, ColOffset: colOffset, IsNewUpdate: true, @@ -249,7 +248,7 @@ func buildUpdatePlans(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindC // sink_scan -> project -> preinsert -> sink lastNodeId = appendSinkScanNode(builder, bindCtx, updatePlanCtx.sourceStep) lastNode := builder.qry.Nodes[lastNodeId] - newCols := make([]*ColDef, 0, len(updatePlanCtx.tableDef.Cols)) + newCols := make([]*plan.ColDef, 0, 
len(updatePlanCtx.tableDef.Cols)) oldRowIdPos := len(updatePlanCtx.tableDef.Cols) - 1 for _, col := range updatePlanCtx.tableDef.Cols { if col.Hidden && col.Name != catalog.FakePrimaryKeyColName { @@ -259,7 +258,7 @@ func buildUpdatePlans(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindC } updatePlanCtx.tableDef.Cols = newCols insertColLength := len(updatePlanCtx.insertColPos) + 1 - projectList := make([]*Expr, insertColLength) + projectList := make([]*plan.Expr, insertColLength) for i, idx := range updatePlanCtx.insertColPos { name := "" if col, ok := lastNode.ProjectList[idx].Expr.(*plan.Expr_Col); ok { @@ -286,7 +285,7 @@ func buildUpdatePlans(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindC } //append project node - projectNode := &Node{ + projectNode := &plan.Node{ NodeType: plan.Node_PROJECT, Children: []int32{lastNodeId}, ProjectList: projectList, @@ -301,7 +300,7 @@ func buildUpdatePlans(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindC // build insert plan. 
insertBindCtx := NewBindContext(builder, nil) - var partitionExpr *Expr + var partitionExpr *plan.Expr ifExistAutoPkCol := false ifNeedCheckPkDup := true var indexSourceColTypes []*plan.Type @@ -457,8 +456,8 @@ func buildDeletePlans(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindC } //delete data in parent table may trigger some actions in the child table - var childObjRef *ObjectRef - var childTableDef *TableDef + var childObjRef *plan.ObjectRef + var childTableDef *plan.TableDef if tableId == 0 { //fk self refer childObjRef = delCtx.objRef @@ -472,14 +471,14 @@ func buildDeletePlans(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindC childPosMap := make(map[string]int32) childTypMap := make(map[string]*plan.Type) childId2name := make(map[uint64]string) - childProjectList := make([]*Expr, len(childTableDef.Cols)) - childForJoinProject := make([]*Expr, len(childTableDef.Cols)) + childProjectList := make([]*plan.Expr, len(childTableDef.Cols)) + childForJoinProject := make([]*plan.Expr, len(childTableDef.Cols)) childRowIdPos := -1 for idx, col := range childTableDef.Cols { childPosMap[col.Name] = int32(idx) childTypMap[col.Name] = &col.Typ childId2name[col.ColId] = col.Name - childProjectList[idx] = &Expr{ + childProjectList[idx] = &plan.Expr{ Typ: col.Typ, Expr: &plan.Expr_Col{ Col: &plan.ColRef{ @@ -488,7 +487,7 @@ func buildDeletePlans(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindC }, }, } - childForJoinProject[idx] = &Expr{ + childForJoinProject[idx] = &plan.Expr{ Typ: col.Typ, Expr: &plan.Expr_Col{ Col: &plan.ColRef{ @@ -524,14 +523,14 @@ func buildDeletePlans(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindC } // build join conds - joinConds := make([]*Expr, len(fk.Cols)) - rightConds := make([]*Expr, len(fk.Cols)) - leftConds := make([]*Expr, len(fk.Cols)) + joinConds := make([]*plan.Expr, len(fk.Cols)) + rightConds := make([]*plan.Expr, len(fk.Cols)) + leftConds := make([]*plan.Expr, len(fk.Cols)) // use for join's 
projection & filter's condExpr - var oneLeftCond *Expr + var oneLeftCond *plan.Expr var oneLeftCondName string updateChildColPosMap := make(map[string]int) - updateChildColExpr := make([]*Expr, len(fk.Cols)) // use for update + updateChildColExpr := make([]*plan.Expr, len(fk.Cols)) // use for update insertColPos := make([]int, 0, len(childTableDef.Cols)-1) // use for update childColLength := len(childTableDef.Cols) childTablePkMap := make(map[string]struct{}) @@ -545,7 +544,7 @@ func buildDeletePlans(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindC childColumnName := col.Name originColumnName := idNameMap[fk.ForeignCols[i]] - leftExpr := &Expr{ + leftExpr := &plan.Expr{ Typ: *nameTypMap[originColumnName], Expr: &plan.Expr_Col{ Col: &plan.ColRef{ @@ -556,7 +555,7 @@ func buildDeletePlans(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindC }, } if pos, ok := delCtx.updateColPosMap[originColumnName]; ok { - updateChildColExpr[i] = &Expr{ + updateChildColExpr[i] = &plan.Expr{ Typ: baseProject[pos].Typ, Expr: &plan.Expr_Col{ Col: &plan.ColRef{ @@ -583,7 +582,7 @@ func buildDeletePlans(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindC if _, exists := childTablePkMap[childColumnName]; exists { updatePk = true } - condExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*Expr{leftExpr, rightExpr}) + condExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*plan.Expr{leftExpr, rightExpr}) if err != nil { return err } @@ -618,13 +617,13 @@ func buildDeletePlans(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindC lastNodeId = appendSinkScanNode(builder, bindCtx, delCtx.sourceStep) // deal with case: update t1 set a = a. 
then do not need to check constraint if isUpdate { - var filterExpr, tmpExpr *Expr + var filterExpr, tmpExpr *plan.Expr for updateName, newIdx := range updateRefColumn { oldIdx := nameIdxMap[updateName] - tmpExpr, err = BindFuncExprImplByPlanExpr(builder.GetContext(), "!=", []*Expr{{ + tmpExpr, err = BindFuncExprImplByPlanExpr(builder.GetContext(), "!=", []*plan.Expr{{ Typ: *nameTypMap[updateName], Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ ColPos: oldIdx, Name: updateName, }, @@ -632,7 +631,7 @@ func buildDeletePlans(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindC }, { Typ: *nameTypMap[updateName], Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ ColPos: newIdx, Name: updateName, }, @@ -644,7 +643,7 @@ func buildDeletePlans(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindC if filterExpr == nil { filterExpr = tmpExpr } else { - filterExpr, err = BindFuncExprImplByPlanExpr(builder.GetContext(), "or", []*Expr{filterExpr, tmpExpr}) + filterExpr, err = BindFuncExprImplByPlanExpr(builder.GetContext(), "or", []*plan.Expr{filterExpr, tmpExpr}) if err != nil { return nil } @@ -653,7 +652,7 @@ func buildDeletePlans(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindC lastNodeId = builder.appendNode(&plan.Node{ NodeType: plan.Node_FILTER, Children: []int32{lastNodeId}, - FilterList: []*Expr{filterExpr}, + FilterList: []*plan.Expr{filterExpr}, ProjectList: getProjectionByLastNode(builder, lastNodeId), }, bindCtx) } @@ -671,7 +670,7 @@ func buildDeletePlans(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindC update t1 set a = NULL where a = 4; --> ERROR 20101 (HY000): internal error: unexpected input batch for column expression */ - copiedTableDef := DeepCopyTableDef(childTableDef, true) + copiedTableDef := plan.DeepCopyTableDef(childTableDef, true) rightId := builder.appendNode(&plan.Node{ NodeType: plan.Node_TABLE_SCAN, Stats: &plan.Stats{}, @@ -685,10 +684,10 @@ func buildDeletePlans(ctx CompilerContext, builder 
*QueryBuilder, bindCtx *BindC Children: []int32{lastNodeId, rightId}, JoinType: plan.Node_SEMI, OnList: joinConds, - ProjectList: []*Expr{oneLeftCond}, + ProjectList: []*plan.Expr{oneLeftCond}, }, bindCtx) - colExpr := &Expr{ + colExpr := &plan.Expr{ Typ: oneLeftCond.Typ, Expr: &plan.Expr_Col{ Col: &plan.ColRef{ @@ -697,18 +696,18 @@ func buildDeletePlans(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindC }, } errExpr := makePlan2StringConstExprWithType("Cannot delete or update a parent row: a foreign key constraint fails") - isEmptyExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "isempty", []*Expr{colExpr}) + isEmptyExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "isempty", []*plan.Expr{colExpr}) if err != nil { return err } - assertExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "assert", []*Expr{isEmptyExpr, errExpr}) + assertExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "assert", []*plan.Expr{isEmptyExpr, errExpr}) if err != nil { return err } - filterNode := &Node{ + filterNode := &plan.Node{ NodeType: plan.Node_FILTER, Children: []int32{lastNodeId}, - FilterList: []*Expr{assertExpr}, + FilterList: []*plan.Expr{assertExpr}, ProjectList: getProjectionByLastNode(builder, lastNodeId), IsEnd: true, } @@ -721,7 +720,7 @@ func buildDeletePlans(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindC NodeType: plan.Node_TABLE_SCAN, Stats: &plan.Stats{}, ObjRef: childObjRef, - TableDef: DeepCopyTableDef(childTableDef, true), + TableDef: plan.DeepCopyTableDef(childTableDef, true), ProjectList: childProjectList, }, bindCtx) lastNodeId = builder.appendNode(&plan.Node{ @@ -737,13 +736,13 @@ func buildDeletePlans(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindC projectProjection = append(projectProjection, &plan.Expr{ Typ: e.Typ, Expr: &plan.Expr_Lit{ - Lit: &Const{ + Lit: &plan.Literal{ Isnull: true, }, }, }) } - lastNodeId = builder.appendNode(&Node{ + lastNodeId = 
builder.appendNode(&plan.Node{ NodeType: plan.Node_PROJECT, Children: []int32{lastNodeId}, ProjectList: projectProjection, @@ -800,7 +799,7 @@ func buildDeletePlans(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindC upPlanCtx := getDmlPlanCtx() upPlanCtx.objRef = childObjRef - upPlanCtx.tableDef = DeepCopyTableDef(childTableDef, true) + upPlanCtx.tableDef = plan.DeepCopyTableDef(childTableDef, true) upPlanCtx.updateColLength = len(rightConds) upPlanCtx.isMulti = false upPlanCtx.rowIdPos = childRowIdPos @@ -867,9 +866,9 @@ func buildDeletePlans(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindC // update f set a = 10 where b=1; we need update c only once for 2 rows. not three times for 6 rows. func appendAggNodeForFkJoin(builder *QueryBuilder, bindCtx *BindContext, lastNodeId int32) int32 { groupByList := getProjectionByLastNode(builder, lastNodeId) - aggProject := make([]*Expr, len(groupByList)) + aggProject := make([]*plan.Expr, len(groupByList)) for i, e := range groupByList { - aggProject[i] = &Expr{ + aggProject[i] = &plan.Expr{ Typ: e.Typ, Expr: &plan.Expr_Col{ Col: &plan.ColRef{ @@ -879,7 +878,7 @@ func appendAggNodeForFkJoin(builder *QueryBuilder, bindCtx *BindContext, lastNod }, } } - lastNodeId = builder.appendNode(&Node{ + lastNodeId = builder.appendNode(&plan.Node{ NodeType: plan.Node_AGG, GroupBy: groupByList, Children: []int32{lastNodeId}, @@ -893,9 +892,9 @@ func appendAggNodeForFkJoin(builder *QueryBuilder, bindCtx *BindContext, lastNod // buildInsertPlansWithRelatedHiddenTable build insert plan recursively for origin table func buildInsertPlansWithRelatedHiddenTable( - stmt *tree.Insert, ctx CompilerContext, builder *QueryBuilder, bindCtx *BindContext, objRef *ObjectRef, - tableDef *TableDef, updateColLength int, sourceStep int32, addAffectedRows bool, isFkRecursionCall bool, - updatePkCol bool, pkFilterExprs []*Expr, partitionExpr *Expr, ifExistAutoPkCol bool, + stmt *tree.Insert, ctx CompilerContext, builder *QueryBuilder, bindCtx 
*BindContext, objRef *plan.ObjectRef, + tableDef *plan.TableDef, updateColLength int, sourceStep int32, addAffectedRows bool, isFkRecursionCall bool, + updatePkCol bool, pkFilterExprs []*plan.Expr, partitionExpr *plan.Expr, ifExistAutoPkCol bool, checkInsertPkDupForHiddenIndexTable bool, indexSourceColTypes []*plan.Type, fuzzymessage *OriginTableMessageForFuzzy, insertWithoutUniqueKeyMap map[string]bool, ifInsertFromUniqueColMap map[string]bool, updateColPosMap map[string]int, skipIndexesCopy map[string]bool, @@ -939,7 +938,7 @@ func buildInsertPlansWithRelatedHiddenTable( multiTableIndexes[indexdef.IndexName] = &MultiTableIndex{ IndexAlgo: catalog.ToLower(indexdef.IndexAlgo), IndexAlgoParams: indexdef.IndexAlgoParams, - IndexDefs: make(map[string]*IndexDef), + IndexDefs: make(map[string]*plan.IndexDef), } } multiTableIndexes[indexdef.IndexName].IndexDefs[catalog.ToLower(indexdef.IndexAlgoTableType)] = indexdef @@ -997,9 +996,9 @@ func buildInsertPlansWithRelatedHiddenTable( // sink_scan -> Fuzzyfilter -- (if need to check pk duplicate) // table_scan -----^ func makeOneInsertPlan( - ctx CompilerContext, builder *QueryBuilder, bindCtx *BindContext, objRef *ObjectRef, tableDef *TableDef, + ctx CompilerContext, builder *QueryBuilder, bindCtx *BindContext, objRef *plan.ObjectRef, tableDef *plan.TableDef, updateColLength int, sourceStep int32, addAffectedRows bool, isFkRecursionCall bool, updatePkCol bool, - pkFilterExprs []*Expr, partitionExpr *Expr, ifExistAutoPkCol bool, ifCheckPkDup bool, ifInsertFromUnique bool, + pkFilterExprs []*plan.Expr, partitionExpr *plan.Expr, ifExistAutoPkCol bool, ifCheckPkDup bool, ifInsertFromUnique bool, indexSourceColTypes []*plan.Type, fuzzymessage *OriginTableMessageForFuzzy, ) (err error) { @@ -1034,7 +1033,7 @@ func makeOneInsertPlan( // appendPureInsertBranch appends the pure insert branch to the query builder. // It includes the sink scan node, project node (if necessary), and insert node. 
// The last node ID of the branch is returned. -func appendPureInsertBranch(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindContext, objRef *ObjectRef, tableDef *TableDef, sourceStep int32, addAffectedRows bool) { +func appendPureInsertBranch(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindContext, objRef *plan.ObjectRef, tableDef *plan.TableDef, sourceStep int32, addAffectedRows bool) { lastNodeId := appendSinkScanNode(builder, bindCtx, sourceStep) // append project node if necessary @@ -1044,7 +1043,7 @@ func appendPureInsertBranch(ctx CompilerContext, builder *QueryBuilder, bindCtx projectProjection = projectProjection[:len(tableDef.Cols)] } - projectNode := &Node{ + projectNode := &plan.Node{ NodeType: plan.Node_PROJECT, Children: []int32{lastNodeId}, ProjectList: projectProjection, @@ -1059,7 +1058,7 @@ func appendPureInsertBranch(ctx CompilerContext, builder *QueryBuilder, bindCtx insertProjection = insertProjection[:len(tableDef.Cols)] } - insertNode := &Node{ + insertNode := &plan.Node{ NodeType: plan.Node_INSERT, Children: []int32{lastNodeId}, ObjRef: objRef, @@ -1101,11 +1100,11 @@ func makeOneDeletePlan( }, }, } - filterExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "is_not_null", []*Expr{rowIdColExpr}) + filterExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "is_not_null", []*plan.Expr{rowIdColExpr}) if err != nil { return -1, err } - filterNode := &Node{ + filterNode := &plan.Node{ NodeType: plan.Node_FILTER, Children: []int32{lastNodeId}, FilterList: []*plan.Expr{filterExpr}, @@ -1125,7 +1124,7 @@ func makeOneDeletePlan( // lockTarget.IsPartitionTable = true // lockTarget.PartitionTableIds = delNodeInfo.partTableIDs // } - lockNode := &Node{ + lockNode := &plan.Node{ NodeType: plan.Node_LOCK_OP, Children: []int32{lastNodeId}, LockTargets: []*plan.LockTarget{lockTarget}, @@ -1147,7 +1146,7 @@ func makeOneDeletePlan( truncateTable.IsDelete = true } // append delete node - deleteNode := &Node{ + deleteNode := 
&plan.Node{ NodeType: plan.Node_DELETE, Children: []int32{lastNodeId}, // ProjectList: getProjectionByLastNode(builder, lastNodeId), @@ -1167,13 +1166,13 @@ func makeOneDeletePlan( return lastNodeId, nil } -func getProjectionByLastNodeForRightJoin(builder *QueryBuilder, lastNodeId int32) []*Expr { +func getProjectionByLastNodeForRightJoin(builder *QueryBuilder, lastNodeId int32) []*plan.Expr { lastNode := builder.qry.Nodes[lastNodeId] projLength := len(lastNode.ProjectList) if projLength == 0 { return getProjectionByLastNode(builder, lastNode.Children[0]) } - projection := make([]*Expr, len(lastNode.ProjectList)) + projection := make([]*plan.Expr, len(lastNode.ProjectList)) for i, expr := range lastNode.ProjectList { name := "" if col, ok := expr.Expr.(*plan.Expr_Col); ok { @@ -1193,13 +1192,13 @@ func getProjectionByLastNodeForRightJoin(builder *QueryBuilder, lastNodeId int32 return projection } -func getProjectionByLastNode(builder *QueryBuilder, lastNodeId int32) []*Expr { +func getProjectionByLastNode(builder *QueryBuilder, lastNodeId int32) []*plan.Expr { lastNode := builder.qry.Nodes[lastNodeId] projLength := len(lastNode.ProjectList) if projLength == 0 { return getProjectionByLastNode(builder, lastNode.Children[0]) } - projection := make([]*Expr, len(lastNode.ProjectList)) + projection := make([]*plan.Expr, len(lastNode.ProjectList)) for i, expr := range lastNode.ProjectList { name := "" if col, ok := expr.Expr.(*plan.Expr_Col); ok { @@ -1219,13 +1218,13 @@ func getProjectionByLastNode(builder *QueryBuilder, lastNodeId int32) []*Expr { return projection } -func getProjectionByLastNodeWithTag(builder *QueryBuilder, lastNodeId, tag int32) []*Expr { +func getProjectionByLastNodeWithTag(builder *QueryBuilder, lastNodeId, tag int32) []*plan.Expr { lastNode := builder.qry.Nodes[lastNodeId] projLength := len(lastNode.ProjectList) if projLength == 0 { return getProjectionByLastNodeWithTag(builder, lastNode.Children[0], tag) } - projection := make([]*Expr, 
len(lastNode.ProjectList)) + projection := make([]*plan.Expr, len(lastNode.ProjectList)) for i, expr := range lastNode.ProjectList { name := "" if col, ok := expr.Expr.(*plan.Expr_Col); ok { @@ -1245,7 +1244,7 @@ func getProjectionByLastNodeWithTag(builder *QueryBuilder, lastNodeId, tag int32 return projection } -func haveUniqueKey(tableDef *TableDef) bool { +func haveUniqueKey(tableDef *plan.TableDef) bool { for _, indexdef := range tableDef.Indexes { if indexdef.Unique { return true @@ -1254,7 +1253,7 @@ func haveUniqueKey(tableDef *TableDef) bool { return false } -func haveSecondaryKey(tableDef *TableDef) bool { +func haveSecondaryKey(tableDef *plan.TableDef) bool { for _, indexdef := range tableDef.Indexes { if !indexdef.Unique && indexdef.TableExist { return true @@ -1268,7 +1267,7 @@ func haveSecondaryKey(tableDef *TableDef) bool { // 1. There is no primary key in the table. // 2. The unique key is the only unique key of the table. // 3. The columns of the unique key are not null by default. -func isPrimaryKey(tableDef *TableDef, colNames []string) bool { +func isPrimaryKey(tableDef *plan.TableDef, colNames []string) bool { // Ensure there is no real primary key in the table. // FakePrimaryKeyColName is for tables without a primary key. // So we need to exclude FakePrimaryKeyColName. 
@@ -1308,8 +1307,8 @@ func isMultiplePriKey(indexdef *plan.IndexDef) bool { } // makeDeleteNodeInfo Get `DeleteNode` based on TableDef -func makeDeleteNodeInfo(ctx CompilerContext, objRef *ObjectRef, tableDef *TableDef, - deleteIdx int, addAffectedRows bool, pkPos int, pkTyp Type, lockTable bool) *deleteNodeInfo { +func makeDeleteNodeInfo(ctx CompilerContext, objRef *plan.ObjectRef, tableDef *plan.TableDef, + deleteIdx int, addAffectedRows bool, pkPos int, pkTyp plan.Type, lockTable bool) *deleteNodeInfo { delNodeInfo := getDeleteNodeInfo() delNodeInfo.objRef = objRef delNodeInfo.tableDef = tableDef @@ -1350,7 +1349,7 @@ func appendSinkScanNode(builder *QueryBuilder, bindCtx *BindContext, sourceStep lastNodeId := builder.qry.Steps[sourceStep] // lastNode := builder.qry.Nodes[lastNodeId] sinkScanProject := getProjectionByLastNode(builder, lastNodeId) - sinkScanNode := &Node{ + sinkScanNode := &plan.Node{ NodeType: plan.Node_SINK_SCAN, SourceStep: []int32{sourceStep}, ProjectList: sinkScanProject, @@ -1363,17 +1362,17 @@ func appendSinkScanNodeWithTag(builder *QueryBuilder, bindCtx *BindContext, sour lastNodeId := builder.qry.Steps[sourceStep] // lastNode := builder.qry.Nodes[lastNodeId] sinkScanProject := getProjectionByLastNodeWithTag(builder, lastNodeId, tag) - sinkScanNode := &Node{ + sinkScanNode := &plan.Node{ NodeType: plan.Node_SINK_SCAN, SourceStep: []int32{sourceStep}, ProjectList: sinkScanProject, BindingTags: []int32{tag}, - TableDef: &TableDef{Name: bindCtx.cteName}, + TableDef: &plan.TableDef{Name: bindCtx.cteName}, } b := bindCtx.bindings[0] - sinkScanNode.TableDef.Cols = make([]*ColDef, len(b.cols)) + sinkScanNode.TableDef.Cols = make([]*plan.ColDef, len(b.cols)) for i, col := range b.cols { - sinkScanNode.TableDef.Cols[i] = &ColDef{ + sinkScanNode.TableDef.Cols[i] = &plan.ColDef{ Name: col, Hidden: b.colIsHidden[i], Typ: *b.types[i], @@ -1387,17 +1386,17 @@ func appendRecursiveScanNode(builder *QueryBuilder, bindCtx *BindContext, source lastNodeId := 
builder.qry.Steps[sourceStep] // lastNode := builder.qry.Nodes[lastNodeId] recursiveScanProject := getProjectionByLastNodeWithTag(builder, lastNodeId, tag) - recursiveScanNode := &Node{ + recursiveScanNode := &plan.Node{ NodeType: plan.Node_RECURSIVE_SCAN, SourceStep: []int32{sourceStep}, ProjectList: recursiveScanProject, BindingTags: []int32{tag}, - TableDef: &TableDef{Name: bindCtx.cteName}, + TableDef: &plan.TableDef{Name: bindCtx.cteName}, } b := bindCtx.bindings[0] - recursiveScanNode.TableDef.Cols = make([]*ColDef, len(b.cols)) + recursiveScanNode.TableDef.Cols = make([]*plan.ColDef, len(b.cols)) for i, col := range b.cols { - recursiveScanNode.TableDef.Cols[i] = &ColDef{ + recursiveScanNode.TableDef.Cols[i] = &plan.ColDef{ Name: col, Hidden: b.colIsHidden[i], Typ: *b.types[i], @@ -1411,7 +1410,7 @@ func appendCTEScanNode(builder *QueryBuilder, bindCtx *BindContext, sourceStep, lastNodeId := builder.qry.Steps[sourceStep] // lastNode := builder.qry.Nodes[lastNodeId] recursiveScanProject := getProjectionByLastNodeWithTag(builder, lastNodeId, tag) - recursiveScanNode := &Node{ + recursiveScanNode := &plan.Node{ NodeType: plan.Node_RECURSIVE_CTE, SourceStep: []int32{sourceStep}, ProjectList: recursiveScanProject, @@ -1423,7 +1422,7 @@ func appendCTEScanNode(builder *QueryBuilder, bindCtx *BindContext, sourceStep, func appendSinkNode(builder *QueryBuilder, bindCtx *BindContext, lastNodeId int32) int32 { sinkProject := getProjectionByLastNode(builder, lastNodeId) - sinkNode := &Node{ + sinkNode := &plan.Node{ NodeType: plan.Node_SINK, Children: []int32{lastNodeId}, ProjectList: sinkProject, @@ -1434,7 +1433,7 @@ func appendSinkNode(builder *QueryBuilder, bindCtx *BindContext, lastNodeId int3 func appendSinkNodeWithTag(builder *QueryBuilder, bindCtx *BindContext, lastNodeId, tag int32) int32 { sinkProject := getProjectionByLastNodeWithTag(builder, lastNodeId, tag) - sinkNode := &Node{ + sinkNode := &plan.Node{ NodeType: plan.Node_SINK, Children: 
[]int32{lastNodeId}, ProjectList: sinkProject, @@ -1454,18 +1453,18 @@ func appendSinkNodeWithTag(builder *QueryBuilder, bindCtx *BindContext, lastNode // } func appendAggCountGroupByColExpr(builder *QueryBuilder, bindCtx *BindContext, lastNodeId int32, colExpr *plan.Expr) (int32, error) { - aggExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "starcount", []*Expr{colExpr}) + aggExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "starcount", []*plan.Expr{colExpr}) if err != nil { return -1, err } countType := types.T_int64.ToType() - groupByNode := &Node{ + groupByNode := &plan.Node{ NodeType: plan.Node_AGG, Children: []int32{lastNodeId}, - GroupBy: []*Expr{colExpr}, - AggList: []*Expr{aggExpr}, - ProjectList: []*Expr{ + GroupBy: []*plan.Expr{colExpr}, + AggList: []*plan.Expr{aggExpr}, + ProjectList: []*plan.Expr{ { Typ: makePlan2Type(&countType), Expr: &plan.Expr_Col{ @@ -1490,7 +1489,7 @@ func appendAggCountGroupByColExpr(builder *QueryBuilder, bindCtx *BindContext, l return lastNodeId, nil } -func getPkPos(tableDef *TableDef, ignoreFakePK bool) (int, Type) { +func getPkPos(tableDef *plan.TableDef, ignoreFakePK bool) (int, plan.Type) { pkName := tableDef.Pkey.PkeyColName // if pkName == catalog.CPrimaryKeyColName { // return len(tableDef.Cols) - 1, makeHiddenColTyp() @@ -1503,10 +1502,10 @@ func getPkPos(tableDef *TableDef, ignoreFakePK bool) (int, Type) { return i, col.Typ } } - return -1, Type{} + return -1, plan.Type{} } -func getRowIdPos(tableDef *TableDef) int { +func getRowIdPos(tableDef *plan.TableDef) int { for i, col := range tableDef.Cols { if col.Name == catalog.Row_ID { return i @@ -1515,8 +1514,8 @@ func getRowIdPos(tableDef *TableDef) int { return -1 } -func getHiddenColumnForPreInsert(tableDef *TableDef) ([]Type, []string) { - var typs []Type +func getHiddenColumnForPreInsert(tableDef *plan.TableDef) ([]plan.Type, []string) { + var typs []plan.Type var names []string if tableDef.Pkey != nil && tableDef.Pkey.PkeyColName == 
catalog.CPrimaryKeyColName { typs = append(typs, makeHiddenColTyp()) @@ -1528,7 +1527,7 @@ func getHiddenColumnForPreInsert(tableDef *TableDef) ([]Type, []string) { return typs, names } -func appendJoinNodeForParentFkCheck(builder *QueryBuilder, bindCtx *BindContext, objRef *ObjectRef, tableDef *TableDef, baseNodeId int32) (int32, error) { +func appendJoinNodeForParentFkCheck(builder *QueryBuilder, bindCtx *BindContext, objRef *plan.ObjectRef, tableDef *plan.TableDef, baseNodeId int32) (int32, error) { typMap := make(map[string]plan.Type) id2name := make(map[uint64]string) name2pos := make(map[string]int) @@ -1541,12 +1540,12 @@ func appendJoinNodeForParentFkCheck(builder *QueryBuilder, bindCtx *BindContext, //for stmt: update c1 set ref_col = null where col > 0; //we will skip foreign key constraint check when set null - var filterConds []*Expr + var filterConds []*plan.Expr for _, fk := range tableDef.Fkeys { for _, colId := range fk.Cols { for fIdx, col := range tableDef.Cols { if col.ColId == colId { - colExpr := &Expr{ + colExpr := &plan.Expr{ Typ: col.Typ, Expr: &plan.Expr_Col{ Col: &plan.ColRef{ @@ -1555,7 +1554,7 @@ func appendJoinNodeForParentFkCheck(builder *QueryBuilder, bindCtx *BindContext, }, }, } - condExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "isnotnull", []*Expr{colExpr}) + condExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "isnotnull", []*plan.Expr{colExpr}) if err != nil { return -1, err } @@ -1564,7 +1563,7 @@ func appendJoinNodeForParentFkCheck(builder *QueryBuilder, bindCtx *BindContext, } } } - baseNodeId = builder.appendNode(&Node{ + baseNodeId = builder.appendNode(&plan.Node{ NodeType: plan.Node_FILTER, Children: []int32{baseNodeId}, FilterList: filterConds, @@ -1590,17 +1589,17 @@ func appendJoinNodeForParentFkCheck(builder *QueryBuilder, bindCtx *BindContext, if parentTableDef == nil { return -1, moerr.NewInternalErrorf(builder.GetContext(), "parent table %d not found", fk.ForeignTbl) } - newTableDef := 
DeepCopyTableDef(parentTableDef, false) + newTableDef := plan.DeepCopyTableDef(parentTableDef, false) joinConds := make([]*plan.Expr, 0) for _, col := range parentTableDef.Cols { if fkIdx, ok := fkeyId2Idx[col.ColId]; ok { rightPos := len(newTableDef.Cols) - newTableDef.Cols = append(newTableDef.Cols, DeepCopyColDef(col)) + newTableDef.Cols = append(newTableDef.Cols, plan.DeepCopyColDef(col)) parentColumnName := col.Name childColumnName := id2name[fk.Cols[fkIdx]] - leftExpr := &Expr{ + leftExpr := &plan.Expr{ Typ: typMap[childColumnName], Expr: &plan.Expr_Col{ Col: &plan.ColRef{ @@ -1620,7 +1619,7 @@ func appendJoinNodeForParentFkCheck(builder *QueryBuilder, bindCtx *BindContext, }, }, } - condExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*Expr{leftExpr, rightExpr}) + condExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*plan.Expr{leftExpr, rightExpr}) if err != nil { return -1, err } @@ -1631,7 +1630,7 @@ func appendJoinNodeForParentFkCheck(builder *QueryBuilder, bindCtx *BindContext, parentTableDef = newTableDef // append table scan node - scanNodeProject := make([]*Expr, len(parentTableDef.Cols)) + scanNodeProject := make([]*plan.Expr, len(parentTableDef.Cols)) for colIdx, col := range parentTableDef.Cols { scanNodeProject[colIdx] = &plan.Expr{ Typ: col.Typ, @@ -1654,7 +1653,7 @@ func appendJoinNodeForParentFkCheck(builder *QueryBuilder, bindCtx *BindContext, projectList := getProjectionByLastNode(builder, lastNodeId) // append project - projectList = append(projectList, &Expr{ + projectList = append(projectList, &plan.Expr{ Typ: parentTableDef.Cols[0].Typ, Expr: &plan.Expr_Col{ Col: &plan.ColRef{ @@ -1693,7 +1692,7 @@ func makeCompPkeyExpr(tableDef *plan.TableDef, name2ColIndex map[string]int32) * args[i] = &plan.Expr{ Typ: tableDef.Cols[colPos].Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ ColPos: colPos, }, }, @@ -1732,7 +1731,7 @@ func makeClusterByExpr(tableDef *plan.TableDef, name2ColIndex 
map[string]int32) args[i] = &plan.Expr{ Typ: tableDef.Cols[colPos].Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ ColPos: colPos, }, }, @@ -1756,7 +1755,7 @@ func makeClusterByExpr(tableDef *plan.TableDef, name2ColIndex map[string]int32) // appendPreInsertNode append preinsert node func appendPreInsertNode(builder *QueryBuilder, bindCtx *BindContext, - objRef *ObjectRef, tableDef *TableDef, + objRef *plan.ObjectRef, tableDef *plan.TableDef, lastNodeId int32, isUpdate bool) int32 { preInsertProjection := getProjectionByLastNode(builder, lastNodeId) @@ -1804,13 +1803,13 @@ func appendPreInsertNode(builder *QueryBuilder, bindCtx *BindContext, for i, col := range tableDef.Cols { name2ColIndex[col.Name] = int32(i) } - preInsertNode := &Node{ + preInsertNode := &plan.Node{ NodeType: plan.Node_PRE_INSERT, Children: []int32{lastNodeId}, ProjectList: preInsertProjection, PreInsertCtx: &plan.PreInsertCtx{ Ref: objRef, - TableDef: DeepCopyTableDef(tableDef, true), + TableDef: plan.DeepCopyTableDef(tableDef, true), HasAutoCol: hashAutoCol, IsOldUpdate: isUpdate, CompPkeyExpr: makeCompPkeyExpr(tableDef, name2ColIndex), @@ -1847,10 +1846,10 @@ func appendPreInsertNode(builder *QueryBuilder, bindCtx *BindContext, // appendPreInsertSkMasterPlan append preinsert node func appendPreInsertSkMasterPlan(builder *QueryBuilder, bindCtx *BindContext, - tableDef *TableDef, + tableDef *plan.TableDef, indexIdx int, isUpdate bool, - indexTableDef *TableDef, + indexTableDef *plan.TableDef, genLastNodeIdFn func() int32) (int32, error) { // 1. 
init details @@ -1860,7 +1859,7 @@ func appendPreInsertSkMasterPlan(builder *QueryBuilder, //var rowIdType *Type colsPos := make(map[string]int) - colsType := make(map[string]*Type) + colsType := make(map[string]*plan.Type) for i, colVal := range tableDef.Cols { //if colVal.Name == catalog.Row_ID { // rowIdPos = i @@ -1934,7 +1933,7 @@ func appendPreInsertSkMasterPlan(builder *QueryBuilder, return newSourceStep, nil } -func buildSerialFullAndPKColsProjMasterIndex(builder *QueryBuilder, bindCtx *BindContext, tableDef *TableDef, genLastNodeIdFn func() int32, originPkPos int, part string, colsType map[string]*Type, colsPos map[string]int, originPkType Type) (*Node, error) { +func buildSerialFullAndPKColsProjMasterIndex(builder *QueryBuilder, bindCtx *BindContext, tableDef *plan.TableDef, genLastNodeIdFn func() int32, originPkPos int, part string, colsType map[string]*plan.Type, colsPos map[string]int, originPkType plan.Type) (*plan.Node, error) { var err error // 1. get new source sink var currLastNodeId = genLastNodeIdFn() @@ -1943,12 +1942,12 @@ func buildSerialFullAndPKColsProjMasterIndex(builder *QueryBuilder, bindCtx *Bin currLastNodeId = recomputeMoCPKeyViaProjection(builder, bindCtx, tableDef, currLastNodeId, originPkPos) //3. 
add a new project for < serial_full("0", a, pk), pk > - projectProjection := make([]*Expr, 2) + projectProjection := make([]*plan.Expr, 2) //3.i build serial_full("0", a, pk) serialArgs := make([]*plan.Expr, 3) serialArgs[0] = makePlan2StringConstExprWithType(getColSeqFromColDef(tableDef.Cols[colsPos[part]])) - serialArgs[1] = &Expr{ + serialArgs[1] = &plan.Expr{ Typ: *colsType[part], Expr: &plan.Expr_Col{ Col: &plan.ColRef{ @@ -1958,7 +1957,7 @@ func buildSerialFullAndPKColsProjMasterIndex(builder *QueryBuilder, bindCtx *Bin }, }, } - serialArgs[2] = &Expr{ + serialArgs[2] = &plan.Expr{ Typ: originPkType, Expr: &plan.Expr_Col{ Col: &plan.ColRef{ @@ -1974,7 +1973,7 @@ func buildSerialFullAndPKColsProjMasterIndex(builder *QueryBuilder, bindCtx *Bin } //3.ii build pk - projectProjection[1] = &Expr{ + projectProjection[1] = &plan.Expr{ Typ: originPkType, Expr: &plan.Expr_Col{ Col: &plan.ColRef{ @@ -2000,7 +1999,7 @@ func buildSerialFullAndPKColsProjMasterIndex(builder *QueryBuilder, bindCtx *Bin // }) //} - projectNode := &Node{ + projectNode := &plan.Node{ NodeType: plan.Node_PROJECT, Children: []int32{currLastNodeId}, ProjectList: projectProjection, @@ -2008,14 +2007,14 @@ func buildSerialFullAndPKColsProjMasterIndex(builder *QueryBuilder, bindCtx *Bin return projectNode, nil } -func appendPreInsertSkVectorPlan(builder *QueryBuilder, bindCtx *BindContext, tableDef *TableDef, lastNodeId int32, multiTableIndex *MultiTableIndex, isUpdate bool, idxRefs []*ObjectRef, indexTableDefs []*TableDef) (int32, error) { +func appendPreInsertSkVectorPlan(builder *QueryBuilder, bindCtx *BindContext, tableDef *plan.TableDef, lastNodeId int32, multiTableIndex *MultiTableIndex, isUpdate bool, idxRefs []*plan.ObjectRef, indexTableDefs []*plan.TableDef) (int32, error) { //1.a get vector & pk column details var posOriginPk, posOriginVecColumn int - var typeOriginPk, typeOriginVecColumn Type + var typeOriginPk, typeOriginVecColumn plan.Type { colsMap := make(map[string]int) - colTypes := 
make([]Type, len(tableDef.Cols)) + colTypes := make([]plan.Type, len(tableDef.Cols)) for i, col := range tableDef.Cols { colsMap[col.Name] = i colTypes[i] = tableDef.Cols[i].Typ @@ -2089,11 +2088,11 @@ func appendPreInsertSkVectorPlan(builder *QueryBuilder, bindCtx *BindContext, ta return sourceStep, nil } -func recomputeMoCPKeyViaProjection(builder *QueryBuilder, bindCtx *BindContext, tableDef *TableDef, lastNodeId int32, posOriginPk int) int32 { +func recomputeMoCPKeyViaProjection(builder *QueryBuilder, bindCtx *BindContext, tableDef *plan.TableDef, lastNodeId int32, posOriginPk int) int32 { if tableDef.Pkey != nil && tableDef.Pkey.PkeyColName != catalog.FakePrimaryKeyColName { lastProject := builder.qry.Nodes[lastNodeId].ProjectList - projectProjection := make([]*Expr, len(lastProject)) + projectProjection := make([]*plan.Expr, len(lastProject)) for i := 0; i < len(lastProject); i++ { projectProjection[i] = &plan.Expr{ Typ: lastProject[i].Typ, @@ -2155,7 +2154,7 @@ func recomputeMoCPKeyViaProjection(builder *QueryBuilder, bindCtx *BindContext, } } } - projectNode := &Node{ + projectNode := &plan.Node{ NodeType: plan.Node_PROJECT, Children: []int32{lastNodeId}, ProjectList: projectProjection, @@ -2170,11 +2169,11 @@ func recomputeMoCPKeyViaProjection(builder *QueryBuilder, bindCtx *BindContext, func appendPreInsertPlan( builder *QueryBuilder, bindCtx *BindContext, - tableDef *TableDef, + tableDef *plan.TableDef, lastNodeId int32, indexIdx int, isUpddate bool, - uniqueTableDef *TableDef, + uniqueTableDef *plan.TableDef, isUK bool) (int32, error) { /******** NOTE: make sure to make the major change applied to secondary index, to IVFFLAT index as well. 
@@ -2198,16 +2197,16 @@ func appendPreInsertPlan( pkColumn, originPkType := getPkPos(tableDef, false) lastNodeId = recomputeMoCPKeyViaProjection(builder, bindCtx, tableDef, lastNodeId, pkColumn) - var ukType Type + var ukType plan.Type if len(idxDef.Parts) == 1 { ukType = tableDef.Cols[useColumns[0]].Typ } else { - ukType = Type{ + ukType = plan.Type{ Id: int32(types.T_varchar), Width: types.MaxVarcharLen, } } - var preinsertUkProjection []*Expr + var preinsertUkProjection []*plan.Expr preinsertUkProjection = append(preinsertUkProjection, &plan.Expr{ Typ: ukType, Expr: &plan.Expr_Col{ @@ -2246,9 +2245,9 @@ func appendPreInsertPlan( // ie preInsert -> preInsertIKNode // NOTE: we have build secondary index by reusing the whole code flow of Unique Index. // This would be done in a separate PR after verifying the correctness of the current code. - var preInsert *Node + var preInsert *plan.Node if isUK { - preInsert = &Node{ + preInsert = &plan.Node{ NodeType: plan.Node_PRE_INSERT_UK, Children: []int32{lastNodeId}, ProjectList: preinsertUkProjection, @@ -2262,7 +2261,7 @@ func appendPreInsertPlan( } else { // NOTE: We don't defined PreInsertSkCtx. Instead, we use PreInsertUkCtx for both UK and SK since there // is no difference in the contents. 
- preInsert = &Node{ + preInsert = &plan.Node{ NodeType: plan.Node_PRE_INSERT_SK, Children: []int32{lastNodeId}, ProjectList: preinsertUkProjection, @@ -2299,9 +2298,9 @@ func appendPreInsertPlan( func appendDeleteIndexTablePlan( builder *QueryBuilder, bindCtx *BindContext, - uniqueObjRef *ObjectRef, - uniqueTableDef *TableDef, - indexdef *IndexDef, + uniqueObjRef *plan.ObjectRef, + uniqueTableDef *plan.TableDef, + indexdef *plan.IndexDef, typMap map[string]plan.Type, posMap map[string]int, baseNodeId int32, @@ -2318,7 +2317,7 @@ func appendDeleteIndexTablePlan( var rightRowIdPos int32 = -1 var rightPkPos int32 = -1 - scanNodeProject := make([]*Expr, len(uniqueTableDef.Cols)) + scanNodeProject := make([]*plan.Expr, len(uniqueTableDef.Cols)) for colIdx, col := range uniqueTableDef.Cols { if col.Name == catalog.Row_ID { rightRowIdPos = int32(colIdx) @@ -2390,13 +2389,13 @@ func appendDeleteIndexTablePlan( } // append join node - var joinConds []*Expr - var leftExpr *Expr + var joinConds []*plan.Expr + var leftExpr *plan.Expr partsLength := len(indexdef.Parts) if partsLength == 1 { orginIndexColumnName := indexdef.Parts[0] typ := typMap[orginIndexColumnName] - leftExpr = &Expr{ + leftExpr = &plan.Expr{ Typ: typ, Expr: &plan.Expr_Col{ Col: &plan.ColRef{ @@ -2407,7 +2406,7 @@ func appendDeleteIndexTablePlan( }, } } else { - args := make([]*Expr, partsLength) + args := make([]*plan.Expr, partsLength) for i, column := range indexdef.Parts { column = catalog.ResolveAlias(column) typ := typMap[column] @@ -2438,11 +2437,11 @@ func appendDeleteIndexTablePlan( } } - condExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*Expr{rightExpr, leftExpr}) + condExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*plan.Expr{rightExpr, leftExpr}) if err != nil { return -1, err } - joinConds = []*Expr{condExpr} + joinConds = []*plan.Expr{condExpr} buildExpr := &plan.Expr{ Typ: pkTyp, @@ -2508,8 +2507,8 @@ func appendDeleteIndexTablePlan( } func 
appendDeleteMasterTablePlan(builder *QueryBuilder, bindCtx *BindContext, - masterObjRef *ObjectRef, masterTableDef *TableDef, - baseNodeId int32, tableDef *TableDef, indexDef *plan.IndexDef, + masterObjRef *plan.ObjectRef, masterTableDef *plan.TableDef, + baseNodeId int32, tableDef *plan.TableDef, indexDef *plan.IndexDef, typMap map[string]plan.Type, posMap map[string]int) (int32, error) { originPkColumnPos, originPkType := getPkPos(tableDef, false) @@ -2519,7 +2518,7 @@ func appendDeleteMasterTablePlan(builder *QueryBuilder, bindCtx *BindContext, var rightRowIdPos int32 = -1 var rightPkPos int32 = -1 - scanNodeProject := make([]*Expr, len(masterTableDef.Cols)) + scanNodeProject := make([]*plan.Expr, len(masterTableDef.Cols)) for colIdx, colVal := range masterTableDef.Cols { if colVal.Name == catalog.Row_ID { @@ -2550,13 +2549,13 @@ func appendDeleteMasterTablePlan(builder *QueryBuilder, bindCtx *BindContext, // join conditions // Example :- // ( (serial_full('1', a, c) = __mo_index_idx_col) or (serial_full('1', b, c) = __mo_index_idx_col) ) - var joinConds *Expr + var joinConds *plan.Expr for idx, part := range indexDef.Parts { // serial_full("colPos", col1, pk) - var leftExpr *Expr - leftExprArgs := make([]*Expr, 3) + var leftExpr *plan.Expr + leftExprArgs := make([]*plan.Expr, 3) leftExprArgs[0] = makePlan2StringConstExprWithType(getColSeqFromColDef(tableDef.Cols[posMap[part]])) - leftExprArgs[1] = &Expr{ + leftExprArgs[1] = &plan.Expr{ Typ: typMap[part], Expr: &plan.Expr_Col{ Col: &plan.ColRef{ @@ -2566,7 +2565,7 @@ func appendDeleteMasterTablePlan(builder *QueryBuilder, bindCtx *BindContext, }, }, } - leftExprArgs[2] = &Expr{ + leftExprArgs[2] = &plan.Expr{ Typ: originPkType, Expr: &plan.Expr_Col{ Col: &plan.ColRef{ @@ -2591,7 +2590,7 @@ func appendDeleteMasterTablePlan(builder *QueryBuilder, bindCtx *BindContext, }, }, } - currCond, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*Expr{leftExpr, rightExpr}) + currCond, err := 
BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*plan.Expr{leftExpr, rightExpr}) if err != nil { return -1, err } @@ -2628,7 +2627,7 @@ func appendDeleteMasterTablePlan(builder *QueryBuilder, bindCtx *BindContext, NodeType: plan.Node_JOIN, JoinType: plan.Node_LEFT, Children: []int32{lastNodeId, rightId}, - OnList: []*Expr{joinConds}, + OnList: []*plan.Expr{joinConds}, ProjectList: projectList, }, bindCtx) @@ -2636,8 +2635,8 @@ func appendDeleteMasterTablePlan(builder *QueryBuilder, bindCtx *BindContext, } func appendDeleteIvfTablePlan(builder *QueryBuilder, bindCtx *BindContext, - entriesObjRef *ObjectRef, entriesTableDef *TableDef, - baseNodeId int32, tableDef *TableDef) (int32, error) { + entriesObjRef *plan.ObjectRef, entriesTableDef *plan.TableDef, + baseNodeId int32, tableDef *plan.TableDef) (int32, error) { originPkColumnPos, originPkType := getPkPos(tableDef, false) @@ -2652,7 +2651,7 @@ func appendDeleteIvfTablePlan(builder *QueryBuilder, bindCtx *BindContext, } neededCols := make([]*plan.ColDef, 0, 3) - scanNodeProject := make([]*Expr, 3) + scanNodeProject := make([]*plan.Expr, 3) for _, col := range entriesTableDef.Cols { if col.Name == catalog.Row_ID { scanNodeProject[0] = &plan.Expr{ @@ -2690,7 +2689,7 @@ func appendDeleteIvfTablePlan(builder *QueryBuilder, bindCtx *BindContext, } } - newEntriesTableDef := DeepCopyTableDef(entriesTableDef, false) + newEntriesTableDef := plan.DeepCopyTableDef(entriesTableDef, false) newEntriesTableDef.Cols = neededCols ivfScanId := builder.appendNode(&plan.Node{ @@ -2737,7 +2736,7 @@ func appendDeleteIvfTablePlan(builder *QueryBuilder, bindCtx *BindContext, } // append join node - var joinConds []*Expr + var joinConds []*plan.Expr var origExpr = &plan.Expr{ Typ: originPkType, Expr: &plan.Expr_Col{ @@ -2758,11 +2757,11 @@ func appendDeleteIvfTablePlan(builder *QueryBuilder, bindCtx *BindContext, older versions of the entries to be stale. 
*/ - condExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*Expr{origExpr, ivfExpr}) + condExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*plan.Expr{origExpr, ivfExpr}) if err != nil { return -1, err } - joinConds = []*Expr{condExpr} + joinConds = []*plan.Expr{condExpr} lastNodeId = builder.appendNode(&plan.Node{ NodeType: plan.Node_JOIN, @@ -2777,10 +2776,10 @@ func appendDeleteIvfTablePlan(builder *QueryBuilder, bindCtx *BindContext, func appendDeleteIndexTablePlanWithoutFilters( builder *QueryBuilder, bindCtx *BindContext, - uniqueObjRef *ObjectRef, - uniqueTableDef *TableDef, + uniqueObjRef *plan.ObjectRef, + uniqueTableDef *plan.TableDef, ) (int32, error) { - scanNodeProject := make([]*Expr, len(uniqueTableDef.Cols)) + scanNodeProject := make([]*plan.Expr, len(uniqueTableDef.Cols)) for colIdx, col := range uniqueTableDef.Cols { scanNodeProject[colIdx] = &plan.Expr{ Typ: col.Typ, @@ -2816,7 +2815,7 @@ func makePreUpdateDeletePlan( // append project Node to fetch the columns of this table // in front of this projectList are update cols - projectProjection := make([]*Expr, len(delCtx.tableDef.Cols)+delCtx.updateColLength) + projectProjection := make([]*plan.Expr, len(delCtx.tableDef.Cols)+delCtx.updateColLength) for i, col := range delCtx.tableDef.Cols { projectProjection[i] = &plan.Expr{ Typ: col.Typ, @@ -2847,7 +2846,7 @@ func makePreUpdateDeletePlan( }, } } - projectNode := &Node{ + projectNode := &plan.Node{ NodeType: plan.Node_PROJECT, Children: []int32{lastNodeId}, ProjectList: projectProjection, @@ -2859,8 +2858,8 @@ func makePreUpdateDeletePlan( //eg: update t2, (select a from t2) as tt set t2.a= t2.a+1 where t2.b >10 if delCtx.needAggFilter { lastNode := builder.qry.Nodes[lastNodeId] - groupByExprs := make([]*Expr, len(delCtx.tableDef.Cols)) - aggNodeProjection := make([]*Expr, len(lastNode.ProjectList)) + groupByExprs := make([]*plan.Expr, len(delCtx.tableDef.Cols)) + aggNodeProjection := make([]*plan.Expr, 
len(lastNode.ProjectList)) for i := 0; i < len(delCtx.tableDef.Cols); i++ { e := lastNode.ProjectList[i] name := "" @@ -2889,7 +2888,7 @@ func makePreUpdateDeletePlan( } } offset := len(delCtx.tableDef.Cols) - aggList := make([]*Expr, delCtx.updateColLength) + aggList := make([]*plan.Expr, delCtx.updateColLength) for i := 0; i < delCtx.updateColLength; i++ { pos := offset + i e := lastNode.ProjectList[pos] @@ -2907,7 +2906,7 @@ func makePreUpdateDeletePlan( }, }, } - aggExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "any_value", []*Expr{baseExpr}) + aggExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "any_value", []*plan.Expr{baseExpr}) if err != nil { return -1, err } @@ -2924,7 +2923,7 @@ func makePreUpdateDeletePlan( } } - aggNode := &Node{ + aggNode := &plan.Node{ NodeType: plan.Node_AGG, Children: []int32{lastNodeId}, GroupBy: groupByExprs, @@ -2947,14 +2946,14 @@ func makePreUpdateDeletePlan( }, }, } - nullCheckExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "isnotnull", []*Expr{rowIdExpr}) + nullCheckExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "isnotnull", []*plan.Expr{rowIdExpr}) if err != nil { return -1, err } - filterNode := &Node{ + filterNode := &plan.Node{ NodeType: plan.Node_FILTER, Children: []int32{lastNodeId}, - FilterList: []*Expr{nullCheckExpr}, + FilterList: []*plan.Expr{nullCheckExpr}, ProjectList: getProjectionByLastNode(builder, lastNodeId), } lastNodeId = builder.appendNode(filterNode, bindCtx) @@ -2975,7 +2974,7 @@ func makePreUpdateDeletePlan( RefreshTsIdxInBat: -1, LockTable: false, } - lockNode := &Node{ + lockNode := &plan.Node{ NodeType: plan.Node_LOCK_OP, Children: []int32{lastNodeId}, LockTargets: []*plan.LockTarget{lockTarget}, @@ -2988,10 +2987,10 @@ func makePreUpdateDeletePlan( // for compound primary key, we need append hidden pk column to the project list if delCtx.tableDef.Pkey.PkeyColName == catalog.CPrimaryKeyColName { - pkColExpr := make([]*Expr, 
len(delCtx.tableDef.Pkey.Names)) + pkColExpr := make([]*plan.Expr, len(delCtx.tableDef.Pkey.Names)) for i, colName := range delCtx.tableDef.Pkey.Names { colIdx := 0 - var colTyp *Type + var colTyp *plan.Type if idx, exists := delCtx.updateColPosMap[colName]; exists { colIdx = idx colTyp = &lastProjectList[idx].Typ @@ -3004,7 +3003,7 @@ func makePreUpdateDeletePlan( } } } - pkColExpr[i] = &Expr{ + pkColExpr[i] = &plan.Expr{ Typ: *colTyp, Expr: &plan.Expr_Col{Col: &plan.ColRef{ColPos: int32(colIdx)}}, } @@ -3015,7 +3014,7 @@ func makePreUpdateDeletePlan( } newPkPos = int32(len(lastProjectList)) lastProjectList = append(lastProjectList, cpPkExpr) - projNode := &Node{ + projNode := &plan.Node{ NodeType: plan.Node_PROJECT, Children: []int32{lastNodeId}, ProjectList: lastProjectList, @@ -3038,7 +3037,7 @@ func makePreUpdateDeletePlan( RefreshTsIdxInBat: -1, //unsupport now LockTable: false, } - lockNode := &Node{ + lockNode := &plan.Node{ NodeType: plan.Node_LOCK_OP, Children: []int32{lastNodeId}, LockTargets: []*plan.LockTarget{lockTarget}, @@ -3048,7 +3047,7 @@ func makePreUpdateDeletePlan( if len(lastProjectList) > originProjectListLen { projectList := lastProjectList[0:originProjectListLen] - projNode := &Node{ + projNode := &plan.Node{ NodeType: plan.Node_PROJECT, Children: []int32{lastNodeId}, ProjectList: projectList, @@ -3074,7 +3073,7 @@ func appendLockNode( builder *QueryBuilder, bindCtx *BindContext, lastNodeId int32, - tableDef *TableDef, + tableDef *plan.TableDef, lockTable bool, block bool, isUpdate bool, @@ -3100,7 +3099,7 @@ func appendLockNode( Block: block, } - lockNode := &Node{ + lockNode := &plan.Node{ NodeType: plan.Node_LOCK_OP, Children: []int32{lastNodeId}, LockTargets: []*plan.LockTarget{lockTarget}, @@ -3123,7 +3122,7 @@ type sinkScanMeta struct { recursive bool } -func reduceSinkSinkScanNodes(qry *Query) { +func reduceSinkSinkScanNodes(qry *plan.Query) { if len(qry.Steps) == 1 { return } @@ -3182,7 +3181,7 @@ func reduceSinkSinkScanNodes(qry 
*Query) { } func collectSinkAndSinkScanMeta( - qry *Query, + qry *plan.Query, sinks map[int32]*sinkMeta, oldStep int, nodeId int32, @@ -3572,7 +3571,7 @@ func IsForeignKeyChecksEnabled(ctx CompilerContext) (bool, error) { } } -func buildPreInsertMultiTableIndexes(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindContext, objRef *ObjectRef, tableDef *TableDef, +func buildPreInsertMultiTableIndexes(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindContext, objRef *plan.ObjectRef, tableDef *plan.TableDef, sourceStep int32, multiTableIndexes map[string]*MultiTableIndex) error { var lastNodeId int32 var err error @@ -3592,8 +3591,8 @@ func buildPreInsertMultiTableIndexes(ctx CompilerContext, builder *QueryBuilder, } lastNodeId = appendSinkScanNode(builder, bindCtx, sourceStep) - var idxRefs = make([]*ObjectRef, 3) - var idxTableDefs = make([]*TableDef, 3) + var idxRefs = make([]*plan.ObjectRef, 3) + var idxTableDefs = make([]*plan.TableDef, 3) // TODO: node should hold snapshot and account info //idxRefs[0], idxTableDefs[0] = ctx.Resolve(objRef.SchemaName, multiTableIndex.IndexDefs[catalog.SystemSI_IVFFLAT_TblType_Metadata].IndexTableName, timestamp.Timestamp{}) //idxRefs[1], idxTableDefs[1] = ctx.Resolve(objRef.SchemaName, multiTableIndex.IndexDefs[catalog.SystemSI_IVFFLAT_TblType_Centroids].IndexTableName, timestamp.Timestamp{}) @@ -3614,7 +3613,7 @@ func buildPreInsertMultiTableIndexes(ctx CompilerContext, builder *QueryBuilder, // remove row_id for i := range idxTableDefs { - idxTableDefs[i].Cols = RemoveIf[*ColDef](idxTableDefs[i].Cols, func(column *ColDef) bool { + idxTableDefs[i].Cols = RemoveIf[*plan.ColDef](idxTableDefs[i].Cols, func(column *plan.ColDef) bool { return column.Name == catalog.Row_ID }) } @@ -3637,8 +3636,8 @@ func buildPreInsertMultiTableIndexes(ctx CompilerContext, builder *QueryBuilder, updatePkCol := true ifExistAutoPkCol := false ifCheckPkDup := false - var pkFilterExprs []*Expr - var partitionExpr *Expr + var pkFilterExprs 
[]*plan.Expr + var partitionExpr *plan.Expr var fuzzymessage *OriginTableMessageForFuzzy var ifInsertFromUnique bool err = makeOneInsertPlan(ctx, builder, bindCtx, idxRefs[2], idxTableDefs[2], @@ -3675,8 +3674,8 @@ func buildDeleteMultiTableIndexes(ctx CompilerContext, builder *QueryBuilder, bi } // Used by pre-insert vector index. - var idxRefs = make([]*ObjectRef, 3) - var idxTableDefs = make([]*TableDef, 3) + var idxRefs = make([]*plan.ObjectRef, 3) + var idxTableDefs = make([]*plan.TableDef, 3) // TODO: plan node should hold snapshot and account info //idxRefs[0], idxTableDefs[0] = ctx.Resolve(delCtx.objRef.SchemaName, multiTableIndex.IndexDefs[catalog.SystemSI_IVFFLAT_TblType_Metadata].IndexTableName, timestamp.Timestamp{}) //idxRefs[1], idxTableDefs[1] = ctx.Resolve(delCtx.objRef.SchemaName, multiTableIndex.IndexDefs[catalog.SystemSI_IVFFLAT_TblType_Centroids].IndexTableName, timestamp.Timestamp{}) @@ -3704,7 +3703,7 @@ func buildDeleteMultiTableIndexes(ctx CompilerContext, builder *QueryBuilder, bi var err error var entriesDeleteIdx int var entriesTblPkPos int - var entriesTblPkTyp Type + var entriesTblPkTyp plan.Type if delCtx.isDeleteWithoutFilters { lastNodeId, err = appendDeleteIndexTablePlanWithoutFilters(builder, bindCtx, entriesObjRef, entriesTableDef) @@ -3747,8 +3746,8 @@ func buildDeleteMultiTableIndexes(ctx CompilerContext, builder *QueryBuilder, bi lastProject := builder.qry.Nodes[lastNodeId].ProjectList lastProjectForTblJoinCentroids := builder.qry.Nodes[lastNodeIdForTblJoinCentroids].ProjectList - projectProjection := make([]*Expr, len(delCtx.tableDef.Cols)) - projectProjectionForTblJoinCentroids := make([]*Expr, len(delCtx.tableDef.Cols)) + projectProjection := make([]*plan.Expr, len(delCtx.tableDef.Cols)) + projectProjectionForTblJoinCentroids := make([]*plan.Expr, len(delCtx.tableDef.Cols)) for j, uCols := range delCtx.tableDef.Cols { if nIdx, ok := delCtx.updateColPosMap[uCols.Name]; ok { projectProjection[j] = lastProject[nIdx] @@ -3765,7 
+3764,7 @@ func buildDeleteMultiTableIndexes(ctx CompilerContext, builder *QueryBuilder, bi } } } - projectNode := &Node{ + projectNode := &plan.Node{ NodeType: plan.Node_PROJECT, Children: []int32{lastNodeId}, ProjectList: projectProjection, @@ -3777,10 +3776,10 @@ func buildDeleteMultiTableIndexes(ctx CompilerContext, builder *QueryBuilder, bi return err } - insertEntriesTableDef := DeepCopyTableDef(entriesTableDef, false) + insertEntriesTableDef := plan.DeepCopyTableDef(entriesTableDef, false) for _, col := range entriesTableDef.Cols { if col.Name != catalog.Row_ID { - insertEntriesTableDef.Cols = append(insertEntriesTableDef.Cols, DeepCopyColDef(col)) + insertEntriesTableDef.Cols = append(insertEntriesTableDef.Cols, plan.DeepCopyColDef(col)) } } updateColLength := 1 @@ -3790,9 +3789,9 @@ func buildDeleteMultiTableIndexes(ctx CompilerContext, builder *QueryBuilder, bi ifExistAutoPkCol := false ifCheckPkDup := false ifInsertFromUnique := false - var pkFilterExprs []*Expr - var partitionExpr *Expr - var indexSourceColTypes []*Type + var pkFilterExprs []*plan.Expr + var partitionExpr *plan.Expr + var indexSourceColTypes []*plan.Type var fuzzymessage *OriginTableMessageForFuzzy err = makeOneInsertPlan(ctx, builder, bindCtx, entriesObjRef, insertEntriesTableDef, updateColLength, preUKStep, addAffectedRows, isFkRecursionCall, updatePkCol, @@ -3821,7 +3820,7 @@ func buildDeleteMultiTableIndexes(ctx CompilerContext, builder *QueryBuilder, bi return nil } -func buildPreInsertRegularIndex(stmt *tree.Insert, ctx CompilerContext, builder *QueryBuilder, bindCtx *BindContext, objRef *ObjectRef, tableDef *TableDef, +func buildPreInsertRegularIndex(stmt *tree.Insert, ctx CompilerContext, builder *QueryBuilder, bindCtx *BindContext, objRef *plan.ObjectRef, tableDef *plan.TableDef, sourceStep int32, ifInsertFromUniqueColMap map[string]bool, indexdef *plan.IndexDef, idx int) error { idxRef, idxTableDef, err := ctx.ResolveIndexTableByRef(objRef, indexdef.IndexTableName, nil) @@ 
-3829,7 +3828,7 @@ func buildPreInsertRegularIndex(stmt *tree.Insert, ctx CompilerContext, builder return err } // remove row_id - idxTableDef.Cols = RemoveIf[*ColDef](idxTableDef.Cols, func(col *ColDef) bool { + idxTableDef.Cols = RemoveIf[*plan.ColDef](idxTableDef.Cols, func(col *plan.ColDef) bool { return col.Name == catalog.Row_ID }) @@ -3841,7 +3840,7 @@ func buildPreInsertRegularIndex(stmt *tree.Insert, ctx CompilerContext, builder needCheckPkDupForHiddenTable := indexdef.Unique // only check PK uniqueness for UK. SK will not check PK uniqueness. var insertColsNameFromStmt []string - var pkFilterExprForHiddenTable []*Expr + var pkFilterExprForHiddenTable []*plan.Expr var originTableMessageForFuzzy *OriginTableMessageForFuzzy var ifInsertFromUnique bool @@ -3854,7 +3853,7 @@ func buildPreInsertRegularIndex(stmt *tree.Insert, ctx CompilerContext, builder return err } // remove row_id - idxTableDef.Cols = RemoveIf[*ColDef](idxTableDef.Cols, func(colVal *ColDef) bool { + idxTableDef.Cols = RemoveIf[*plan.ColDef](idxTableDef.Cols, func(colVal *plan.ColDef) bool { return colVal.Name == catalog.Row_ID }) originTableMessageForFuzzy = &OriginTableMessageForFuzzy{ @@ -3925,7 +3924,7 @@ func buildPreInsertRegularIndex(stmt *tree.Insert, ctx CompilerContext, builder if builder.isRestore { needCheckPkDupForHiddenTable = false } - var partitionExpr *Expr + var partitionExpr *plan.Expr err = makeOneInsertPlan(ctx, builder, bindCtx, idxRef, idxTableDef, updateColLength, newSourceStep, addAffectedRows, isFkRecursionCall, updatePkCol, pkFilterExprForHiddenTable, partitionExpr, ifExistAutoPkCol, needCheckPkDupForHiddenTable, ifInsertFromUnique, @@ -3934,14 +3933,14 @@ func buildPreInsertRegularIndex(stmt *tree.Insert, ctx CompilerContext, builder return err } -func buildPreInsertMasterIndex(stmt *tree.Insert, ctx CompilerContext, builder *QueryBuilder, bindCtx *BindContext, objRef *ObjectRef, tableDef *TableDef, +func buildPreInsertMasterIndex(stmt *tree.Insert, ctx 
CompilerContext, builder *QueryBuilder, bindCtx *BindContext, objRef *plan.ObjectRef, tableDef *plan.TableDef, sourceStep int32, ifInsertFromUniqueColMap map[string]bool, indexdef *plan.IndexDef, idx int) error { idxRef, idxTableDef, err := ctx.ResolveIndexTableByRef(objRef, indexdef.IndexTableName, nil) if err != nil { return err } // remove row_id - idxTableDef.Cols = RemoveIf[*ColDef](idxTableDef.Cols, func(colVal *ColDef) bool { + idxTableDef.Cols = RemoveIf[*plan.ColDef](idxTableDef.Cols, func(colVal *plan.ColDef) bool { return colVal.Name == catalog.Row_ID }) genLastNodeIdFn := func() int32 { @@ -3966,8 +3965,8 @@ func buildPreInsertMasterIndex(stmt *tree.Insert, ctx CompilerContext, builder * ifExistAutoPkCol := false ifCheckPkDup := false ifInsertFromUnique := false - var pkFilterExprs []*Expr - var partitionExpr *Expr + var pkFilterExprs []*plan.Expr + var partitionExpr *plan.Expr var fuzzymessage *OriginTableMessageForFuzzy err = makeOneInsertPlan(ctx, builder, bindCtx, idxRef, idxTableDef, updateColLength, newSourceStep, addAffectedRows, isFkRecursionCall, updatePkCol, @@ -3999,7 +3998,7 @@ func buildDeleteRegularIndex(ctx CompilerContext, builder *QueryBuilder, bindCtx var lastNodeId int32 var uniqueDeleteIdx int var uniqueTblPkPos int - var uniqueTblPkTyp Type + var uniqueTblPkTyp plan.Type if delCtx.isDeleteWithoutFilters { lastNodeId, err = appendDeleteIndexTablePlanWithoutFilters(builder, bindCtx, uniqueObjRef, uniqueTableDef) @@ -4035,7 +4034,7 @@ func buildDeleteRegularIndex(ctx CompilerContext, builder *QueryBuilder, bindCtx { lastNodeId = appendSinkScanNode(builder, bindCtx, newSourceStep) lastProject := builder.qry.Nodes[lastNodeId].ProjectList - projectProjection := make([]*Expr, len(delCtx.tableDef.Cols)) + projectProjection := make([]*plan.Expr, len(delCtx.tableDef.Cols)) for j, uCols := range delCtx.tableDef.Cols { if nIdx, ok := delCtx.updateColPosMap[uCols.Name]; ok { projectProjection[j] = lastProject[nIdx] @@ -4048,7 +4047,7 @@ func 
buildDeleteRegularIndex(ctx CompilerContext, builder *QueryBuilder, bindCtx } } } - projectNode := &Node{ + projectNode := &plan.Node{ NodeType: plan.Node_PROJECT, Children: []int32{lastNodeId}, ProjectList: projectProjection, @@ -4059,10 +4058,10 @@ func buildDeleteRegularIndex(ctx CompilerContext, builder *QueryBuilder, bindCtx return err } - insertUniqueTableDef := DeepCopyTableDef(uniqueTableDef, false) + insertUniqueTableDef := plan.DeepCopyTableDef(uniqueTableDef, false) for _, col := range uniqueTableDef.Cols { if col.Name != catalog.Row_ID { - insertUniqueTableDef.Cols = append(insertUniqueTableDef.Cols, DeepCopyColDef(col)) + insertUniqueTableDef.Cols = append(insertUniqueTableDef.Cols, plan.DeepCopyColDef(col)) } } _checkPKDupForHiddenIndexTable := indexdef.Unique // only check PK uniqueness for UK. SK will not check PK uniqueness. @@ -4072,9 +4071,9 @@ func buildDeleteRegularIndex(ctx CompilerContext, builder *QueryBuilder, bindCtx updatePkCol := true ifExistAutoPkCol := false ifInsertFromUnique := false - var pkFilterExprs []*Expr - var partitionExpr *Expr - var indexSourceColTypes []*Type + var pkFilterExprs []*plan.Expr + var partitionExpr *plan.Expr + var indexSourceColTypes []*plan.Type var fuzzymessage *OriginTableMessageForFuzzy err = makeOneInsertPlan(ctx, builder, bindCtx, uniqueObjRef, insertUniqueTableDef, updateColLength, preUKStep, addAffectedRows, isFkRecursionCall, updatePkCol, @@ -4113,7 +4112,7 @@ func buildDeleteMasterIndex(ctx CompilerContext, builder *QueryBuilder, bindCtx var lastNodeId int32 var masterDeleteIdx int var masterTblPkPos int - var masterTblPkTyp Type + var masterTblPkTyp plan.Type if delCtx.isDeleteWithoutFilters { lastNodeId, err = appendDeleteIndexTablePlanWithoutFilters(builder, bindCtx, masterObjRef, masterTableDef) @@ -4154,7 +4153,7 @@ func buildDeleteMasterIndex(ctx CompilerContext, builder *QueryBuilder, bindCtx //TODO: verify if this will cause memory leak. 
newLastNodeId := appendSinkScanNode(builder, bindCtx, newSourceStep) lastProject := builder.qry.Nodes[newLastNodeId].ProjectList - projectProjection := make([]*Expr, len(delCtx.tableDef.Cols)) + projectProjection := make([]*plan.Expr, len(delCtx.tableDef.Cols)) for j, uCols := range delCtx.tableDef.Cols { if nIdx, ok := delCtx.updateColPosMap[uCols.Name]; ok { projectProjection[j] = lastProject[nIdx] @@ -4181,7 +4180,7 @@ func buildDeleteMasterIndex(ctx CompilerContext, builder *QueryBuilder, bindCtx } } } - projectNode := &Node{ + projectNode := &plan.Node{ NodeType: plan.Node_PROJECT, Children: []int32{newLastNodeId}, ProjectList: projectProjection, @@ -4194,10 +4193,10 @@ func buildDeleteMasterIndex(ctx CompilerContext, builder *QueryBuilder, bindCtx return err } - insertEntriesTableDef := DeepCopyTableDef(masterTableDef, false) + insertEntriesTableDef := plan.DeepCopyTableDef(masterTableDef, false) for _, col := range masterTableDef.Cols { if col.Name != catalog.Row_ID { - insertEntriesTableDef.Cols = append(insertEntriesTableDef.Cols, DeepCopyColDef(col)) + insertEntriesTableDef.Cols = append(insertEntriesTableDef.Cols, plan.DeepCopyColDef(col)) } } updateColLength := 1 @@ -4207,9 +4206,9 @@ func buildDeleteMasterIndex(ctx CompilerContext, builder *QueryBuilder, bindCtx ifExistAutoPkCol := false ifCheckPkDup := false ifInsertFromUnique := false - var pkFilterExprs []*Expr - var partitionExpr *Expr - var indexSourceColTypes []*Type + var pkFilterExprs []*plan.Expr + var partitionExpr *plan.Expr + var indexSourceColTypes []*plan.Type var fuzzymessage *OriginTableMessageForFuzzy err = makeOneInsertPlan(ctx, builder, bindCtx, masterObjRef, insertEntriesTableDef, updateColLength, preUKStep, addAffectedRows, isFkRecursionCall, updatePkCol, @@ -4268,7 +4267,7 @@ func buildDeleteIndexPlans(ctx CompilerContext, builder *QueryBuilder, bindCtx * if (hasUniqueKey || hasSecondaryKey) && !canTruncate { typMap := make(map[string]plan.Type) posMap := make(map[string]int) - 
colMap := make(map[string]*ColDef) + colMap := make(map[string]*plan.ColDef) for idx, col := range delCtx.tableDef.Cols { posMap[col.Name] = idx typMap[col.Name] = col.Typ @@ -4335,7 +4334,7 @@ func buildDeleteIndexPlans(ctx CompilerContext, builder *QueryBuilder, bindCtx * multiTableIndexes[indexdef.IndexName] = &MultiTableIndex{ IndexAlgo: catalog.ToLower(indexdef.IndexAlgo), IndexAlgoParams: indexdef.IndexAlgoParams, - IndexDefs: make(map[string]*IndexDef), + IndexDefs: make(map[string]*plan.IndexDef), } } multiTableIndexes[indexdef.IndexName].IndexDefs[catalog.ToLower(indexdef.IndexAlgoTableType)] = indexdef @@ -4388,8 +4387,8 @@ func buildDeleteIndexPlans(ctx CompilerContext, builder *QueryBuilder, bindCtx * // For INSERT, create INSERT plan with prePreInsertFullTextIndex() // For UPDATE, create DELETE plan with prePreDeleteFullTextIndex() and then create INSERT plan with preInsertFullTextIndex(). // i.e. delete old rows and then insert new values -func buildPreInsertFullTextIndex(stmt *tree.Insert, ctx CompilerContext, builder *QueryBuilder, bindCtx *BindContext, objRef *ObjectRef, - tableDef *TableDef, updateColLength int, sourceStep int32, ifInsertFromUniqueColMap map[string]bool, indexdef *plan.IndexDef, +func buildPreInsertFullTextIndex(stmt *tree.Insert, ctx CompilerContext, builder *QueryBuilder, bindCtx *BindContext, objRef *plan.ObjectRef, + tableDef *plan.TableDef, updateColLength int, sourceStep int32, ifInsertFromUniqueColMap map[string]bool, indexdef *plan.IndexDef, idx int, updateColPosMap map[string]int) error { // Check if secondary key is being updated. 
@@ -4474,7 +4473,7 @@ func buildPreInsertFullTextIndex(stmt *tree.Insert, ctx CompilerContext, builder }) } - ftcols := DeepCopyColDefList(tokenizeColDefs) + ftcols := plan.DeepCopyColDefList(tokenizeColDefs) ftcols[0].Typ = tableDef.Cols[pkPos].Typ tablefunc := &plan.Node{ @@ -4539,7 +4538,7 @@ func buildPreInsertFullTextIndex(stmt *tree.Insert, ctx CompilerContext, builder NotNullable: false, }, Expr: &plan.Expr_Lit{ - Lit: &Const{ + Lit: &plan.Literal{ Isnull: true, }, }, @@ -4561,20 +4560,20 @@ func buildPreInsertFullTextIndex(stmt *tree.Insert, ctx CompilerContext, builder return moerr.NewNoSuchTable(builder.GetContext(), objRef.SchemaName, indexdef.IndexName) } - insertEntriesTableDef := DeepCopyTableDef(indexTableDef, false) + insertEntriesTableDef := plan.DeepCopyTableDef(indexTableDef, false) for _, col := range indexTableDef.Cols { if col.Name != catalog.Row_ID { - insertEntriesTableDef.Cols = append(insertEntriesTableDef.Cols, DeepCopyColDef(col)) + insertEntriesTableDef.Cols = append(insertEntriesTableDef.Cols, plan.DeepCopyColDef(col)) } } - preInsertNode := &Node{ + preInsertNode := &plan.Node{ NodeType: plan.Node_PRE_INSERT, Children: []int32{lastNodeId}, ProjectList: project, PreInsertCtx: &plan.PreInsertCtx{ Ref: indexObjRef, - TableDef: DeepCopyTableDef(indexTableDef, true), + TableDef: plan.DeepCopyTableDef(indexTableDef, true), HasAutoCol: true, IsOldUpdate: false, CompPkeyExpr: nil, @@ -4605,9 +4604,9 @@ func buildPreInsertFullTextIndex(stmt *tree.Insert, ctx CompilerContext, builder ifExistAutoPkCol := true ifCheckPkDup := false ifInsertFromUnique := false - var pkFilterExprs []*Expr - var partitionExpr *Expr - var indexSourceColTypes []*Type + var pkFilterExprs []*plan.Expr + var partitionExpr *plan.Expr + var indexSourceColTypes []*plan.Type var fuzzymessage *OriginTableMessageForFuzzy err = makeOneInsertPlan(ctx, builder, bindCtx, indexObjRef, insertEntriesTableDef, updateColLength, newSourceStep, addAffectedRows, isFkRecursionCall, 
updatePkCol, @@ -4618,12 +4617,12 @@ func buildPreInsertFullTextIndex(stmt *tree.Insert, ctx CompilerContext, builder // To create rows of (rowid, docid, __mo_fake_pk_col) for DELETE func buildDeleteRowsFullTextIndex(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindContext, delCtx *dmlPlanCtx, - indexObjRef *ObjectRef, indexTableDef *TableDef, indexdef *plan.IndexDef, typMap map[string]plan.Type, posMap map[string]int) (int32, int, int, Type, error) { + indexObjRef *plan.ObjectRef, indexTableDef *plan.TableDef, indexdef *plan.IndexDef, typMap map[string]plan.Type, posMap map[string]int) (int32, int, int, plan.Type, error) { if delCtx.isDeleteWithoutFilters { // truncate and create a table scan of index table - scanNodeProject := make([]*Expr, len(indexTableDef.Cols)) + scanNodeProject := make([]*plan.Expr, len(indexTableDef.Cols)) for colIdx, col := range indexTableDef.Cols { scanNodeProject[colIdx] = &plan.Expr{ Typ: col.Typ, @@ -4660,7 +4659,7 @@ func buildDeleteRowsFullTextIndex(ctx CompilerContext, builder *QueryBuilder, bi var docIdPos int32 = -1 neededCols := make([]*plan.ColDef, 0, 3) - scanNodeProject := make([]*Expr, 3) + scanNodeProject := make([]*plan.Expr, 3) for _, colVal := range indexTableDef.Cols { if colVal.Name == catalog.Row_ID { scanNodeProject[0] = &plan.Expr{ @@ -4699,7 +4698,7 @@ func buildDeleteRowsFullTextIndex(ctx CompilerContext, builder *QueryBuilder, bi } } - newIndexTableDef := DeepCopyTableDef(indexTableDef, false) + newIndexTableDef := plan.DeepCopyTableDef(indexTableDef, false) newIndexTableDef.Cols = neededCols probeExpr := &plan.Expr{ @@ -4746,12 +4745,12 @@ func buildDeleteRowsFullTextIndex(ctx CompilerContext, builder *QueryBuilder, bi }, } - joinCond, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*Expr{leftExpr, rightExpr}) + joinCond, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*plan.Expr{leftExpr, rightExpr}) if err != nil { - return -1, -1, -1, Type{}, err + return -1, -1, -1, 
plan.Type{}, err } - projectList := make([]*Expr, 0, 2) + projectList := make([]*plan.Expr, 0, 2) projectList = append(projectList, &plan.Expr{ Typ: scanNodeProject[0].Typ, Expr: &plan.Expr_Col{ @@ -4797,7 +4796,7 @@ func buildDeleteRowsFullTextIndex(ctx CompilerContext, builder *QueryBuilder, bi NodeType: plan.Node_JOIN, JoinType: plan.Node_INNER, Children: []int32{idxScanId, lastNodeId}, - OnList: []*Expr{joinCond}, + OnList: []*plan.Expr{joinCond}, ProjectList: projectList, RuntimeFilterBuildList: []*plan.RuntimeFilterSpec{MakeRuntimeFilter(rfTag, false, GetInFilterCardLimit(sid), rfBuildExpr, true)}, }, bindCtx) @@ -4884,7 +4883,7 @@ func buildPreDeleteFullTextIndex(ctx CompilerContext, builder *QueryBuilder, bin } // build PostDml FullText Index node -func buildPostDmlFullTextIndex(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindContext, indexObjRef *ObjectRef, indexTableDef *TableDef, tableDef *TableDef, +func buildPostDmlFullTextIndex(ctx CompilerContext, builder *QueryBuilder, bindCtx *BindContext, indexObjRef *plan.ObjectRef, indexTableDef *plan.TableDef, tableDef *plan.TableDef, sourceStep int32, indexdef *plan.IndexDef, idx int, isDelete, isInsert, isDeleteWithoutFilters bool) error { // skip async @@ -4949,7 +4948,7 @@ func buildPostDeleteFullTextIndex(ctx CompilerContext, builder *QueryBuilder, bi } // Post Insert FullText Index to use PostDml node to save INSERT SQL and execute after the pipelines -func buildPostInsertFullTextIndex(stmt *tree.Insert, ctx CompilerContext, builder *QueryBuilder, bindCtx *BindContext, objRef *ObjectRef, tableDef *TableDef, +func buildPostInsertFullTextIndex(stmt *tree.Insert, ctx CompilerContext, builder *QueryBuilder, bindCtx *BindContext, objRef *plan.ObjectRef, tableDef *plan.TableDef, updateColLength int, sourceStep int32, ifInsertFromUniqueColMap map[string]bool, indexdef *plan.IndexDef, idx int) error { //isUpdate := updateColLength > 0 diff --git a/pkg/sql/plan/build_dml_util_test.go 
b/pkg/sql/planner/build_dml_util_test.go similarity index 99% rename from pkg/sql/plan/build_dml_util_test.go rename to pkg/sql/planner/build_dml_util_test.go index 2fe741e4545c5..156956a1b8b0e 100644 --- a/pkg/sql/plan/build_dml_util_test.go +++ b/pkg/sql/planner/build_dml_util_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "context" diff --git a/pkg/sql/plan/build_expr_test.go b/pkg/sql/planner/build_expr_test.go similarity index 99% rename from pkg/sql/plan/build_expr_test.go rename to pkg/sql/planner/build_expr_test.go index 4e80bba5243b5..63cd3acd59fff 100644 --- a/pkg/sql/plan/build_expr_test.go +++ b/pkg/sql/planner/build_expr_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "testing" diff --git a/pkg/sql/plan/build_index_util.go b/pkg/sql/planner/build_index_util.go similarity index 99% rename from pkg/sql/plan/build_index_util.go rename to pkg/sql/planner/build_index_util.go index 7cbea61f85c2b..f319a52d52960 100644 --- a/pkg/sql/plan/build_index_util.go +++ b/pkg/sql/planner/build_index_util.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "context" diff --git a/pkg/sql/plan/build_insert.go b/pkg/sql/planner/build_insert.go similarity index 92% rename from pkg/sql/plan/build_insert.go rename to pkg/sql/planner/build_insert.go index 233981335f135..bdedc7053ea15 100644 --- a/pkg/sql/plan/build_insert.go +++ b/pkg/sql/planner/build_insert.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "context" @@ -30,7 +30,7 @@ import ( v2 "github.com/matrixorigin/matrixone/pkg/util/metric/v2" ) -func buildInsert(stmt *tree.Insert, ctx CompilerContext, isReplace bool, isPrepareStmt bool) (p *Plan, err error) { +func buildInsert(stmt *tree.Insert, ctx CompilerContext, isReplace bool, isPrepareStmt bool) (p *plan.Plan, err error) { start := time.Now() defer func() { v2.TxnStatementBuildInsertHistogram.Observe(time.Since(start).Seconds()) @@ -83,7 +83,7 @@ func buildInsert(stmt *tree.Insert, ctx CompilerContext, isReplace bool, isPrepa builder.isRestoreByTs = true } oldSnapshot := builder.compCtx.GetSnapshot() - builder.compCtx.SetSnapshot(&Snapshot{ + builder.compCtx.SetSnapshot(&plan.Snapshot{ Tenant: &plan.SnapshotTenant{ TenantName: "xxx", TenantID: stmt.FromDataTenantID, @@ -114,7 +114,7 @@ func buildInsert(stmt *tree.Insert, ctx CompilerContext, isReplace bool, isPrepa objRef := tblInfo.objRef[0] if len(rewriteInfo.onDuplicateIdx) > 0 { // append on duplicate key node - tableDef = DeepCopyTableDef(tableDef, true) + tableDef = plan.DeepCopyTableDef(tableDef, true) if tableDef.Pkey != nil && tableDef.Pkey.PkeyColName == catalog.CPrimaryKeyColName { tableDef.Cols = append(tableDef.Cols, tableDef.Pkey.CompPkeyCol) } @@ -126,34 +126,34 @@ func buildInsert(stmt *tree.Insert, ctx CompilerContext, isReplace bool, isPrepa // if table have pk & unique key. 
we need append an agg node before on_duplicate_key if rewriteInfo.onDuplicateNeedAgg { colLen := len(tableDef.Cols) - aggGroupBy := make([]*Expr, 0, colLen) - aggList := make([]*Expr, 0, len(dupProjection)-colLen) - aggProject := make([]*Expr, 0, len(dupProjection)) + aggGroupBy := make([]*plan.Expr, 0, colLen) + aggList := make([]*plan.Expr, 0, len(dupProjection)-colLen) + aggProject := make([]*plan.Expr, 0, len(dupProjection)) for i := 0; i < len(dupProjection); i++ { if i < colLen { - aggGroupBy = append(aggGroupBy, &Expr{ + aggGroupBy = append(aggGroupBy, &plan.Expr{ Typ: dupProjection[i].Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ ColPos: int32(i), }, }, }) - aggProject = append(aggProject, &Expr{ + aggProject = append(aggProject, &plan.Expr{ Typ: dupProjection[i].Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ RelPos: -1, ColPos: int32(i), }, }, }) } else { - aggExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "any_value", []*Expr{ + aggExpr, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "any_value", []*plan.Expr{ { Typ: dupProjection[i].Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ ColPos: int32(i), }, }, @@ -163,10 +163,10 @@ func buildInsert(stmt *tree.Insert, ctx CompilerContext, isReplace bool, isPrepa return nil, err } aggList = append(aggList, aggExpr) - aggProject = append(aggProject, &Expr{ + aggProject = append(aggProject, &plan.Expr{ Typ: dupProjection[i].Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ RelPos: -2, ColPos: int32(i), }, @@ -175,7 +175,7 @@ func buildInsert(stmt *tree.Insert, ctx CompilerContext, isReplace bool, isPrepa } } - aggNode := &Node{ + aggNode := &plan.Node{ NodeType: plan.Node_AGG, Children: []int32{lastNodeId}, GroupBy: aggGroupBy, @@ -212,7 +212,7 @@ func buildInsert(stmt *tree.Insert, ctx CompilerContext, isReplace bool, isPrepa } uniqueCol[i] = strings.Join(keys, ",") } - onDuplicateKeyNode := &Node{ + onDuplicateKeyNode := 
&plan.Node{ NodeType: plan.Node_ON_DUPLICATE_KEY, Children: []int32{lastNodeId}, ProjectList: dupProjection, @@ -236,8 +236,8 @@ func buildInsert(stmt *tree.Insert, ctx CompilerContext, isReplace bool, isPrepa updateColPosMap := make(map[string]int) updatePkCol := false var insertColPos []int - var projectProjection []*Expr - tableDef = DeepCopyTableDef(tableDef, true) + var projectProjection []*plan.Expr + tableDef = plan.DeepCopyTableDef(tableDef, true) tableDef.Cols = append(tableDef.Cols, MakeRowIdColDef()) colLength := len(tableDef.Cols) rowIdPos := colLength - 1 @@ -279,7 +279,7 @@ func buildInsert(stmt *tree.Insert, ctx CompilerContext, isReplace bool, isPrepa updateColPosMap[col.Name] = colLength + i insertColPos = append(insertColPos, colLength+i) } - projectNode := &Node{ + projectNode := &plan.Node{ NodeType: plan.Node_PROJECT, Children: []int32{lastNodeId}, ProjectList: projectProjection, @@ -328,7 +328,7 @@ func buildInsert(stmt *tree.Insert, ctx CompilerContext, isReplace bool, isPrepa builder.tempOptimizeForDML() reCheckifNeedLockWholeTable(builder) - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Query{ Query: query, }, @@ -344,7 +344,7 @@ func buildInsert(stmt *tree.Insert, ctx CompilerContext, isReplace bool, isPrepa // If the INSERT statement specifies the columns, it validates the column names against the table definition // and returns an error if any of the column names are invalid. // The function returns the list of insert columns and an error, if any. 
-func getInsertColsFromStmt(ctx context.Context, stmt *tree.Insert, tableDef *TableDef) ([]string, error) { +func getInsertColsFromStmt(ctx context.Context, stmt *tree.Insert, tableDef *plan.TableDef) ([]string, error) { var insertColsName []string colToIdx := make(map[string]int32) for i, col := range tableDef.Cols { @@ -386,7 +386,7 @@ func getInsertColsFromStmt(ctx context.Context, stmt *tree.Insert, tableDef *Tab // for 3 and 5, after valuescan refactor, insert batch is construct in compile , so we can't get batch here, return false directly // // Otherwise, the primary key filter cannot be used. -func canUsePkFilter(builder *QueryBuilder, ctx CompilerContext, stmt *tree.Insert, tableDef *TableDef, insertColsName []string, uniqueIndexDef *IndexDef) bool { +func canUsePkFilter(builder *QueryBuilder, ctx CompilerContext, stmt *tree.Insert, tableDef *plan.TableDef, insertColsName []string, uniqueIndexDef *plan.IndexDef) bool { var isCompound bool var used4UniqueIndex bool // mark if this pkfilter is used for hidden table created by unique index @@ -514,7 +514,7 @@ type locationMap struct { // need to check if the primary key filter can be used before calling this function. 
// also need to consider both origin table and hidden table for unique key -func newLocationMap(tableDef *TableDef, uniqueIndexDef *IndexDef) *locationMap { +func newLocationMap(tableDef *plan.TableDef, uniqueIndexDef *plan.IndexDef) *locationMap { if uniqueIndexDef != nil && !uniqueIndexDef.Unique { panic("uniqueIndexDef.Unique must be true") } @@ -547,10 +547,10 @@ func newLocationMap(tableDef *TableDef, uniqueIndexDef *IndexDef) *locationMap { } } -func getPkValueExpr(builder *QueryBuilder, ctx CompilerContext, tableDef *TableDef, lmap *locationMap, insertColsNameFromStmt []string) (pkFilterExprs []*Expr, err error) { +func getPkValueExpr(builder *QueryBuilder, ctx CompilerContext, tableDef *plan.TableDef, lmap *locationMap, insertColsNameFromStmt []string) (pkFilterExprs []*plan.Expr, err error) { var pkLocationInfo orderAndIdx var ok bool - var col *ColDef + var col *plan.ColDef proc := ctx.GetProcess() node := builder.qry.Nodes[0] isCompound := len(lmap.m) > 1 @@ -569,7 +569,7 @@ func getPkValueExpr(builder *QueryBuilder, ctx CompilerContext, tableDef *TableD // colExprs will store the constant value expressions (or UUID value) for each primary key column by the order in insert value SQL // that is, the key part of pkPosInValues, more info see the comment of func getPkOrderInValues - colExprs := make([][]*Expr, len(lmap.m)) + colExprs := make([][]*plan.Expr, len(lmap.m)) rowsCount := len(node.RowsetData.Cols[0].Data) // If the expression is nil, it creates a constant expression with either the UUID value or a constant value. 
for idx, name := range insertColsNameFromStmt { @@ -578,10 +578,10 @@ func getPkValueExpr(builder *QueryBuilder, ctx CompilerContext, tableDef *TableD } col = tableDef.Cols[tableDef.Name2ColIndex[name]] - valExprs := make([]*Expr, rowsCount) + valExprs := make([]*plan.Expr, rowsCount) for i, data := range node.RowsetData.Cols[idx].Data { - rowExpr := DeepCopyExpr(data.Expr) + rowExpr := plan.DeepCopyExpr(data.Expr) e, err := forceCastExpr(builder.GetContext(), rowExpr, col.Typ) if err != nil { return nil, err @@ -607,7 +607,7 @@ func getPkValueExpr(builder *QueryBuilder, ctx CompilerContext, tableDef *TableD pkExpr := &plan.Expr{ Typ: col.Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ ColPos: 0, Name: colName, }, @@ -616,14 +616,14 @@ func getPkValueExpr(builder *QueryBuilder, ctx CompilerContext, tableDef *TableD if rowsCount == 1 { // pk = a1 or pk = a2 or pk = a3 - filterExpr, _ = BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*Expr{ + filterExpr, _ = BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*plan.Expr{ pkExpr, colExprs[0][0], }) } else { // pk in (a1, a2, a3) // args in list must be constant - filterExpr, _ = BindFuncExprImplByPlanExpr(builder.GetContext(), "in", []*Expr{ + filterExpr, _ = BindFuncExprImplByPlanExpr(builder.GetContext(), "in", []*plan.Expr{ pkExpr, { Typ: pkExpr.Typ, @@ -649,7 +649,7 @@ func getPkValueExpr(builder *QueryBuilder, ctx CompilerContext, tableDef *TableD pkExpr := &plan.Expr{ Typ: makeHiddenColTyp(), Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ ColPos: colPos, Name: colName, }, @@ -664,7 +664,7 @@ func getPkValueExpr(builder *QueryBuilder, ctx CompilerContext, tableDef *TableD } serialExpr, _ := BindFuncExprImplByPlanExpr(builder.GetContext(), "serial", serialArgs) - filterExpr, _ = BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*Expr{ + filterExpr, _ = BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*plan.Expr{ pkExpr, serialExpr, }) @@ -678,7 +678,7 @@ func 
getPkValueExpr(builder *QueryBuilder, ctx CompilerContext, tableDef *TableD inArgs[i], _ = BindFuncExprImplByPlanExpr(builder.GetContext(), "serial", serialArgs) } - filterExpr, _ = BindFuncExprImplByPlanExpr(builder.GetContext(), "in", []*Expr{ + filterExpr, _ = BindFuncExprImplByPlanExpr(builder.GetContext(), "in", []*plan.Expr{ pkExpr, { Typ: pkExpr.Typ, @@ -697,10 +697,10 @@ func getPkValueExpr(builder *QueryBuilder, ctx CompilerContext, tableDef *TableD return nil, nil } - return []*Expr{filterExpr}, nil + return []*plan.Expr{filterExpr}, nil } -func getRewriteToReplaceStmt(tableDef *TableDef, stmt *tree.Insert, info *dmlSelectInfo, isPrepareStmt bool) *tree.Replace { +func getRewriteToReplaceStmt(tableDef *plan.TableDef, stmt *tree.Insert, info *dmlSelectInfo, isPrepareStmt bool) *tree.Replace { if len(info.onDuplicateIdx) == 0 { return nil } diff --git a/pkg/sql/plan/build_load.go b/pkg/sql/planner/build_load.go similarity index 94% rename from pkg/sql/plan/build_load.go rename to pkg/sql/planner/build_load.go index cc47dc9e41a0e..a03bed7d6e365 100644 --- a/pkg/sql/plan/build_load.go +++ b/pkg/sql/planner/build_load.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "bufio" @@ -37,8 +37,8 @@ const ( LoadWriteS3MinSize = 1 << 20 ) -func getNormalExternalProject(stmt *tree.Load, ctx CompilerContext, tableDef *TableDef, tblName string) ([]*Expr, map[string]int32, map[string]int32, *TableDef, error) { - var externalProject []*Expr +func getNormalExternalProject(stmt *tree.Load, ctx CompilerContext, tableDef *plan.TableDef, tblName string) ([]*plan.Expr, map[string]int32, map[string]int32, *plan.TableDef, error) { + var externalProject []*plan.Expr colToIndex := make(map[string]int32, 0) for i, col := range tableDef.Cols { if col.Name != catalog.FakePrimaryKeyColName { @@ -58,13 +58,13 @@ func getNormalExternalProject(stmt *tree.Load, ctx CompilerContext, tableDef *Ta return externalProject, colToIndex, colToIndex, tableDef, nil } -func getExternalWithColListProject(stmt *tree.Load, ctx CompilerContext, tableDef *TableDef, tblName string) ([]*Expr, map[string]int32, map[string]int32, *TableDef, error) { - var externalProject []*Expr +func getExternalWithColListProject(stmt *tree.Load, ctx CompilerContext, tableDef *plan.TableDef, tblName string) ([]*plan.Expr, map[string]int32, map[string]int32, *plan.TableDef, error) { + var externalProject []*plan.Expr colToIndex := make(map[string]int32, 0) tbColToDataCol := make(map[string]int32, 0) - var newCols []*ColDef + var newCols []*plan.ColDef - newTableDef := DeepCopyTableDef(tableDef, true) + newTableDef := plan.DeepCopyTableDef(tableDef, true) colPos := 0 for i, col := range stmt.Param.Tail.ColumnList { switch realCol := col.(type) { @@ -101,7 +101,7 @@ func getExternalWithColListProject(stmt *tree.Load, ctx CompilerContext, tableDe return externalProject, colToIndex, tbColToDataCol, newTableDef, nil } -func getExternalProject(stmt *tree.Load, ctx CompilerContext, tableDef *TableDef, tblName string) ([]*Expr, map[string]int32, map[string]int32, *TableDef, error) { +func getExternalProject(stmt *tree.Load, ctx CompilerContext, tableDef 
*plan.TableDef, tblName string) ([]*plan.Expr, map[string]int32, map[string]int32, *plan.TableDef, error) { if len(stmt.Param.Tail.ColumnList) == 0 { return getNormalExternalProject(stmt, ctx, tableDef, tblName) } else { @@ -211,7 +211,7 @@ func IgnoredLines(param *tree.ExternParam, ctx CompilerContext) (offset int64, e return csvReader.Pos(), nil } -func buildLoad(stmt *tree.Load, ctx CompilerContext, isPrepareStmt bool) (*Plan, error) { +func buildLoad(stmt *tree.Load, ctx CompilerContext, isPrepareStmt bool) (*plan.Plan, error) { start := time.Now() defer func() { v2.TxnStatementBuildLoadHistogram.Observe(time.Since(start).Seconds()) @@ -348,7 +348,7 @@ func buildLoad(stmt *tree.Load, ctx CompilerContext, isPrepareStmt bool) (*Plan, } // append hidden column to tableDef - newTableDef := DeepCopyTableDef(originTableDef, true) + newTableDef := plan.DeepCopyTableDef(originTableDef, true) err = buildInsertPlans(ctx, builder, bindCtx, nil, objRef, newTableDef, lastNodeId, ifExistAutoPkCol, nil, nil) if err != nil { return nil, err @@ -377,7 +377,7 @@ func buildLoad(stmt *tree.Load, ctx CompilerContext, isPrepareStmt bool) (*Plan, builder.tempOptimizeForDML() query.StmtType = plan.Query_INSERT - pn := &Plan{ + pn := &plan.Plan{ Plan: &plan.Plan_Query{ Query: query, }, @@ -420,7 +420,7 @@ func checkFileExist(param *tree.ExternParam, ctx CompilerContext) (string, error return param.Filepath, nil } -func getProjectNode(stmt *tree.Load, ctx CompilerContext, node *plan.Node, tableDef *TableDef, colToIndex map[string]int32) (bool, error) { +func getProjectNode(stmt *tree.Load, ctx CompilerContext, node *plan.Node, tableDef *plan.TableDef, colToIndex map[string]int32) (bool, error) { tblName := string(stmt.Table.ObjectName) ifExistAutoPkCol := false node.ProjectList = make([]*plan.Expr, len(tableDef.Cols)) @@ -497,7 +497,7 @@ func InitNullMap(param *tree.ExternParam, ctx CompilerContext) error { return nil } -func checkNullMap(stmt *tree.Load, Cols []*ColDef, ctx 
CompilerContext) error { +func checkNullMap(stmt *tree.Load, Cols []*plan.ColDef, ctx CompilerContext) error { for k := range stmt.Param.NullMap { find := false for i := 0; i < len(Cols); i++ { @@ -533,7 +533,7 @@ func getCompressType(param *tree.ExternParam, filepath string) string { } } -func makeCastExpr(stmt *tree.Load, fileName string, tableDef *TableDef, node *plan.Node) []*plan.Expr { +func makeCastExpr(stmt *tree.Load, fileName string, tableDef *plan.TableDef, node *plan.Node) []*plan.Expr { ret := make([]*plan.Expr, 0) stringTyp := &plan.Type{ Id: int32(types.T_varchar), diff --git a/pkg/sql/plan/build_replace.go b/pkg/sql/planner/build_replace.go similarity index 97% rename from pkg/sql/plan/build_replace.go rename to pkg/sql/planner/build_replace.go index 383b46ce52cb0..f91afae5fe12b 100644 --- a/pkg/sql/plan/build_replace.go +++ b/pkg/sql/planner/build_replace.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "fmt" @@ -27,7 +27,7 @@ import ( v2 "github.com/matrixorigin/matrixone/pkg/util/metric/v2" ) -func buildReplace(stmt *tree.Replace, ctx CompilerContext, isPrepareStmt bool, rewriteFromOnDuplicateKey bool) (p *Plan, err error) { +func buildReplace(stmt *tree.Replace, ctx CompilerContext, isPrepareStmt bool, rewriteFromOnDuplicateKey bool) (p *plan.Plan, err error) { start := time.Now() defer func() { v2.TxnStatementBuildReplaceHistogram.Observe(time.Since(start).Seconds()) @@ -73,7 +73,7 @@ func buildReplace(stmt *tree.Replace, ctx CompilerContext, isPrepareStmt bool, r } } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Query{ Query: &plan.Query{ StmtType: plan.Query_REPLACE, diff --git a/pkg/sql/plan/build_sample.go b/pkg/sql/planner/build_sample.go similarity index 99% rename from pkg/sql/plan/build_sample.go rename to pkg/sql/planner/build_sample.go index 177e9f95f8ae0..ffa566a39f0c6 100644 --- a/pkg/sql/plan/build_sample.go +++ b/pkg/sql/planner/build_sample.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/common/moerr" diff --git a/pkg/sql/plan/build_show.go b/pkg/sql/planner/build_show.go similarity index 96% rename from pkg/sql/plan/build_show.go rename to pkg/sql/planner/build_show.go index 018ee0a2bd9cf..33b91bc3db8a6 100644 --- a/pkg/sql/plan/build_show.go +++ b/pkg/sql/planner/build_show.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "bytes" @@ -39,10 +39,10 @@ const INFORMATION_SCHEMA = "information_schema" const SYSMOCATALOGPITR = "sys_mo_catalog_pitr" func buildShowCreateDatabase(stmt *tree.ShowCreateDatabase, - ctx CompilerContext) (*Plan, error) { + ctx CompilerContext) (*plan.Plan, error) { var err error - var snapshot *Snapshot + var snapshot *plan.Snapshot var snapshotSpec string if stmt.AtTsExpr != nil { if snapshot, err = getTimeStampByTsHint(ctx, stmt.AtTsExpr); err != nil { @@ -81,12 +81,12 @@ func buildShowCreateDatabase(stmt *tree.ShowCreateDatabase, return returnByRewriteSQL(ctx, sqlStr, plan.DataDefinition_SHOW_CREATEDATABASE) } -func buildShowCreateTable(stmt *tree.ShowCreateTable, ctx CompilerContext) (*Plan, error) { +func buildShowCreateTable(stmt *tree.ShowCreateTable, ctx CompilerContext) (*plan.Plan, error) { var err error tblName := stmt.Name.GetTableName() dbName := stmt.Name.GetDBName() - var snapshot *Snapshot + var snapshot *plan.Snapshot if stmt.AtTsExpr != nil { if snapshot, err = getTimeStampByTsHint(ctx, stmt.AtTsExpr); err != nil { return nil, err @@ -163,12 +163,12 @@ func buildShowCreateTable(stmt *tree.ShowCreateTable, ctx CompilerContext) (*Pla } // buildShowCreateView -func buildShowCreateView(stmt *tree.ShowCreateView, ctx CompilerContext) (*Plan, error) { +func buildShowCreateView(stmt *tree.ShowCreateView, ctx CompilerContext) (*plan.Plan, error) { var err error tblName := stmt.Name.GetTableName() dbName := stmt.Name.GetDBName() - var snapshot *Snapshot + var snapshot *plan.Snapshot if stmt.AtTsExpr != nil { if snapshot, err = getTimeStampByTsHint(ctx, stmt.AtTsExpr); err != nil { return nil, err @@ -208,7 +208,7 @@ func buildShowCreateView(stmt *tree.ShowCreateView, ctx CompilerContext) (*Plan, return returnByRewriteSQL(ctx, sqlStr, plan.DataDefinition_SHOW_CREATETABLE) } -func buildShowDatabases(stmt *tree.ShowDatabases, ctx CompilerContext) (*Plan, error) { +func buildShowDatabases(stmt *tree.ShowDatabases, ctx 
CompilerContext) (*plan.Plan, error) { if stmt.Like != nil && stmt.Where != nil { return nil, moerr.NewSyntaxError(ctx.GetContext(), "like clause and where clause cannot exist at the same time") } @@ -222,7 +222,7 @@ func buildShowDatabases(stmt *tree.ShowDatabases, ctx CompilerContext) (*Plan, e var sql string snapshotSpec := "" - var snapshot *Snapshot + var snapshot *plan.Snapshot if stmt.AtTsExpr != nil { if snapshot, err = getTimeStampByTsHint(ctx, stmt.AtTsExpr); err != nil { return nil, err @@ -255,7 +255,7 @@ func buildShowDatabases(stmt *tree.ShowDatabases, ctx CompilerContext) (*Plan, e return returnByRewriteSQL(ctx, sql, ddlType) } -func buildShowSequences(stmt *tree.ShowSequences, ctx CompilerContext) (*Plan, error) { +func buildShowSequences(stmt *tree.ShowSequences, ctx CompilerContext) (*plan.Plan, error) { // snapshot to fix dbName, err := databaseIsValid(stmt.DBName, ctx, nil) if err != nil { @@ -274,7 +274,7 @@ func buildShowSequences(stmt *tree.ShowSequences, ctx CompilerContext) (*Plan, e return returnByRewriteSQL(ctx, sql, ddlType) } -func buildShowTables(stmt *tree.ShowTables, ctx CompilerContext) (*Plan, error) { +func buildShowTables(stmt *tree.ShowTables, ctx CompilerContext) (*plan.Plan, error) { if stmt.Like != nil && stmt.Where != nil { return nil, moerr.NewSyntaxError(ctx.GetContext(), "like clause and where clause cannot exist at the same time") } @@ -288,7 +288,7 @@ func buildShowTables(stmt *tree.ShowTables, ctx CompilerContext) (*Plan, error) return nil, err } - var snapshot *Snapshot + var snapshot *plan.Snapshot snapshotSpec := "" if stmt.AtTsExpr != nil { if snapshot, err = getTimeStampByTsHint(ctx, stmt.AtTsExpr); err != nil { @@ -365,14 +365,14 @@ func buildShowTables(stmt *tree.ShowTables, ctx CompilerContext) (*Plan, error) return returnByRewriteSQL(ctx, sql, ddlType) } -func buildShowTableNumber(stmt *tree.ShowTableNumber, ctx CompilerContext) (*Plan, error) { +func buildShowTableNumber(stmt *tree.ShowTableNumber, ctx 
CompilerContext) (*plan.Plan, error) { accountId, err := ctx.GetAccountId() if err != nil { return nil, err } // snapshot to fix - var snapshot *Snapshot + var snapshot *plan.Snapshot dbName, err := databaseIsValid(stmt.DbName, ctx, snapshot) if err != nil { return nil, err @@ -410,7 +410,7 @@ func buildShowTableNumber(stmt *tree.ShowTableNumber, ctx CompilerContext) (*Pla return returnByRewriteSQL(ctx, sql, ddlType) } -func buildShowColumnNumber(stmt *tree.ShowColumnNumber, ctx CompilerContext) (*Plan, error) { +func buildShowColumnNumber(stmt *tree.ShowColumnNumber, ctx CompilerContext) (*plan.Plan, error) { accountId, err := ctx.GetAccountId() if err != nil { return nil, err @@ -433,11 +433,11 @@ func buildShowColumnNumber(stmt *tree.ShowColumnNumber, ctx CompilerContext) (*P ddlType := plan.DataDefinition_SHOW_COLUMNS var sql string - var sub *SubscriptionMeta + var sub *plan.SubscriptionMeta if obj.PubInfo != nil { accountId = uint32(obj.PubInfo.GetTenantId()) dbName = obj.SchemaName - sub = &SubscriptionMeta{ + sub = &plan.SubscriptionMeta{ AccountId: obj.PubInfo.GetTenantId(), } ctx.SetQueryingSubscription(sub) @@ -463,7 +463,7 @@ func buildShowColumnNumber(stmt *tree.ShowColumnNumber, ctx CompilerContext) (*P return returnByRewriteSQL(ctx, sql, ddlType) } -func buildShowTableValues(stmt *tree.ShowTableValues, ctx CompilerContext) (*Plan, error) { +func buildShowTableValues(stmt *tree.ShowTableValues, ctx CompilerContext) (*plan.Plan, error) { dbName, err := databaseIsValid(getSuitableDBName(stmt.Table.GetDBName(), stmt.DbName), ctx, nil) if err != nil { return nil, err @@ -479,7 +479,7 @@ func buildShowTableValues(stmt *tree.ShowTableValues, ctx CompilerContext) (*Pla } if obj.PubInfo != nil { - sub := &SubscriptionMeta{ + sub := &plan.SubscriptionMeta{ AccountId: obj.PubInfo.GetTenantId(), } ctx.SetQueryingSubscription(sub) @@ -517,7 +517,7 @@ func buildShowTableValues(stmt *tree.ShowTableValues, ctx CompilerContext) (*Pla return returnByRewriteSQL(ctx, 
sql, ddlType) } -func buildShowColumns(stmt *tree.ShowColumns, ctx CompilerContext) (*Plan, error) { +func buildShowColumns(stmt *tree.ShowColumns, ctx CompilerContext) (*plan.Plan, error) { if stmt.Like != nil && stmt.Where != nil { return nil, moerr.NewSyntaxError(ctx.GetContext(), "like clause and where clause cannot exist at the same time") } @@ -552,11 +552,11 @@ func buildShowColumns(stmt *tree.ShowColumns, ctx CompilerContext) (*Plan, error colNameToOriginName[col.Name] = colNameOrigin } - var sub *SubscriptionMeta + var sub *plan.SubscriptionMeta if obj.PubInfo != nil { dbName = obj.SchemaName accountId = uint32(obj.PubInfo.GetTenantId()) - sub = &SubscriptionMeta{ + sub = &plan.SubscriptionMeta{ AccountId: obj.PubInfo.GetTenantId(), } ctx.SetQueryingSubscription(sub) @@ -655,7 +655,7 @@ func buildShowColumns(stmt *tree.ShowColumns, ctx CompilerContext) (*Plan, error return returnByRewriteSQL(ctx, sql, ddlType) } -func buildShowTableStatus(stmt *tree.ShowTableStatus, ctx CompilerContext) (*Plan, error) { +func buildShowTableStatus(stmt *tree.ShowTableStatus, ctx CompilerContext) (*plan.Plan, error) { if stmt.Like != nil && stmt.Where != nil { return nil, moerr.NewSyntaxError(ctx.GetContext(), "like clause and where clause cannot exist at the same time") } @@ -742,7 +742,7 @@ func buildShowTableStatus(stmt *tree.ShowTableStatus, ctx CompilerContext) (*Pla } // TODO: Implement show target -func buildShowTarget(stmt *tree.ShowTarget, ctx CompilerContext) (*Plan, error) { +func buildShowTarget(stmt *tree.ShowTarget, ctx CompilerContext) (*plan.Plan, error) { ddlType := plan.DataDefinition_SHOW_TARGET sql := "" switch stmt.Type { @@ -756,19 +756,19 @@ func buildShowTarget(stmt *tree.ShowTarget, ctx CompilerContext) (*Plan, error) return returnByRewriteSQL(ctx, sql, ddlType) } -func buildShowLocks(stmt *tree.ShowLocks, ctx CompilerContext) (*Plan, error) { +func buildShowLocks(stmt *tree.ShowLocks, ctx CompilerContext) (*plan.Plan, error) { ddlType := 
plan.DataDefinition_SHOW_TARGET sql := "select 1 where 0" return returnByRewriteSQL(ctx, sql, ddlType) } -func buildShowNodeList(stmt *tree.ShowNodeList, ctx CompilerContext) (*Plan, error) { +func buildShowNodeList(stmt *tree.ShowNodeList, ctx CompilerContext) (*plan.Plan, error) { ddlType := plan.DataDefinition_SHOW_TARGET sql := "select 1 where 0" return returnByRewriteSQL(ctx, sql, ddlType) } -func buildShowFunctionOrProcedureStatus(stmt *tree.ShowFunctionOrProcedureStatus, ctx CompilerContext) (*Plan, error) { +func buildShowFunctionOrProcedureStatus(stmt *tree.ShowFunctionOrProcedureStatus, ctx CompilerContext) (*plan.Plan, error) { var sql string ddlType := plan.DataDefinition_SHOW_TARGET @@ -796,7 +796,7 @@ func buildShowFunctionOrProcedureStatus(stmt *tree.ShowFunctionOrProcedureStatus return returnByRewriteSQL(ctx, sql, ddlType) } -func buildShowTriggers(stmt *tree.ShowTarget, ctx CompilerContext) (*Plan, error) { +func buildShowTriggers(stmt *tree.ShowTarget, ctx CompilerContext) (*plan.Plan, error) { if stmt.Like != nil && stmt.Where != nil { return nil, moerr.NewSyntaxError(ctx.GetContext(), "like clause and where clause cannot exist at the same time") } @@ -824,8 +824,8 @@ func buildShowTriggers(stmt *tree.ShowTarget, ctx CompilerContext) (*Plan, error return returnByRewriteSQL(ctx, sql, ddlType) } -func buildShowIndex(stmt *tree.ShowIndex, ctx CompilerContext) (*Plan, error) { - var snapshot *Snapshot +func buildShowIndex(stmt *tree.ShowIndex, ctx CompilerContext) (*plan.Plan, error) { + var snapshot *plan.Snapshot tmpDbName := getSuitableDBName(stmt.TableName.GetDBName(), stmt.DbName) dbName, err := databaseIsValid(tmpDbName, ctx, snapshot) if err != nil { @@ -844,7 +844,7 @@ func buildShowIndex(stmt *tree.ShowIndex, ctx CompilerContext) (*Plan, error) { ddlType := plan.DataDefinition_SHOW_INDEX if obj.PubInfo != nil { - sub := &SubscriptionMeta{ + sub := &plan.SubscriptionMeta{ AccountId: obj.PubInfo.GetTenantId(), } dbName = obj.SchemaName @@ 
-915,7 +915,7 @@ func buildShowIndex(stmt *tree.ShowIndex, ctx CompilerContext) (*Plan, error) { } // TODO: Improve SQL. Currently, Lack of the mata of grants -func buildShowGrants(stmt *tree.ShowGrants, ctx CompilerContext) (*Plan, error) { +func buildShowGrants(stmt *tree.ShowGrants, ctx CompilerContext) (*plan.Plan, error) { ddlType := plan.DataDefinition_SHOW_TARGET if stmt.ShowGrantType == tree.GrantForRole { @@ -936,7 +936,7 @@ func buildShowGrants(stmt *tree.ShowGrants, ctx CompilerContext) (*Plan, error) } } -func buildShowRoles(stmt *tree.ShowRolesStmt, ctx CompilerContext) (*Plan, error) { +func buildShowRoles(stmt *tree.ShowRolesStmt, ctx CompilerContext) (*plan.Plan, error) { ddlType := plan.DataDefinition_SHOW_TARGET sql := fmt.Sprintf("SELECT role_name as `ROLE_NAME`, creator as `CREATOR`, created_time as `CREATED_TIME`, comments as `COMMENTS` FROM %s.mo_role;", MO_CATALOG_DB_NAME) @@ -950,7 +950,7 @@ func buildShowRoles(stmt *tree.ShowRolesStmt, ctx CompilerContext) (*Plan, error return returnByRewriteSQL(ctx, sql, ddlType) } -func buildShowStages(stmt *tree.ShowStages, ctx CompilerContext) (*Plan, error) { +func buildShowStages(stmt *tree.ShowStages, ctx CompilerContext) (*plan.Plan, error) { ddlType := plan.DataDefinition_SHOW_TARGET sql := fmt.Sprintf("SELECT stage_name as `STAGE_NAME`, url as `URL`, stage_status as `STATUS`, comment as `COMMENT` FROM %s.mo_stages;", MO_CATALOG_DB_NAME) @@ -964,7 +964,7 @@ func buildShowStages(stmt *tree.ShowStages, ctx CompilerContext) (*Plan, error) return returnByRewriteSQL(ctx, sql, ddlType) } -func buildShowSnapShots(stmt *tree.ShowSnapShots, ctx CompilerContext) (*Plan, error) { +func buildShowSnapShots(stmt *tree.ShowSnapShots, ctx CompilerContext) (*plan.Plan, error) { ddlType := plan.DataDefinition_SHOW_TARGET sql := fmt.Sprintf("SELECT sname as `SNAPSHOT_NAME`, CAST_NANO_TO_TIMESTAMP(ts) as `TIMESTAMP`, level as `SNAPSHOT_LEVEL`, account_name as `ACCOUNT_NAME`, database_name as `DATABASE_NAME`, 
table_name as `TABLE_NAME` FROM %s.mo_snapshots ORDER BY ts DESC", MO_CATALOG_DB_NAME) @@ -975,7 +975,7 @@ func buildShowSnapShots(stmt *tree.ShowSnapShots, ctx CompilerContext) (*Plan, e return returnByRewriteSQL(ctx, sql, ddlType) } -func buildShowPitr(stmt *tree.ShowPitr, ctx CompilerContext) (*Plan, error) { +func buildShowPitr(stmt *tree.ShowPitr, ctx CompilerContext) (*plan.Plan, error) { ddlType := plan.DataDefinition_SHOW_TARGET curAccountId, err := ctx.GetAccountId() if err != nil { @@ -1014,13 +1014,13 @@ func buildShowPitr(stmt *tree.ShowPitr, ctx CompilerContext) (*Plan, error) { return returnByRewriteSQL(ctx, sql, ddlType) } -func buildShowAccountUpgrade(stmt *tree.ShowAccountUpgrade, ctx CompilerContext) (*Plan, error) { +func buildShowAccountUpgrade(stmt *tree.ShowAccountUpgrade, ctx CompilerContext) (*plan.Plan, error) { ddlType := plan.DataDefinition_SHOW_UPGRADE sql := fmt.Sprintf("select account_name as `account_name`, create_version as `current_version` from %s.mo_account order by account_id;", MO_CATALOG_DB_NAME) return returnByRewriteSQL(ctx, sql, ddlType) } -func buildShowVariables(stmt *tree.ShowVariables, ctx CompilerContext) (*Plan, error) { +func buildShowVariables(stmt *tree.ShowVariables, ctx CompilerContext) (*plan.Plan, error) { showVariables := &plan.ShowVariables{ Global: stmt.Global, } @@ -1052,7 +1052,7 @@ func buildShowVariables(stmt *tree.ShowVariables, ctx CompilerContext) (*Plan, e // showVariables.Where = append(showVariables.Where, exprs...) 
// } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Ddl{ Ddl: &plan.DataDefinition{ DdlType: plan.DataDefinition_SHOW_VARIABLES, @@ -1064,20 +1064,20 @@ func buildShowVariables(stmt *tree.ShowVariables, ctx CompilerContext) (*Plan, e }, nil } -func buildShowStatus(stmt *tree.ShowStatus, ctx CompilerContext) (*Plan, error) { +func buildShowStatus(stmt *tree.ShowStatus, ctx CompilerContext) (*plan.Plan, error) { ddlType := plan.DataDefinition_SHOW_STATUS sql := "select '' as `Variable_name`, '' as `Value` where 0" return returnByRewriteSQL(ctx, sql, ddlType) } -func buildShowProcessList(ctx CompilerContext) (*Plan, error) { +func buildShowProcessList(ctx CompilerContext) (*plan.Plan, error) { ddlType := plan.DataDefinition_SHOW_PROCESSLIST // "show processlist" is implemented by table function processlist(). sql := "select * from processlist() a" return returnByRewriteSQL(ctx, sql, ddlType) } -func buildShowCreatePublications(stmt *tree.ShowCreatePublications, ctx CompilerContext) (*Plan, error) { +func buildShowCreatePublications(stmt *tree.ShowCreatePublications, ctx CompilerContext) (*plan.Plan, error) { ddlType := plan.DataDefinition_SHOW_TARGET accountId, err := ctx.GetAccountId() if err != nil { @@ -1089,7 +1089,7 @@ func buildShowCreatePublications(stmt *tree.ShowCreatePublications, ctx Compiler } func returnByRewriteSQL(ctx CompilerContext, sql string, - ddlType plan.DataDefinition_DdlType) (*Plan, error) { + ddlType plan.DataDefinition_DdlType) (*plan.Plan, error) { newStmt, err := getRewriteSQLStmt(ctx, sql) if err != nil { return nil, err @@ -1099,7 +1099,7 @@ func returnByRewriteSQL(ctx CompilerContext, sql string, } func returnByWhereAndBaseSQL(ctx CompilerContext, baseSQL string, - where *tree.Where, ddlType plan.DataDefinition_DdlType) (*Plan, error) { + where *tree.Where, ddlType plan.DataDefinition_DdlType) (*plan.Plan, error) { sql := fmt.Sprintf("SELECT * FROM (%s) tbl", baseSQL) // logutil.Info(sql) newStmt, err := getRewriteSQLStmt(ctx, 
sql) @@ -1113,7 +1113,7 @@ func returnByWhereAndBaseSQL(ctx CompilerContext, baseSQL string, } func returnByLikeAndSQL(ctx CompilerContext, sql string, like *tree.ComparisonExpr, - ddlType plan.DataDefinition_DdlType) (*Plan, error) { + ddlType plan.DataDefinition_DdlType) (*plan.Plan, error) { newStmt, err := getRewriteSQLStmt(ctx, sql) defer newStmt.Free() if err != nil { @@ -1153,7 +1153,7 @@ func getRewriteSQLStmt(ctx CompilerContext, sql string) (tree.Statement, error) } func getReturnDdlBySelectStmt(ctx CompilerContext, stmt tree.Statement, - ddlType plan.DataDefinition_DdlType) (*Plan, error) { + ddlType plan.DataDefinition_DdlType) (*plan.Plan, error) { queryPlan, err := BuildPlan(ctx, stmt, false) if err != nil { return nil, err diff --git a/pkg/sql/plan/build_show_util.go b/pkg/sql/planner/build_show_util.go similarity index 98% rename from pkg/sql/plan/build_show_util.go rename to pkg/sql/planner/build_show_util.go index 6a1114d0e5b91..0cb7a470b7fc1 100644 --- a/pkg/sql/plan/build_show_util.go +++ b/pkg/sql/planner/build_show_util.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "bytes" @@ -35,7 +35,7 @@ import ( func ConstructCreateTableSQL( ctx CompilerContext, tableDef *plan.TableDef, - snapshot *Snapshot, + snapshot *plan.Snapshot, useDbName bool, cloneStmt *tree.CloneTable, ) (string, tree.Statement, error) { @@ -282,7 +282,7 @@ func ConstructCreateTableSQL( } } - updateFKTableDef := func(fkDef *TableDef) (*TableDef, error) { + updateFKTableDef := func(fkDef *plan.TableDef) (*plan.TableDef, error) { if cloneStmt == nil || cloneStmt.StmtType == tree.NoClone { return fkDef, nil } @@ -294,10 +294,10 @@ func ConstructCreateTableSQL( var ( referType int - tempTableDef *TableDef + tempTableDef *plan.TableDef ) - update := func(snap *Snapshot) error { + update := func(snap *plan.Snapshot) error { if _, tempTableDef, err = ctx.Resolve(schemaName, fkDef.Name, snap); err != nil { return err } @@ -354,7 +354,7 @@ func ConstructCreateTableSQL( colOriginNames[i] = colIdToOriginName[colId] } - var fkTableDef *TableDef + var fkTableDef *plan.TableDef //fk self reference if fk.ForeignTbl == 0 { fkTableDef = tableDef diff --git a/pkg/sql/plan/build_show_util_test.go b/pkg/sql/planner/build_show_util_test.go similarity index 98% rename from pkg/sql/plan/build_show_util_test.go rename to pkg/sql/planner/build_show_util_test.go index ce3182c29e5ea..67ddd2741599c 100644 --- a/pkg/sql/plan/build_show_util_test.go +++ b/pkg/sql/planner/build_show_util_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "testing" @@ -181,7 +181,7 @@ func Test_SingleShowCreateTable(t *testing.T) { } } -func buildTestCreateTableStmt(opt Optimizer, sql string) (*TableDef, error) { +func buildTestCreateTableStmt(opt Optimizer, sql string) (*plan.TableDef, error) { statements, err := mysql.Parse(opt.CurrentContext().GetContext(), sql, 1) if err != nil { return nil, err diff --git a/pkg/sql/plan/build_table_clone.go b/pkg/sql/planner/build_table_clone.go similarity index 96% rename from pkg/sql/plan/build_table_clone.go rename to pkg/sql/planner/build_table_clone.go index a438df9e4a412..50ac690014ac3 100644 --- a/pkg/sql/plan/build_table_clone.go +++ b/pkg/sql/planner/build_table_clone.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "context" @@ -32,14 +32,14 @@ import ( func buildCloneTable( stmt *tree.CloneTable, ctx CompilerContext, -) (*Plan, error) { +) (*plan.Plan, error) { var ( err error - srcTblDef *TableDef - srcObj *ObjectRef + srcTblDef *plan.TableDef + srcObj *plan.ObjectRef - createTablePlan *Plan + createTablePlan *plan.Plan builder *QueryBuilder bindCtx *BindContext @@ -54,8 +54,8 @@ func buildCloneTable( }() if stmt.IsRestore { - snapshot := &Snapshot{ - Tenant: &SnapshotTenant{ + snapshot := &plan.Snapshot{ + Tenant: &plan.SnapshotTenant{ TenantID: stmt.FromAccount, }, } @@ -152,7 +152,7 @@ func buildCloneTable( return nil, err } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Ddl{ Ddl: &plan.DataDefinition{ DdlType: plan.DataDefinition_CREATE_TABLE_WITH_CLONE, @@ -176,9 +176,9 @@ func checkPrivilege( opAccount uint32, srcAccount uint32, subMeta *plan.SubscriptionMeta, - srcTblDef *TableDef, + srcTblDef *plan.TableDef, dstDatabaseName string, - scanSnapshot *Snapshot, + scanSnapshot *plan.Snapshot, cloneType tree.CloneStmtType, ) (err error) { diff --git a/pkg/sql/plan/build_test.go 
b/pkg/sql/planner/build_test.go similarity index 98% rename from pkg/sql/plan/build_test.go rename to pkg/sql/planner/build_test.go index 8720634848a9a..8c1617bd32340 100644 --- a/pkg/sql/plan/build_test.go +++ b/pkg/sql/planner/build_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "bytes" @@ -23,8 +23,6 @@ import ( "testing" "github.com/golang/mock/gomock" - "github.com/stretchr/testify/assert" - "github.com/matrixorigin/matrixone/pkg/catalog" "github.com/matrixorigin/matrixone/pkg/common/moerr" moruntime "github.com/matrixorigin/matrixone/pkg/common/runtime" @@ -34,6 +32,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/util/executor" + "github.com/stretchr/testify/assert" ) func BenchmarkInsert(b *testing.B) { @@ -920,7 +919,7 @@ func TestShow(t *testing.T) { func TestResultColumns(t *testing.T) { mock := NewMockOptimizer(false) - getColumns := func(sql string) []*ColDef { + getColumns := func(sql string) []*plan.ColDef { logicPlan, err := runOneStmt(mock, t, sql) if err != nil { t.Fatalf("sql %s build plan error:%+v", sql, err) @@ -974,7 +973,7 @@ func TestResultColumns(t *testing.T) { func TestResultColumns2(t *testing.T) { mock := NewMockOptimizer(true) - getColumns := func(sql string) []*ColDef { + getColumns := func(sql string) []*plan.ColDef { logicPlan, err := runOneStmt(mock, t, sql) if err != nil { t.Fatalf("sql %s build plan error:%+v", sql, err) @@ -1046,12 +1045,12 @@ func TestVisitRule(t *testing.T) { sql := "select * from nation where n_nationkey > 10 or n_nationkey=@int_var or abs(-1) > 1" mock := NewMockOptimizer(false) ctx := context.TODO() - plan, err := runOneStmt(mock, t, sql) + pl, err := runOneStmt(mock, t, sql) if err != nil { t.Fatalf("should not error, sql=%s", sql) } getParamRule := NewGetParamRule() - vp := 
NewVisitPlan(plan, []VisitPlanRule{getParamRule}) + vp := NewVisitPlan(pl, []VisitPlanRule{getParamRule}) err = vp.Visit(context.TODO()) if err != nil { t.Fatalf("should not error, sql=%s", sql) @@ -1060,17 +1059,17 @@ func TestVisitRule(t *testing.T) { args := getParamRule.params resetParamOrderRule := NewResetParamOrderRule(args) - vp = NewVisitPlan(plan, []VisitPlanRule{resetParamOrderRule}) + vp = NewVisitPlan(pl, []VisitPlanRule{resetParamOrderRule}) err = vp.Visit(ctx) if err != nil { t.Fatalf("should not error, sql=%s", sql) } - params := []*Expr{ + params := []*plan.Expr{ makePlan2Int64ConstExprWithType(10), } resetParamRule := NewResetParamRefRule(ctx, params) - vp = NewVisitPlan(plan, []VisitPlanRule{resetParamRule}) + vp = NewVisitPlan(pl, []VisitPlanRule{resetParamRule}) err = vp.Visit(ctx) if err != nil { t.Fatalf("should not error, sql=%s", sql) @@ -1101,7 +1100,7 @@ func TestVisitRule2(t *testing.T) { t.Fatalf("should not error, sql=%s", sql) } - if qry, ok := queryPlan.Plan.(*Plan_Query); ok { + if qry, ok := queryPlan.Plan.(*plan.Plan_Query); ok { if f, ok := qry.Query.Nodes[1].FilterList[0].Expr.(*plan.Expr_F); ok { f.F.Args[1] = &plan.Expr{ Typ: plan.Type{ @@ -1117,7 +1116,7 @@ func TestVisitRule2(t *testing.T) { } } - params := []*Expr{ + params := []*plan.Expr{ makePlan2Int64ConstExprWithType(10), } resetParamRule := NewResetParamRefRule(ctx, params) @@ -1141,17 +1140,17 @@ func getJSON(v any, t *testing.T) []byte { return out.Bytes() } -func testDeepCopy(logicPlan *Plan) { +func testDeepCopy(logicPlan *plan.Plan) { switch logicPlan.Plan.(type) { case *plan.Plan_Query: - _ = DeepCopyPlan(logicPlan) + _ = plan.DeepCopyPlan(logicPlan) case *plan.Plan_Ddl: - _ = DeepCopyPlan(logicPlan) + _ = plan.DeepCopyPlan(logicPlan) case *plan.Plan_Dcl: } } -func outPutPlan(logicPlan *Plan, toFile bool, t *testing.T) { +func outPutPlan(logicPlan *plan.Plan, toFile bool, t *testing.T) { var json []byte switch logicPlan.Plan.(type) { case *plan.Plan_Query: @@ 
-1173,7 +1172,7 @@ func outPutPlan(logicPlan *Plan, toFile bool, t *testing.T) { } } -func runOneStmt(opt Optimizer, t *testing.T, sql string) (*Plan, error) { +func runOneStmt(opt Optimizer, t *testing.T, sql string) (*plan.Plan, error) { stmts, err := mysql.Parse(opt.CurrentContext().GetContext(), sql, 1) if err != nil { t.Fatalf("%+v", err) diff --git a/pkg/sql/plan/build_transation.go b/pkg/sql/planner/build_transation.go similarity index 93% rename from pkg/sql/plan/build_transation.go rename to pkg/sql/planner/build_transation.go index 1eeb56e41ec93..6484f363cf7d0 100644 --- a/pkg/sql/plan/build_transation.go +++ b/pkg/sql/planner/build_transation.go @@ -12,14 +12,14 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" ) -func buildBeginTransaction(stmt *tree.BeginTransaction, ctx CompilerContext) (*Plan, error) { +func buildBeginTransaction(stmt *tree.BeginTransaction, ctx CompilerContext) (*plan.Plan, error) { beginTransation := &plan.TransationBegin{} switch stmt.Modes.RwMode { case tree.READ_WRITE_MODE_NONE: @@ -30,7 +30,7 @@ func buildBeginTransaction(stmt *tree.BeginTransaction, ctx CompilerContext) (*P beginTransation.Mode = plan.TransationBegin_READ_WRITE } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Tcl{ Tcl: &plan.TransationControl{ TclType: plan.TransationControl_BEGIN, @@ -42,7 +42,7 @@ func buildBeginTransaction(stmt *tree.BeginTransaction, ctx CompilerContext) (*P }, nil } -func buildCommitTransaction(stmt *tree.CommitTransaction, ctx CompilerContext) (*Plan, error) { +func buildCommitTransaction(stmt *tree.CommitTransaction, ctx CompilerContext) (*plan.Plan, error) { commitTransation := &plan.TransationCommit{} switch stmt.Type { case tree.COMPLETION_TYPE_CHAIN: @@ -52,7 +52,7 @@ func buildCommitTransaction(stmt *tree.CommitTransaction, 
ctx CompilerContext) ( case tree.COMPLETION_TYPE_NO_CHAIN: commitTransation.CompletionType = plan.TransationCompletionType_NO_CHAIN } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Tcl{ Tcl: &plan.TransationControl{ TclType: plan.TransationControl_COMMIT, @@ -64,7 +64,7 @@ func buildCommitTransaction(stmt *tree.CommitTransaction, ctx CompilerContext) ( }, nil } -func buildRollbackTransaction(stmt *tree.RollbackTransaction, ctx CompilerContext) (*Plan, error) { +func buildRollbackTransaction(stmt *tree.RollbackTransaction, ctx CompilerContext) (*plan.Plan, error) { rollbackTransation := &plan.TransationRollback{} switch stmt.Type { case tree.COMPLETION_TYPE_CHAIN: @@ -74,7 +74,7 @@ func buildRollbackTransaction(stmt *tree.RollbackTransaction, ctx CompilerContex case tree.COMPLETION_TYPE_NO_CHAIN: rollbackTransation.CompletionType = plan.TransationCompletionType_NO_CHAIN } - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Tcl{ Tcl: &plan.TransationControl{ TclType: plan.TransationControl_ROLLBACK, diff --git a/pkg/sql/plan/build_update.go b/pkg/sql/planner/build_update.go similarity index 98% rename from pkg/sql/plan/build_update.go rename to pkg/sql/planner/build_update.go index 92595683321ba..7bf36d92ad2bb 100644 --- a/pkg/sql/plan/build_update.go +++ b/pkg/sql/planner/build_update.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "time" @@ -24,7 +24,7 @@ import ( v2 "github.com/matrixorigin/matrixone/pkg/util/metric/v2" ) -func buildTableUpdate(stmt *tree.Update, ctx CompilerContext, isPrepareStmt bool) (p *Plan, err error) { +func buildTableUpdate(stmt *tree.Update, ctx CompilerContext, isPrepareStmt bool) (p *plan.Plan, err error) { start := time.Now() defer func() { v2.TxnStatementBuildUpdateHistogram.Observe(time.Since(start).Seconds()) @@ -91,14 +91,14 @@ func buildTableUpdate(stmt *tree.Update, ctx CompilerContext, isPrepareStmt bool builder.tempOptimizeForDML() reCheckifNeedLockWholeTable(builder) query.StmtType = plan.Query_UPDATE - return &Plan{ + return &plan.Plan{ Plan: &plan.Plan_Query{ Query: query, }, }, err } -func isDefaultValExpr(e *Expr) bool { +func isDefaultValExpr(e *plan.Expr) bool { if ce, ok := e.Expr.(*plan.Expr_Lit); ok { _, isDefVal := ce.Lit.Value.(*plan.Literal_Defaultval) return isDefVal @@ -145,7 +145,7 @@ func rewriteUpdateQueryLastNode(builder *QueryBuilder, planCtxs []*dmlPlanCtx, l } else { pos := idx + colIdx if col.OnUpdate != nil && col.OnUpdate.Expr != nil { - newDefExpr := DeepCopyExpr(col.OnUpdate.Expr) + newDefExpr := plan.DeepCopyExpr(col.OnUpdate.Expr) err = replaceFuncId(builder.GetContext(), newDefExpr) if err != nil { return err diff --git a/pkg/sql/plan/build_util.go b/pkg/sql/planner/build_util.go similarity index 97% rename from pkg/sql/plan/build_util.go rename to pkg/sql/planner/build_util.go index 5991459264c9e..668a30c741f59 100644 --- a/pkg/sql/plan/build_util.go +++ b/pkg/sql/planner/build_util.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "context" @@ -28,9 +28,9 @@ import ( "github.com/matrixorigin/matrixone/pkg/logutil" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" "github.com/matrixorigin/matrixone/pkg/vm/process" ) @@ -297,7 +297,7 @@ func buildDefaultExpr(col *tree.ColumnTableDef, typ plan.Type, proc *process.Pro } // try to calculate default value, return err if fails - newExpr, err := ConstantFold(batch.EmptyForConstFoldBatch, DeepCopyExpr(defaultExpr), proc, false, true) + newExpr, err := ConstantFold(batch.EmptyForConstFoldBatch, plan.DeepCopyExpr(defaultExpr), proc, false, true) if err != nil { return nil, err } @@ -374,7 +374,7 @@ func isNullAstExpr(expr tree.Expr) bool { return ok && v.ValType == tree.P_null } -func convertValueIntoBool(name string, args []*Expr, isLogic bool) error { +func convertValueIntoBool(name string, args []*plan.Expr, isLogic bool) error { if !isLogic && (len(args) != 2 || (args[0].Typ.Id != int32(types.T_bool) && args[1].Typ.Id != int32(types.T_bool))) { return nil } @@ -398,8 +398,8 @@ func convertValueIntoBool(name string, args []*Expr, isLogic bool) error { return nil } -func getFunctionObjRef(funcID int64, name string) *ObjectRef { - return &ObjectRef{ +func getFunctionObjRef(funcID int64, name string) *plan.ObjectRef { + return &plan.ObjectRef{ Obj: funcID, ObjName: name, } @@ -464,14 +464,14 @@ func getFunctionObjRef(funcID int64, name string) *ObjectRef { // }, nil // } -func getDefaultExpr(ctx context.Context, d *plan.ColDef) (*Expr, error) { +func getDefaultExpr(ctx context.Context, d *plan.ColDef) (*plan.Expr, error) { if !d.Default.NullAbility && d.Default.Expr == nil && !d.Typ.AutoIncr { return nil, moerr.NewInvalidInputf(ctx, 
"invalid default value for column '%s'", d.Name) } if d.Default.Expr == nil { - return &Expr{ + return &plan.Expr{ Expr: &plan.Expr_Lit{ - Lit: &Const{ + Lit: &plan.Literal{ Isnull: true, }, }, @@ -481,12 +481,12 @@ func getDefaultExpr(ctx context.Context, d *plan.ColDef) (*Expr, error) { }, }, nil } - newDefExpr := DeepCopyExpr(d.Default.Expr) + newDefExpr := plan.DeepCopyExpr(d.Default.Expr) err := replaceFuncId(ctx, newDefExpr) return newDefExpr, err } -func replaceFuncId(ctx context.Context, expr *Expr) error { +func replaceFuncId(ctx context.Context, expr *plan.Expr) error { switch fun := expr.Expr.(type) { case *plan.Expr_F: for _, arg := range fun.F.Args { diff --git a/pkg/sql/plan/build_util_test.go b/pkg/sql/planner/build_util_test.go similarity index 94% rename from pkg/sql/plan/build_util_test.go rename to pkg/sql/planner/build_util_test.go index 3746744fce10f..a0af70012b2ac 100644 --- a/pkg/sql/plan/build_util_test.go +++ b/pkg/sql/planner/build_util_test.go @@ -12,28 +12,27 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "context" "testing" + "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - - "github.com/matrixorigin/matrixone/pkg/pb/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" ) func Test_replaceFuncId(t *testing.T) { - case1 := &Expr{ + case1 := &plan.Expr{ Expr: &plan.Expr_F{ F: &plan.Function{ - Func: &ObjectRef{ + Func: &plan.ObjectRef{ ObjName: "current_timestamp", Obj: function.CURRENT_TIMESTAMP, }, - Args: []*Expr{ + Args: []*plan.Expr{ { Expr: &plan.Expr_Col{ Col: &plan.ColRef{ diff --git a/pkg/sql/plan/build_values.go b/pkg/sql/planner/build_values.go similarity index 89% rename from pkg/sql/plan/build_values.go rename to pkg/sql/planner/build_values.go index d5b6573b74caf..430988dacc881 100644 --- a/pkg/sql/plan/build_values.go +++ b/pkg/sql/planner/build_values.go @@ -12,13 +12,14 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" ) -func buildValues(stmt *tree.ValuesStatement, ctx CompilerContext, isPrepareStmt bool) (p *Plan, err error) { +func buildValues(stmt *tree.ValuesStatement, ctx CompilerContext, isPrepareStmt bool) (p *plan.Plan, err error) { selectStmt := &tree.Select{ Select: &tree.ValuesClause{ Rows: stmt.Rows, diff --git a/pkg/sql/plan/build_vector_index_util.go b/pkg/sql/planner/build_vector_index_util.go similarity index 75% rename from pkg/sql/plan/build_vector_index_util.go rename to pkg/sql/planner/build_vector_index_util.go index 6be7547385e7d..19a0ee83d8a41 100644 --- a/pkg/sql/plan/build_vector_index_util.go +++ b/pkg/sql/planner/build_vector_index_util.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/catalog" @@ -32,8 +32,8 @@ var ( ) func makeIvfFlatIndexTblScan(builder *QueryBuilder, bindCtx *BindContext, - indexTableDefs []*TableDef, idxRefs []*ObjectRef, idxTableId int32) (int32, []*Expr) { - scanNodeProjections := make([]*Expr, len(indexTableDefs[idxTableId].Cols)) + indexTableDefs []*plan.TableDef, idxRefs []*plan.ObjectRef, idxTableId int32) (int32, []*plan.Expr) { + scanNodeProjections := make([]*plan.Expr, len(indexTableDefs[idxTableId].Cols)) for colIdx, column := range indexTableDefs[idxTableId].Cols { scanNodeProjections[colIdx] = &plan.Expr{ Typ: column.Typ, @@ -45,7 +45,7 @@ func makeIvfFlatIndexTblScan(builder *QueryBuilder, bindCtx *BindContext, }, } } - centroidsScanId := builder.appendNode(&Node{ + centroidsScanId := builder.appendNode(&plan.Node{ NodeType: plan.Node_TABLE_SCAN, ObjRef: idxRefs[idxTableId], TableDef: indexTableDefs[idxTableId], @@ -54,26 +54,26 @@ func makeIvfFlatIndexTblScan(builder *QueryBuilder, bindCtx *BindContext, return 
centroidsScanId, scanNodeProjections } -func makeMetaTblScanWhereKeyEqVersion(builder *QueryBuilder, bindCtx *BindContext, indexTableDefs []*TableDef, idxRefs []*ObjectRef) (int32, error) { +func makeMetaTblScanWhereKeyEqVersion(builder *QueryBuilder, bindCtx *BindContext, indexTableDefs []*plan.TableDef, idxRefs []*plan.ObjectRef) (int32, error) { metaTableScanId, scanCols := makeIvfFlatIndexTblScan(builder, bindCtx, indexTableDefs, idxRefs, 0) - whereKeyEqVersion, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*Expr{ - DeepCopyExpr(scanCols[0]), + whereKeyEqVersion, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*plan.Expr{ + plan.DeepCopyExpr(scanCols[0]), MakePlan2StringConstExprWithType("version"), }) if err != nil { return -1, err } - builder.qry.Nodes[metaTableScanId].FilterList = []*Expr{whereKeyEqVersion} + builder.qry.Nodes[metaTableScanId].FilterList = []*plan.Expr{whereKeyEqVersion} return metaTableScanId, nil } func makeCrossJoinCentroidsMetaForCurrVersion(builder *QueryBuilder, bindCtx *BindContext, - indexTableDefs []*TableDef, idxRefs []*ObjectRef, metaTableScanId int32) (int32, error) { + indexTableDefs []*plan.TableDef, idxRefs []*plan.ObjectRef, metaTableScanId int32) (int32, error) { centroidsScanId, _ := makeIvfFlatIndexTblScan(builder, bindCtx, indexTableDefs, idxRefs, 1) metaProjection := getProjectionByLastNode(builder, metaTableScanId) - metaProjectValueCol := DeepCopyExpr(metaProjection[1]) + metaProjectValueCol := plan.DeepCopyExpr(metaProjection[1]) metaProjectValueCol.Expr.(*plan.Expr_Col).Col.RelPos = 1 prevMetaScanCastValAsBigInt, err := makePlan2CastExpr(builder.GetContext(), metaProjectValueCol, makePlan2Type(&bigIntType)) if err != nil { @@ -83,7 +83,7 @@ func makeCrossJoinCentroidsMetaForCurrVersion(builder *QueryBuilder, bindCtx *Bi // 1: centroids.centroid_id // 2: centroids.centroid prevCentroidScanProjection := getProjectionByLastNode(builder, centroidsScanId)[:3] - 
whereCentroidVersionEqCurrVersion, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*Expr{ + whereCentroidVersionEqCurrVersion, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "=", []*plan.Expr{ prevCentroidScanProjection[0], prevMetaScanCastValAsBigInt, }) @@ -96,19 +96,19 @@ func makeCrossJoinCentroidsMetaForCurrVersion(builder *QueryBuilder, bindCtx *Bi JoinType: plan.Node_INNER, Children: []int32{centroidsScanId, metaTableScanId}, ProjectList: prevCentroidScanProjection, - OnList: []*Expr{whereCentroidVersionEqCurrVersion}, + OnList: []*plan.Expr{whereCentroidVersionEqCurrVersion}, }, bindCtx) return joinMetaAndCentroidsId, nil } -func makeTblCrossJoinL2Centroids(builder *QueryBuilder, bindCtx *BindContext, tableDef *TableDef, lastNodeId int32, currVersionCentroids int32, typeOriginPk Type, posOriginPk int, typeOriginVecColumn Type, posOriginVecColumn int, optype string) int32 { +func makeTblCrossJoinL2Centroids(builder *QueryBuilder, bindCtx *BindContext, tableDef *plan.TableDef, lastNodeId int32, currVersionCentroids int32, typeOriginPk plan.Type, posOriginPk int, typeOriginVecColumn plan.Type, posOriginVecColumn int, optype string) int32 { joinTblAndCentroidsUsingCrossL2Join := builder.appendNode(&plan.Node{ NodeType: plan.Node_JOIN, JoinType: plan.Node_L2, ExtraOptions: optype, Children: []int32{lastNodeId, currVersionCentroids}, - ProjectList: []*Expr{ + ProjectList: []*plan.Expr{ { // centroids.version Typ: makePlan2TypeValue(&bigIntType), Expr: &plan.Expr_Col{ @@ -130,7 +130,7 @@ func makeTblCrossJoinL2Centroids(builder *QueryBuilder, bindCtx *BindContext, ta }, }, { // tbl.pk - Typ: *DeepCopyType(&typeOriginPk), + Typ: *plan.DeepCopyType(&typeOriginPk), Expr: &plan.Expr_Col{ Col: &plan.ColRef{ RelPos: 0, @@ -140,7 +140,7 @@ func makeTblCrossJoinL2Centroids(builder *QueryBuilder, bindCtx *BindContext, ta }, }, { // tbl.embedding - Typ: *DeepCopyType(&typeOriginVecColumn), + Typ: *plan.DeepCopyType(&typeOriginVecColumn), Expr: 
&plan.Expr_Col{ Col: &plan.ColRef{ RelPos: 0, @@ -150,9 +150,9 @@ func makeTblCrossJoinL2Centroids(builder *QueryBuilder, bindCtx *BindContext, ta }, }, }, - OnList: []*Expr{ + OnList: []*plan.Expr{ { // centroids.centroid - Typ: *DeepCopyType(&typeOriginVecColumn), + Typ: *plan.DeepCopyType(&typeOriginVecColumn), Expr: &plan.Expr_Col{ Col: &plan.ColRef{ RelPos: 1, @@ -162,7 +162,7 @@ func makeTblCrossJoinL2Centroids(builder *QueryBuilder, bindCtx *BindContext, ta }, }, { // tbl.embedding - Typ: *DeepCopyType(&typeOriginVecColumn), + Typ: *plan.DeepCopyType(&typeOriginVecColumn), Expr: &plan.Expr_Col{ Col: &plan.ColRef{ RelPos: 0, @@ -179,14 +179,14 @@ func makeTblCrossJoinL2Centroids(builder *QueryBuilder, bindCtx *BindContext, ta func makeFinalProject(builder *QueryBuilder, bindCtx *BindContext, joinTblAndCentroidsUsingCrossL2Join int32) (int32, error) { var finalProjections = getProjectionByLastNode(builder, joinTblAndCentroidsUsingCrossL2Join) - centroidsVersion := DeepCopyExpr(finalProjections[0]) - centroidsId := DeepCopyExpr(finalProjections[1]) - tblPk := DeepCopyExpr(finalProjections[2]) - tblEmbedding := DeepCopyExpr(finalProjections[3]) + centroidsVersion := plan.DeepCopyExpr(finalProjections[0]) + centroidsId := plan.DeepCopyExpr(finalProjections[1]) + tblPk := plan.DeepCopyExpr(finalProjections[2]) + tblEmbedding := plan.DeepCopyExpr(finalProjections[3]) cpKey, err := BindFuncExprImplByPlanExpr(builder.GetContext(), "serial", []*plan.Expr{ - DeepCopyExpr(finalProjections[0]), - DeepCopyExpr(finalProjections[1]), - DeepCopyExpr(finalProjections[2]), + plan.DeepCopyExpr(finalProjections[0]), + plan.DeepCopyExpr(finalProjections[1]), + plan.DeepCopyExpr(finalProjections[2]), }) if err != nil { return -1, err @@ -196,7 +196,7 @@ func makeFinalProject(builder *QueryBuilder, bindCtx *BindContext, joinTblAndCen &plan.Node{ NodeType: plan.Node_PROJECT, Children: []int32{joinTblAndCentroidsUsingCrossL2Join}, - ProjectList: []*Expr{centroidsVersion, centroidsId, 
tblPk, tblEmbedding, cpKey}, + ProjectList: []*plan.Expr{centroidsVersion, centroidsId, tblPk, tblEmbedding, cpKey}, }, bindCtx) return projectWithCpKey, nil diff --git a/pkg/sql/plan/comparison_cast_optimization_test.go b/pkg/sql/planner/comparison_cast_optimization_test.go similarity index 99% rename from pkg/sql/plan/comparison_cast_optimization_test.go rename to pkg/sql/planner/comparison_cast_optimization_test.go index cd93cdebb772e..9fdc4f51ba449 100644 --- a/pkg/sql/plan/comparison_cast_optimization_test.go +++ b/pkg/sql/planner/comparison_cast_optimization_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "context" diff --git a/pkg/sql/plan/const.go b/pkg/sql/planner/const.go similarity index 98% rename from pkg/sql/plan/const.go rename to pkg/sql/planner/const.go index 2b4f495146393..f2a00c4b39b78 100644 --- a/pkg/sql/plan/const.go +++ b/pkg/sql/planner/const.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner const ( /* diff --git a/pkg/sql/plan/current_account.go b/pkg/sql/planner/current_account.go similarity index 99% rename from pkg/sql/plan/current_account.go rename to pkg/sql/planner/current_account.go index 2fbfe7c92310f..90b92b719f07d 100644 --- a/pkg/sql/plan/current_account.go +++ b/pkg/sql/planner/current_account.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/common/moerr" diff --git a/pkg/sql/plan/default_binder.go b/pkg/sql/planner/default_binder.go similarity index 96% rename from pkg/sql/plan/default_binder.go rename to pkg/sql/planner/default_binder.go index a26720d7a2b43..6ff7b91908105 100644 --- a/pkg/sql/plan/default_binder.go +++ b/pkg/sql/planner/default_binder.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "context" @@ -22,7 +22,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" ) -func NewDefaultBinder(sysCtx context.Context, builder *QueryBuilder, ctx *BindContext, typ Type, cols []string) *DefaultBinder { +func NewDefaultBinder(sysCtx context.Context, builder *QueryBuilder, ctx *BindContext, typ plan.Type, cols []string) *DefaultBinder { b := &DefaultBinder{typ: typ, cols: cols} b.sysCtx = sysCtx b.builder = builder diff --git a/pkg/sql/plan/distinct_agg.go b/pkg/sql/planner/distinct_agg.go similarity index 96% rename from pkg/sql/plan/distinct_agg.go rename to pkg/sql/planner/distinct_agg.go index d702142385599..2d854c3700b21 100644 --- a/pkg/sql/plan/distinct_agg.go +++ b/pkg/sql/planner/distinct_agg.go @@ -12,11 +12,11 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/pb/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" ) func (builder *QueryBuilder) optimizeDistinctAgg(nodeID int32) { diff --git a/pkg/sql/plan/dml_context.go b/pkg/sql/planner/dml_context.go similarity index 99% rename from pkg/sql/plan/dml_context.go rename to pkg/sql/planner/dml_context.go index 6eee21abaf93b..cc976d10db989 100644 --- a/pkg/sql/plan/dml_context.go +++ b/pkg/sql/planner/dml_context.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/catalog" diff --git a/pkg/sql/plan/explain/column_prune_test.go b/pkg/sql/planner/explain/column_prune_test.go similarity index 97% rename from pkg/sql/plan/explain/column_prune_test.go rename to pkg/sql/planner/explain/column_prune_test.go index 499df340d588c..cfd3d186febb8 100644 --- a/pkg/sql/plan/explain/column_prune_test.go +++ b/pkg/sql/planner/explain/column_prune_test.go @@ -22,7 +22,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect/mysql" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/stretchr/testify/require" ) @@ -178,7 +178,7 @@ func TestSingleTableQueryPrune(t *testing.T) { for _, c := range cases { t.Run(c.name, func(t *testing.T) { - mock := plan2.NewMockOptimizer(false) + mock := planner.NewMockOptimizer(false) logicPlan, err := buildOneStmt(mock, t, c.sql) if err != nil { t.Fatalf("%+v", err) @@ -382,7 +382,7 @@ func TestJoinQueryPrune(t *testing.T) { for _, c := range cases { t.Run(c.name, func(t *testing.T) { - mock := plan2.NewMockOptimizer(false) + mock := planner.NewMockOptimizer(false) logicPlan, err 
:= buildOneStmt(mock, t, c.sql) if err != nil { t.Fatalf("%+v", err) @@ -453,7 +453,7 @@ func TestNestedQueryPrune(t *testing.T) { for _, c := range cases { t.Run(c.name, func(t *testing.T) { - mock := plan2.NewMockOptimizer(false) + mock := planner.NewMockOptimizer(false) logicPlan, err := buildOneStmt(mock, t, c.sql) if err != nil { t.Fatalf("%+v", err) @@ -540,7 +540,7 @@ func TestDerivedTableQueryPrune(t *testing.T) { for _, c := range cases { t.Run(c.name, func(t *testing.T) { - mock := plan2.NewMockOptimizer(false) + mock := planner.NewMockOptimizer(false) logicPlan, err := buildOneStmt(mock, t, c.sql) if err != nil { t.Fatalf("%+v", err) @@ -555,14 +555,14 @@ func TestDerivedTableQueryPrune(t *testing.T) { } -func buildOneStmt(opt plan2.Optimizer, t *testing.T, sql string) (*plan.Plan, error) { +func buildOneStmt(opt planner.Optimizer, t *testing.T, sql string) (*plan.Plan, error) { stmts, err := mysql.Parse(opt.CurrentContext().GetContext(), sql, 1) if err != nil { t.Fatalf("%+v", err) } // this sql always return one stmt ctx := opt.CurrentContext() - return plan2.BuildPlan(ctx, stmts[0], false) + return planner.BuildPlan(ctx, stmts[0], false) } type Entry[K any, V any] struct { diff --git a/pkg/sql/plan/explain/explain_cost_test.go b/pkg/sql/planner/explain/explain_cost_test.go similarity index 100% rename from pkg/sql/plan/explain/explain_cost_test.go rename to pkg/sql/planner/explain/explain_cost_test.go diff --git a/pkg/sql/plan/explain/explain_expr.go b/pkg/sql/planner/explain/explain_expr.go similarity index 99% rename from pkg/sql/plan/explain/explain_expr.go rename to pkg/sql/planner/explain/explain_expr.go index 39fb745a1169b..b5c3ef65015a3 100644 --- a/pkg/sql/plan/explain/explain_expr.go +++ b/pkg/sql/planner/explain/explain_expr.go @@ -26,7 +26,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/pb/plan" - 
"github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/vm/message" ) diff --git a/pkg/sql/plan/explain/explain_hint_test.go b/pkg/sql/planner/explain/explain_hint_test.go similarity index 100% rename from pkg/sql/plan/explain/explain_hint_test.go rename to pkg/sql/planner/explain/explain_hint_test.go diff --git a/pkg/sql/plan/explain/explain_node.go b/pkg/sql/planner/explain/explain_node.go similarity index 99% rename from pkg/sql/plan/explain/explain_node.go rename to pkg/sql/planner/explain/explain_node.go index 3c52d3bed69f1..763ece72dd87b 100644 --- a/pkg/sql/plan/explain/explain_node.go +++ b/pkg/sql/planner/explain/explain_node.go @@ -24,7 +24,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/common" "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/pb/plan" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/sql/util" "github.com/matrixorigin/matrixone/pkg/vm/message" ) @@ -879,7 +879,7 @@ func (ndesc *NodeDescribeImpl) GetAggregationInfo(ctx context.Context, options * func (ndesc *NodeDescribeImpl) GetSampleFuncInfo(ctx context.Context, options *ExplainOptions) (string, error) { buf := bytes.NewBuffer(make([]byte, 0, 300)) - if ndesc.Node.SampleFunc.Rows == plan2.NotSampleByRows { + if ndesc.Node.SampleFunc.Rows == planner.NotSampleByRows { buf.WriteString(fmt.Sprintf("Sample %.2f Percent by: ", ndesc.Node.SampleFunc.Percent)) } else { buf.WriteString(fmt.Sprintf("Sample %d Rows by: ", ndesc.Node.SampleFunc.Rows)) diff --git a/pkg/sql/plan/explain/explain_query.go b/pkg/sql/planner/explain/explain_query.go similarity index 100% rename from pkg/sql/plan/explain/explain_query.go rename to pkg/sql/planner/explain/explain_query.go diff --git a/pkg/sql/plan/explain/explain_test.go b/pkg/sql/planner/explain/explain_test.go 
similarity index 96% rename from pkg/sql/plan/explain/explain_test.go rename to pkg/sql/planner/explain/explain_test.go index 04a282507c83c..1b45f70867b9d 100644 --- a/pkg/sql/plan/explain/explain_test.go +++ b/pkg/sql/planner/explain/explain_test.go @@ -21,11 +21,11 @@ import ( "testing" "github.com/matrixorigin/matrixone/pkg/common/moerr" - plan2 "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect/mysql" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" ) func TestSingleSql(t *testing.T) { @@ -40,7 +40,7 @@ func TestSingleSql(t *testing.T) { //input := "explain verbose SELECT l.L_ORDERKEY a FROM CUSTOMER c, ORDERS o, LINEITEM l WHERE c.C_CUSTKEY = o.O_CUSTKEY and l.L_ORDERKEY = o.O_ORDERKEY and o.O_ORDERKEY < 10" //input := "explain verbose update emp set sal = sal + 500, comm = 1200 where deptno = 10" input := "explain verbose select case when p_type like 'PROMO%' then l_extendedprice * (1 - l_discount) when p_type like 'PRX%' then l_extendedprice * (2 - l_discount) else 0 end from lineitem,part where l_shipdate < date '1996-04-01' + interval '1' month" - mock := plan.NewMockOptimizer(true) + mock := planner.NewMockOptimizer(true) err := runOneStmt(mock, t, input) if err != nil { t.Fatalf("%+v", err) @@ -64,7 +64,7 @@ func TestBasicSqlExplain(t *testing.T) { "explain verbose select case when p_type like 'PROMO%' then l_extendedprice * (1 - l_discount) when p_type like 'PRX%' then l_extendedprice * (2 - l_discount) else 0 end from lineitem,part where l_shipdate < date '1996-04-01' + interval '1' month", "explain verbose select column_2 from (values row(0, 1, cast('[3, 4, 5]' as vecf32(3))))", } - mockOptimizer := plan.NewMockOptimizer(false) + mockOptimizer := planner.NewMockOptimizer(false) 
runTestShouldPass(mockOptimizer, t, sqls) } @@ -99,7 +99,7 @@ func TestSingleTableQuery(t *testing.T) { // "explain verbose SELECT N_REGIONKEY FROM NATION where N_REGIONKEY is null and N_NAME is not null", // "explain SELECT N_REGIONKEY FROM NATION where N_REGIONKEY is null and N_NAME is not null", } - mockOptimizer := plan.NewMockOptimizer(false) + mockOptimizer := planner.NewMockOptimizer(false) runTestShouldPass(mockOptimizer, t, sqls) } @@ -132,7 +132,7 @@ func TestJoinQuery(t *testing.T) { "explain verbose SELECT * FROM NATION a join REGION b on a.N_REGIONKEY = b.R_REGIONKEY WHERE a.N_REGIONKEY > 0", "explain SELECT * FROM NATION a join REGION b on a.N_REGIONKEY = b.R_REGIONKEY WHERE a.N_REGIONKEY > 0", } - mockOptimizer := plan.NewMockOptimizer(false) + mockOptimizer := planner.NewMockOptimizer(false) runTestShouldPass(mockOptimizer, t, sqls) } @@ -159,7 +159,7 @@ func TestNestedQuery(t *testing.T) { l_partkey = p_partkey );`, //tpch q17 } - mockOptimizer := plan.NewMockOptimizer(false) + mockOptimizer := planner.NewMockOptimizer(false) runTestShouldPass(mockOptimizer, t, sqls) } @@ -177,7 +177,7 @@ func TestDerivedTableQuery(t *testing.T) { "explain select * from (select c_custkey, count(C_NATIONKEY) ff from CUSTOMER group by c_custkey ) a join NATION b on a.c_custkey = b.N_REGIONKEY where b.N_NATIONKEY > 10", "explain verbose select * from (select c_custkey, count(C_NATIONKEY) ff from CUSTOMER group by c_custkey ) a join NATION b on a.c_custkey = b.N_REGIONKEY where b.N_NATIONKEY > 10", } - mockOptimizer := plan.NewMockOptimizer(false) + mockOptimizer := planner.NewMockOptimizer(false) runTestShouldPass(mockOptimizer, t, sqls) } @@ -200,7 +200,7 @@ func TestCollectionQuery(t *testing.T) { "explain verbose SELECT distinct(l.L_ORDERKEY) FROM LINEITEM AS l WHERE l.L_SHIPINSTRUCT='DELIVER IN PERSON' UNION SELECT distinct(l.L_ORDERKEY) FROM LINEITEM AS l WHERE l.L_SHIPMODE='AIR' OR l.L_SHIPMODE='AIR REG'", "explain verbose SELECT distinct(l.L_ORDERKEY) FROM 
LINEITEM AS l WHERE l.L_SHIPMODE IN ('AIR','AIR REG') EXCEPT SELECT distinct(l.L_ORDERKEY) FROM LINEITEM AS l WHERE l.L_SHIPINSTRUCT='DELIVER IN PERSON'", } - mockOptimizer := plan.NewMockOptimizer(false) + mockOptimizer := planner.NewMockOptimizer(false) runTestShouldPass(mockOptimizer, t, sqls) } @@ -215,7 +215,7 @@ func TestDMLInsert(t *testing.T) { "explain verbose insert ignore into nation select * from nation2", "explain verbose insert into nation select * from nation2 on duplicate key update n_comment = n_name", } - mockOptimizer := plan.NewMockOptimizer(false) + mockOptimizer := planner.NewMockOptimizer(false) runTestShouldPass(mockOptimizer, t, sqls) } @@ -228,7 +228,7 @@ func TestDMLUpdate(t *testing.T) { "explain UPDATE NATION SET N_NAME ='U1', N_REGIONKEY=N_REGIONKEY+2 WHERE N_NATIONKEY > 10 LIMIT 20", "explain verbose UPDATE NATION SET N_NAME ='U1', N_REGIONKEY=N_REGIONKEY+2 WHERE N_NATIONKEY > 10 LIMIT 20", } - mockOptimizer := plan.NewMockOptimizer(true) + mockOptimizer := planner.NewMockOptimizer(true) runTestShouldPass(mockOptimizer, t, sqls) } @@ -245,7 +245,7 @@ func TestDMLDelete(t *testing.T) { "explain verbose UPDATE NATION SET N_NAME ='U1', N_REGIONKEY=N_REGIONKEY+2 WHERE N_NATIONKEY > 10 LIMIT 20", "explain verbose UPDATE NATION,NATION2 SET NATION.N_NAME ='U1',NATION2.N_NATIONKEY=15 WHERE NATION.N_NATIONKEY = NATION2.N_NATIONKEY", } - mockOptimizer := plan.NewMockOptimizer(true) + mockOptimizer := planner.NewMockOptimizer(true) runTestShouldPass(mockOptimizer, t, sqls) } @@ -263,7 +263,7 @@ func TestSystemVariableAndUserVariable(t *testing.T) { "explain verbose select @@session.autocommit,@val from NATION", "explain verbose select @@session.autocommit,@val,N_NAME from NATION", } - mockOptimizer := plan.NewMockOptimizer(false) + mockOptimizer := planner.NewMockOptimizer(false) runTestShouldPass(mockOptimizer, t, sqls) } @@ -280,7 +280,7 @@ func TestSingleTableDeleteSQL(t *testing.T) { "explain verbose delete from emp where deptno = 20 order 
by sal limit 2", "explain verbose delete from emp where empno > 7800 order by empno limit 2", } - mockOptimizer := plan.NewMockOptimizer(true) + mockOptimizer := planner.NewMockOptimizer(true) runTestShouldPass(mockOptimizer, t, sqls) } @@ -298,7 +298,7 @@ func TestCompositeUniqueIndexTableDeleteSQL(t *testing.T) { "explain verbose delete employees, dept from employees, dept where employees.deptno = dept.deptno and sal > 2000", "explain verbose DELETE FROM employees, dept USING employees INNER JOIN dept WHERE employees.deptno = dept.deptno", } - mockOptimizer := plan.NewMockOptimizer(true) + mockOptimizer := planner.NewMockOptimizer(true) runTestShouldPass(mockOptimizer, t, sqls) } @@ -312,14 +312,14 @@ func TestMultiTableDeleteSQL(t *testing.T) { "explain verbose delete emp,dept from emp ,dept where emp.deptno = dept.deptno and empno = 7839", "explain verbose DELETE FROM emp, dept USING emp INNER JOIN dept WHERE emp.deptno = dept.deptno", } - mockOptimizer := plan.NewMockOptimizer(true) + mockOptimizer := planner.NewMockOptimizer(true) runTestShouldPass(mockOptimizer, t, sqls) } func TestGetUpdateCtxInfo(t *testing.T) { // Create a test node with update context containing partition columns - node := &plan2.Node{ - UpdateCtxList: []*plan2.UpdateCtx{ + node := &plan.Node{ + UpdateCtxList: []*plan.UpdateCtx{ { TableDef: &plan.TableDef{ Name: "test_table", @@ -365,7 +365,7 @@ func TestGetUpdateCtxInfo(t *testing.T) { } } -func runTestShouldPass(opt plan.Optimizer, t *testing.T, sqls []string) { +func runTestShouldPass(opt planner.Optimizer, t *testing.T, sqls []string) { for _, sql := range sqls { err := runOneStmt(opt, t, sql) if err != nil { @@ -374,7 +374,7 @@ func runTestShouldPass(opt plan.Optimizer, t *testing.T, sqls []string) { } } -func runOneStmt(opt plan.Optimizer, t *testing.T, sql string) error { +func runOneStmt(opt planner.Optimizer, t *testing.T, sql string) error { t.Logf("SQL: %v\n", sql) stmts, err := mysql.Parse(opt.CurrentContext().GetContext(), 
sql, 1) if err != nil { @@ -418,7 +418,7 @@ func runOneStmt(opt plan.Optimizer, t *testing.T, sql string) error { // this sql always return one stmt ctx := opt.CurrentContext() - logicPlan, err := plan.BuildPlan(ctx, stmt.Statement, false) + logicPlan, err := planner.BuildPlan(ctx, stmt.Statement, false) if err != nil { t.Errorf("Build Query Plan error: '%v'", tree.String(stmt, dialect.MYSQL)) return err @@ -594,18 +594,18 @@ func TestAnalyzeInfoDescribeImpl_GetDescription_ReadSize(t *testing.T) { Format: EXPLAIN_FORMAT_TEXT, Verbose: false, Analyze: true, - NodeType: plan2.Node_TABLE_SCAN, + NodeType: plan.Node_TABLE_SCAN, } tests := []struct { name string - analyzeInfo *plan2.AnalyzeInfo + analyzeInfo *plan.AnalyzeInfo wantContains []string wantNotContains []string }{ { name: "with ReadSize, S3ReadSize, DiskReadSize", - analyzeInfo: &plan2.AnalyzeInfo{ + analyzeInfo: &plan.AnalyzeInfo{ TimeConsumed: 1000000000, // 1000ms WaitTimeConsumed: 0, InputRows: 1000, @@ -638,7 +638,7 @@ func TestAnalyzeInfoDescribeImpl_GetDescription_ReadSize(t *testing.T) { }, { name: "with ReadSize only, no S3ReadSize and DiskReadSize", - analyzeInfo: &plan2.AnalyzeInfo{ + analyzeInfo: &plan.AnalyzeInfo{ TimeConsumed: 500000000, // 500ms WaitTimeConsumed: 0, InputRows: 500, @@ -664,7 +664,7 @@ func TestAnalyzeInfoDescribeImpl_GetDescription_ReadSize(t *testing.T) { }, { name: "with S3ReadSize and DiskReadSize, no ReadSize", - analyzeInfo: &plan2.AnalyzeInfo{ + analyzeInfo: &plan.AnalyzeInfo{ TimeConsumed: 200000000, // 200ms WaitTimeConsumed: 0, InputRows: 200, @@ -690,7 +690,7 @@ func TestAnalyzeInfoDescribeImpl_GetDescription_ReadSize(t *testing.T) { }, { name: "all ReadSize fields are zero", - analyzeInfo: &plan2.AnalyzeInfo{ + analyzeInfo: &plan.AnalyzeInfo{ TimeConsumed: 100000000, // 100ms WaitTimeConsumed: 0, InputRows: 100, diff --git a/pkg/sql/plan/explain/fultext_test.go b/pkg/sql/planner/explain/fultext_test.go similarity index 100% rename from 
pkg/sql/plan/explain/fultext_test.go rename to pkg/sql/planner/explain/fultext_test.go diff --git a/pkg/sql/planner/explain/marshal_model.go b/pkg/sql/planner/explain/marshal_model.go new file mode 100644 index 0000000000000..423b55f481276 --- /dev/null +++ b/pkg/sql/planner/explain/marshal_model.go @@ -0,0 +1,95 @@ +// Copyright 2021 - 2022 Matrix Origin +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package explain + +import ( + "strconv" + + "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/sql/models" +) + +func buildEdge(parentNode *plan.Node, childNode *plan.Node, index int32) *models.Edge { + edge := &models.Edge{ + Id: "E" + strconv.Itoa(int(index)), + Src: strconv.FormatInt(int64(childNode.NodeId), 10), + Dst: strconv.FormatInt(int64(parentNode.NodeId), 10), + Unit: "count", + } + if childNode.AnalyzeInfo != nil { + edge.Output = childNode.AnalyzeInfo.OutputRows + } + return edge +} + +var nodeTypeToNameMap = map[plan.Node_NodeType]string{ + plan.Node_UNKNOWN: "UnKnown Node", + + plan.Node_VALUE_SCAN: "Values Scan", + plan.Node_TABLE_SCAN: "Table Scan", + plan.Node_FUNCTION_SCAN: "Function Scan", + plan.Node_EXTERNAL_SCAN: "External Scan", + plan.Node_MATERIAL_SCAN: "Material Scan", + plan.Node_SOURCE_SCAN: "Source Scan", + + plan.Node_PROJECT: "Project", + + plan.Node_EXTERNAL_FUNCTION: "External Function", + + plan.Node_MATERIAL: "Material", + plan.Node_SINK: "Sink", + plan.Node_SINK_SCAN: "Sink Scan", + 
plan.Node_RECURSIVE_SCAN: "Recursive Scan", + plan.Node_RECURSIVE_CTE: "CTE Scan", + + plan.Node_AGG: "Aggregate", + plan.Node_DISTINCT: "Distinct", + plan.Node_FILTER: "Filter", + plan.Node_JOIN: "Join", + plan.Node_SAMPLE: "Sample", + plan.Node_SORT: "Sort", + plan.Node_UNION: "Union", + plan.Node_UNION_ALL: "Union All", + plan.Node_UNIQUE: "Unique", + plan.Node_WINDOW: "Window", + + plan.Node_BROADCAST: "Broadcast", + plan.Node_SPLIT: "Split", + plan.Node_GATHER: "Gather", + + plan.Node_ASSERT: "Assert", + + plan.Node_INSERT: "Insert", + plan.Node_DELETE: "Delete", + plan.Node_REPLACE: "Replace", + plan.Node_MULTI_UPDATE: "Multi Update", + + plan.Node_LOCK_OP: "Lock Operator", + + plan.Node_INTERSECT: "Intersect", + plan.Node_INTERSECT_ALL: "Intersect All", + plan.Node_MINUS: "Minus", + plan.Node_MINUS_ALL: "Minus All", + + plan.Node_ON_DUPLICATE_KEY: "On Duplicate Key", + plan.Node_PRE_INSERT: "Pre Insert", + plan.Node_PRE_INSERT_UK: "Pre Insert Unique", + plan.Node_PRE_INSERT_SK: "Pre Insert 2nd Key", + + plan.Node_TIME_WINDOW: "Time window", + plan.Node_FILL: "Fill", + plan.Node_PARTITION: "Partition", + plan.Node_FUZZY_FILTER: "Fuzzy filter", +} diff --git a/pkg/sql/plan/explain/marshal_query.go b/pkg/sql/planner/explain/marshal_query.go similarity index 100% rename from pkg/sql/plan/explain/marshal_query.go rename to pkg/sql/planner/explain/marshal_query.go diff --git a/pkg/sql/plan/explain/marshal_query_test.go b/pkg/sql/planner/explain/marshal_query_test.go similarity index 93% rename from pkg/sql/plan/explain/marshal_query_test.go rename to pkg/sql/planner/explain/marshal_query_test.go index 5edf520eae9c2..e465ba41d2582 100644 --- a/pkg/sql/plan/explain/marshal_query_test.go +++ b/pkg/sql/planner/explain/marshal_query_test.go @@ -22,20 +22,20 @@ import ( "testing" "github.com/google/uuid" - plan2 "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" 
"github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect/mysql" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" ) func TestGetLabelOrTitle(t *testing.T) { m := MarshalNodeImpl{} errCount := 0 - for _, v := range plan2.Node_NodeType_value { - node := &plan2.Node{ - NodeType: plan2.Node_NodeType(v), + for _, v := range plan.Node_NodeType_value { + node := &plan.Node{ + NodeType: plan.Node_NodeType(v), NodeId: 0, - InsertCtx: &plan2.InsertCtx{ - Ref: &plan2.ObjectRef{}, + InsertCtx: &plan.InsertCtx{ + Ref: &plan.ObjectRef{}, }, Children: []int32{0, 1}, } @@ -44,18 +44,18 @@ func TestGetLabelOrTitle(t *testing.T) { Verbose: false, Analyze: false, Format: EXPLAIN_FORMAT_TEXT, - NodeType: plan2.Node_NodeType(v), + NodeType: plan.Node_NodeType(v), } _, e := m.GetNodeTitle(context.TODO(), opt) if e != nil && e.Error() == "internal error: "+errUnsupportedNodeType { errCount = errCount + 1 - fmt.Printf("you should add title for node=%s\n", plan2.Node_NodeType_name[v]) + fmt.Printf("you should add title for node=%s\n", plan.Node_NodeType_name[v]) } _, e = m.GetNodeLabels(context.TODO(), opt) if e != nil && e.Error() == "internal error: "+errUnsupportedNodeType { errCount = errCount + 1 - fmt.Printf("you should add label for node=%s\n", plan2.Node_NodeType_name[v]) + fmt.Printf("you should add label for node=%s\n", plan.Node_NodeType_name[v]) } } if errCount > 0 { @@ -87,7 +87,7 @@ func TestSimpleQueryToJson(t *testing.T) { "select c_custkey from (select c_custkey, count(C_NATIONKEY) ff from CUSTOMER group by c_custkey ) a join NATION b on a.c_custkey = b.N_REGIONKEY where b.N_NATIONKEY > 10", } - mock := plan.NewMockOptimizer(false) + mock := planner.NewMockOptimizer(false) buildPlanMarshalTest(mock, t, sqls) } @@ -158,7 +158,7 @@ func TestSingleTableQueryToJson(t *testing.T) { //"delete from nation", //"delete nation, nation2 from nation join nation2 on nation.n_name = nation2.n_name", } - mock := 
plan.NewMockOptimizer(false) + mock := planner.NewMockOptimizer(false) buildPlanMarshalTest(mock, t, sqls) } @@ -180,7 +180,7 @@ func TestJoinQueryToJson(t *testing.T) { "select n_name from nation intersect select n_name from nation2", "select n_name from nation minus select n_name from nation2", } - mock := plan.NewMockOptimizer(false) + mock := planner.NewMockOptimizer(false) buildPlanMarshalTest(mock, t, sqls) } @@ -211,7 +211,7 @@ func TestNestedQueryToJson(t *testing.T) { l_partkey = p_partkey );`, //tpch q17 } - mock := plan.NewMockOptimizer(false) + mock := planner.NewMockOptimizer(false) buildPlanMarshalTest(mock, t, sqls) } @@ -233,7 +233,7 @@ func TestCollectionQueryToJson(t *testing.T) { "SELECT distinct(l.L_ORDERKEY) FROM LINEITEM AS l WHERE l.L_SHIPMODE IN ('AIR','AIR REG') EXCEPT SELECT distinct(l.L_ORDERKEY) FROM LINEITEM AS l WHERE l.L_SHIPINSTRUCT='DELIVER IN PERSON'", } - mock := plan.NewMockOptimizer(false) + mock := planner.NewMockOptimizer(false) buildPlanMarshalTest(mock, t, sqls) } @@ -249,7 +249,7 @@ func TestDerivedTableQueryToJson(t *testing.T) { "select a.* from (select c_custkey, count(C_NATIONKEY) ff from CUSTOMER group by c_custkey ) a join NATION b on a.c_custkey = b.N_REGIONKEY where b.N_NATIONKEY > 10", "select * from (select c_custkey, count(C_NATIONKEY) ff from CUSTOMER group by c_custkey ) a join NATION b on a.c_custkey = b.N_REGIONKEY where b.N_NATIONKEY > 10", } - mock := plan.NewMockOptimizer(false) + mock := planner.NewMockOptimizer(false) buildPlanMarshalTest(mock, t, sqls) } @@ -261,21 +261,21 @@ func TestDMLToJson(t *testing.T) { "DELETE FROM NATION WHERE N_NATIONKEY > 10", "DELETE FROM a1, a2 USING NATION AS a1 INNER JOIN NATION2 AS a2 WHERE a1.N_NATIONKEY=a2.N_NATIONKEY", } - mock := plan.NewMockOptimizer(true) + mock := planner.NewMockOptimizer(true) buildPlanMarshalTest(mock, t, sqls) } -func buildPlanMarshalTest(opt plan.Optimizer, t *testing.T, sqls []string) { +func buildPlanMarshalTest(opt planner.Optimizer, t 
*testing.T, sqls []string) { ctx := context.TODO() for _, sql := range sqls { t.Logf("sql: %s \n", sql) - plan, err := runSingleSql(opt, t, sql) + pl, err := runSingleSql(opt, t, sql) if err != nil { t.Fatalf("%+v", err) } - queryPlan := plan.GetQuery() + queryPlan := pl.GetQuery() for _, node := range queryPlan.Nodes { - info := plan2.AnalyzeInfo{ + info := plan.AnalyzeInfo{ InputRows: 12, OutputRows: 12, TimeConsumed: 5, @@ -302,14 +302,14 @@ func buildPlanMarshalTest(opt plan.Optimizer, t *testing.T, sqls []string) { } } -func runSingleSql(opt plan.Optimizer, t *testing.T, sql string) (*plan.Plan, error) { +func runSingleSql(opt planner.Optimizer, t *testing.T, sql string) (*plan.Plan, error) { stmts, err := mysql.Parse(opt.CurrentContext().GetContext(), sql, 1) if err != nil { t.Fatalf("%+v", err) } // this sql always return one stmt ctx := opt.CurrentContext() - return plan.BuildPlan(ctx, stmts[0], false) + return planner.BuildPlan(ctx, stmts[0], false) } func TestMarshalNodeImpl_GetStatistics_ReadSize(t *testing.T) { @@ -318,18 +318,18 @@ func TestMarshalNodeImpl_GetStatistics_ReadSize(t *testing.T) { Format: EXPLAIN_FORMAT_JSON, Verbose: false, Analyze: true, - NodeType: plan2.Node_TABLE_SCAN, + NodeType: plan.Node_TABLE_SCAN, } tests := []struct { name string - analyzeInfo *plan2.AnalyzeInfo + analyzeInfo *plan.AnalyzeInfo wantFields []string checkValues map[string]int64 }{ { name: "with ReadSize, S3ReadSize, DiskReadSize", - analyzeInfo: &plan2.AnalyzeInfo{ + analyzeInfo: &plan.AnalyzeInfo{ TimeConsumed: 1000000000, // 1000ms WaitTimeConsumed: 0, InputRows: 1000, @@ -351,7 +351,7 @@ func TestMarshalNodeImpl_GetStatistics_ReadSize(t *testing.T) { }, { name: "with ReadSize only", - analyzeInfo: &plan2.AnalyzeInfo{ + analyzeInfo: &plan.AnalyzeInfo{ TimeConsumed: 500000000, // 500ms WaitTimeConsumed: 0, InputRows: 500, @@ -371,7 +371,7 @@ func TestMarshalNodeImpl_GetStatistics_ReadSize(t *testing.T) { }, { name: "all ReadSize fields are zero", - analyzeInfo: 
&plan2.AnalyzeInfo{ + analyzeInfo: &plan.AnalyzeInfo{ TimeConsumed: 100000000, // 100ms WaitTimeConsumed: 0, InputRows: 100, @@ -391,8 +391,8 @@ func TestMarshalNodeImpl_GetStatistics_ReadSize(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - node := &plan2.Node{ - NodeType: plan2.Node_TABLE_SCAN, + node := &plan.Node{ + NodeType: plan.Node_TABLE_SCAN, NodeId: 0, AnalyzeInfo: tt.analyzeInfo, } diff --git a/pkg/sql/plan/explain/tpch_explain_test.go b/pkg/sql/planner/explain/tpch_explain_test.go similarity index 97% rename from pkg/sql/plan/explain/tpch_explain_test.go rename to pkg/sql/planner/explain/tpch_explain_test.go index 19d407ba44e70..b6fdc142f4b9b 100644 --- a/pkg/sql/plan/explain/tpch_explain_test.go +++ b/pkg/sql/planner/explain/tpch_explain_test.go @@ -22,17 +22,16 @@ import ( "runtime" "testing" - "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan" - "github.com/matrixorigin/matrixone/pkg/sql/parsers" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect" + "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" + "github.com/matrixorigin/matrixone/pkg/sql/planner" ) func Test_TPCH_Plan2(t *testing.T) { ctx := context.TODO() - mock := plan.NewMockOptimizer(false) + mock := planner.NewMockOptimizer(false) es := &ExplainOptions{ Verbose: true, Analyze: false, diff --git a/pkg/sql/plan/explain/types.go b/pkg/sql/planner/explain/types.go similarity index 100% rename from pkg/sql/plan/explain/types.go rename to pkg/sql/planner/explain/types.go diff --git a/pkg/sql/plan/expr_opt.go b/pkg/sql/planner/expr_opt.go similarity index 98% rename from pkg/sql/plan/expr_opt.go rename to pkg/sql/planner/expr_opt.go index 904a5c2c57031..48a1b03ce3565 100644 --- a/pkg/sql/plan/expr_opt.go +++ b/pkg/sql/planner/expr_opt.go @@ -12,12 +12,12 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/pb/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/sql/util" ) @@ -267,7 +267,7 @@ func (builder *QueryBuilder) doMergeFiltersOnCompositeKey(tableDef *plan.TableDe inArgs := make([]*plan.Expr, len(lastFilter.GetF().Args[1].GetList().List)) for i, lastArg := range lastFilter.GetF().Args[1].GetList().List { - tmpSerialArgs := DeepCopyExprList(serialArgs) + tmpSerialArgs := plan.DeepCopyExprList(serialArgs) tmpSerialArgs = append(tmpSerialArgs, lastArg) rightArg, _ := bindFuncExprAndConstFold(builder.GetContext(), builder.compCtx.GetProcess(), "serial", tmpSerialArgs) inArgs[i] = rightArg @@ -295,7 +295,7 @@ func (builder *QueryBuilder) doMergeFiltersOnCompositeKey(tableDef *plan.TableDe serialArgs[i] = filters[filterIdx[i]].GetF().Args[1] } - tmpSerialArgs := DeepCopyExprList(serialArgs) + tmpSerialArgs := plan.DeepCopyExprList(serialArgs) tmpSerialArgs = append(tmpSerialArgs, lastFilter.GetF().Args[1]) leftArg, _ := bindFuncExprAndConstFold(builder.GetContext(), builder.compCtx.GetProcess(), "serial", tmpSerialArgs) diff --git a/pkg/sql/plan/external.go b/pkg/sql/planner/external.go similarity index 98% rename from pkg/sql/plan/external.go rename to pkg/sql/planner/external.go index 6db3ff550dada..a25dfd133192c 100644 --- a/pkg/sql/plan/external.go +++ b/pkg/sql/planner/external.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "bufio" @@ -147,7 +147,7 @@ func getAccountCol(filepath string) string { return pathDir[1] } -func getExternalStats(node *plan.Node, builder *QueryBuilder) *Stats { +func getExternalStats(node *plan.Node, builder *QueryBuilder) *plan.Stats { externScan := node.ExternScan if externScan != nil && externScan.LoadType == tree.INLINE { totolSize := len(externScan.Data) diff --git a/pkg/sql/plan/flatten_subquery.go b/pkg/sql/planner/flatten_subquery.go similarity index 98% rename from pkg/sql/plan/flatten_subquery.go rename to pkg/sql/planner/flatten_subquery.go index b0976309d10c6..3daad2e7d80ae 100644 --- a/pkg/sql/plan/flatten_subquery.go +++ b/pkg/sql/planner/flatten_subquery.go @@ -12,13 +12,13 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/pb/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" ) var ( @@ -184,11 +184,11 @@ func (builder *QueryBuilder) flattenSubquery(nodeID int32, subquery *plan.Subque return nodeID, retExpr, err } funcID, returnType := fGet.GetEncodedOverloadID(), fGet.GetReturnType() - isNullExpr := &Expr{ + isNullExpr := &plan.Expr{ Expr: &plan.Expr_F{ F: &plan.Function{ Func: getFunctionObjRef(funcID, "isnull"), - Args: []*Expr{retExpr}, + Args: []*plan.Expr{retExpr}, }, }, Typ: makePlan2Type(&returnType), @@ -203,11 +203,11 @@ func (builder *QueryBuilder) flattenSubquery(nodeID int32, subquery *plan.Subque return nodeID, retExpr, nil } funcID, returnType = fGet.GetEncodedOverloadID(), fGet.GetReturnType() - retExpr = &Expr{ + retExpr = &plan.Expr{ Expr: &plan.Expr_F{ F: &plan.Function{ Func: getFunctionObjRef(funcID, "case"), - Args: []*Expr{isNullExpr, zeroExpr, DeepCopyExpr(retExpr)}, + 
Args: []*plan.Expr{isNullExpr, zeroExpr, plan.DeepCopyExpr(retExpr)}, }, }, Typ: makePlan2Type(&returnType), @@ -473,7 +473,7 @@ func (builder *QueryBuilder) pullupCorrelatedPredicates(nodeID int32, ctx *BindC var newFilterList []*plan.Expr for _, cond := range node.FilterList { if hasCorrCol(cond) { - //cond, err = bindFuncExprImplByPlanExpr("is", []*plan.Expr{cond, DeepCopyExpr(constTrue)}) + //cond, err = bindFuncExprImplByPlanExpr("is", []*plan.Expr{cond, plan.DeepCopyExpr(constTrue)}) if err != nil { return 0, nil, err } diff --git a/pkg/sql/plan/fromOldFunction.go b/pkg/sql/planner/fromOldFunction.go similarity index 99% rename from pkg/sql/plan/fromOldFunction.go rename to pkg/sql/planner/fromOldFunction.go index dee03fd783089..139829c17f419 100644 --- a/pkg/sql/plan/fromOldFunction.go +++ b/pkg/sql/planner/fromOldFunction.go @@ -12,11 +12,12 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( - "github.com/matrixorigin/matrixone/pkg/container/types" "unicode" + + "github.com/matrixorigin/matrixone/pkg/container/types" ) // From old function code, and they were only used by plan. diff --git a/pkg/sql/plan/fulltext.go b/pkg/sql/planner/fulltext.go similarity index 98% rename from pkg/sql/plan/fulltext.go rename to pkg/sql/planner/fulltext.go index dfef79657accb..62c63c723194f 100644 --- a/pkg/sql/plan/fulltext.go +++ b/pkg/sql/planner/fulltext.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "encoding/json" @@ -82,7 +82,7 @@ func (builder *QueryBuilder) buildFullTextIndexScan(tbl *tree.TableFunction, ctx return 0, moerr.NewInvalidInput(builder.GetContext(), "Invalid number of arguments (NARGS != 5).") } - colDefs := DeepCopyColDefList(ftIndexColdefs) + colDefs := plan.DeepCopyColDefList(ftIndexColdefs) params, err := builder.getFullTextParams(tbl.Func) if err != nil { @@ -180,7 +180,7 @@ func (builder *QueryBuilder) buildFullTextIndexTokenize(tbl *tree.TableFunction, return 0, moerr.NewInvalidInput(builder.GetContext(), "Invalid number of arguments (NARGS < 3).") } - colDefs := DeepCopyColDefList(tokenizeColDefs) + colDefs := plan.DeepCopyColDefList(tokenizeColDefs) params, err := builder.getFullTextParams(tbl.Func) if err != nil { return 0, err diff --git a/pkg/sql/plan/generate_series.go b/pkg/sql/planner/generate_series.go similarity index 99% rename from pkg/sql/plan/generate_series.go rename to pkg/sql/planner/generate_series.go index 4f408d722f206..8f18ab2683de8 100644 --- a/pkg/sql/plan/generate_series.go +++ b/pkg/sql/planner/generate_series.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/container/types" diff --git a/pkg/sql/plan/group_binder.go b/pkg/sql/planner/group_binder.go similarity index 99% rename from pkg/sql/plan/group_binder.go rename to pkg/sql/planner/group_binder.go index d1bf20734f898..0ea8f1747d0a5 100644 --- a/pkg/sql/plan/group_binder.go +++ b/pkg/sql/planner/group_binder.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/common/moerr" diff --git a/pkg/sql/plan/having_binder.go b/pkg/sql/planner/having_binder.go similarity index 96% rename from pkg/sql/plan/having_binder.go rename to pkg/sql/planner/having_binder.go index 8cdd25bfadc98..cf984ae3f644d 100644 --- a/pkg/sql/plan/having_binder.go +++ b/pkg/sql/planner/having_binder.go @@ -12,15 +12,15 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" ) func NewHavingBinder(builder *QueryBuilder, ctx *BindContext) *HavingBinder { @@ -169,7 +169,7 @@ func (b *HavingBinder) BindAggFunc(funcName string, astExpr *tree.FuncExpr, dept }, nil } -func (b *HavingBinder) remapAggToTimeWindowCacheAgg(expr *Expr) (*Expr, error) { +func (b *HavingBinder) remapAggToTimeWindowCacheAgg(expr *plan.Expr) (*plan.Expr, error) { f := expr.Expr.(*plan.Expr_F).F funcId, _ := function.DecodeOverloadID(f.Func.Obj) @@ -189,7 +189,7 @@ func (b *HavingBinder) remapAggToTimeWindowCacheAgg(expr *Expr) (*Expr, error) { return expr, nil } -func (b *HavingBinder) remapAggToTimeWindowResultAgg(expr *Expr) (*Expr, error) { +func (b *HavingBinder) remapAggToTimeWindowResultAgg(expr *plan.Expr) (*plan.Expr, error) { obj := expr.Expr.(*plan.Expr_F).F.Func funcId, _ := function.DecodeOverloadID(obj.Obj) @@ -315,7 +315,7 @@ func (b *HavingBinder) BindTimeWindowFunc(funcName string, astExpr *tree.FuncExp colPos := int32(len(b.ctx.times)) aggColPos := colPos - forgeColCnt - expr := DeepCopyExpr(b.ctx.aggregates[aggColPos]) 
+ expr := plan.DeepCopyExpr(b.ctx.aggregates[aggColPos]) expr.Expr.(*plan.Expr_F).F.Args = []*plan.Expr{ { Typ: b.ctx.aggregates[aggColPos].Typ, diff --git a/pkg/sql/plan/hnsw.go b/pkg/sql/planner/hnsw.go similarity index 96% rename from pkg/sql/plan/hnsw.go rename to pkg/sql/planner/hnsw.go index 2948153bdb1f6..74d681b46bc81 100644 --- a/pkg/sql/plan/hnsw.go +++ b/pkg/sql/planner/hnsw.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/common/moerr" @@ -63,7 +63,7 @@ func (builder *QueryBuilder) buildHnswCreate(tbl *tree.TableFunction, ctx *BindC return 0, moerr.NewInvalidInput(builder.GetContext(), "Invalid number of arguments (NARGS < 4).") } - colDefs := DeepCopyColDefList(kHNSWBuildIndexColDefs) + colDefs := plan.DeepCopyColDefList(kHNSWBuildIndexColDefs) params, err := builder.getHnswParams(tbl.Func) if err != nil { return 0, err @@ -105,7 +105,7 @@ func (builder *QueryBuilder) buildHnswSearch(tbl *tree.TableFunction, ctx *BindC return 0, moerr.NewInvalidInput(builder.GetContext(), "Invalid number of arguments (NARGS != 3).") } - colDefs := DeepCopyColDefList(kHNSWSearchColDefs) + colDefs := plan.DeepCopyColDefList(kHNSWSearchColDefs) params, err := builder.getHnswParams(tbl.Func) if err != nil { diff --git a/pkg/sql/plan/index_table_dml_test.go b/pkg/sql/planner/index_table_dml_test.go similarity index 99% rename from pkg/sql/plan/index_table_dml_test.go rename to pkg/sql/planner/index_table_dml_test.go index 98380b5e61399..4bb83692ad72a 100644 --- a/pkg/sql/plan/index_table_dml_test.go +++ b/pkg/sql/planner/index_table_dml_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "testing" diff --git a/pkg/sql/plan/ivfflat.go b/pkg/sql/planner/ivfflat.go similarity index 96% rename from pkg/sql/plan/ivfflat.go rename to pkg/sql/planner/ivfflat.go index f9f5d16d68834..00c1aeb606d5c 100644 --- a/pkg/sql/plan/ivfflat.go +++ b/pkg/sql/planner/ivfflat.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/common/moerr" @@ -62,7 +62,7 @@ func (builder *QueryBuilder) buildIvfCreate(tbl *tree.TableFunction, ctx *BindCo return 0, moerr.NewInvalidInput(builder.GetContext(), "Invalid number of arguments (NARGS < 3).") } - colDefs := DeepCopyColDefList(kIVFBuildIndexColDefs) + colDefs := plan.DeepCopyColDefList(kIVFBuildIndexColDefs) params, err := builder.getIvfParams(tbl.Func) if err != nil { return 0, err @@ -102,7 +102,7 @@ func (builder *QueryBuilder) buildIvfSearch(tbl *tree.TableFunction, ctx *BindCo return 0, moerr.NewInvalidInput(builder.GetContext(), "Invalid number of arguments (NARGS != 3).") } - colDefs := DeepCopyColDefList(kIVFSearchColDefs) + colDefs := plan.DeepCopyColDefList(kIVFSearchColDefs) params, err := builder.getIvfParams(tbl.Func) if err != nil { diff --git a/pkg/sql/plan/join_order.go b/pkg/sql/planner/join_order.go similarity index 99% rename from pkg/sql/plan/join_order.go rename to pkg/sql/planner/join_order.go index 7385a785496c5..e9da17f531559 100644 --- a/pkg/sql/plan/join_order.go +++ b/pkg/sql/planner/join_order.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "fmt" @@ -21,7 +21,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/catalog" "github.com/matrixorigin/matrixone/pkg/pb/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" ) type joinEdge struct { diff --git a/pkg/sql/plan/limit_binder.go b/pkg/sql/planner/limit_binder.go similarity index 99% rename from pkg/sql/plan/limit_binder.go rename to pkg/sql/planner/limit_binder.go index 8e754e4eea7f2..2b2922b019910 100644 --- a/pkg/sql/plan/limit_binder.go +++ b/pkg/sql/planner/limit_binder.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/common/moerr" diff --git a/pkg/sql/plan/make.go b/pkg/sql/planner/make.go similarity index 93% rename from pkg/sql/plan/make.go rename to pkg/sql/planner/make.go index 4176fe5bfbfae..fbe2df38428f8 100644 --- a/pkg/sql/plan/make.go +++ b/pkg/sql/planner/make.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "context" @@ -24,16 +24,16 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" ) -func MakePlan2Decimal64ExprWithType(v types.Decimal64, typ *Type) *plan.Expr { +func MakePlan2Decimal64ExprWithType(v types.Decimal64, typ *plan.Type) *plan.Expr { rawA := int64(v) return &plan.Expr{ Typ: *typ, Expr: &plan.Expr_Lit{ - Lit: &Const{ + Lit: &plan.Literal{ Isnull: false, Value: &plan.Literal_Decimal64Val{ Decimal64Val: &plan.Decimal64{ @@ -45,13 +45,13 @@ func MakePlan2Decimal64ExprWithType(v types.Decimal64, typ *Type) *plan.Expr { } } -func MakePlan2Decimal128ExprWithType(v types.Decimal128, typ *Type) *plan.Expr { +func MakePlan2Decimal128ExprWithType(v types.Decimal128, typ *plan.Type) *plan.Expr { rawA := v.B0_63 rawB := v.B64_127 return &plan.Expr{ Typ: *typ, Expr: &plan.Expr_Lit{ - Lit: &Const{ + Lit: &plan.Literal{ Isnull: false, Value: &plan.Literal_Decimal128Val{ Decimal128Val: &plan.Decimal128{ @@ -91,7 +91,7 @@ func makePlan2DecimalExprWithType(ctx context.Context, v string, isBin ...bool) func makePlan2DateConstNullExpr(t types.T) *plan.Expr { return &plan.Expr{ Expr: &plan.Expr_Lit{ - Lit: &Const{ + Lit: &plan.Literal{ Isnull: true, }, }, @@ -105,7 +105,7 @@ func makePlan2DateConstNullExpr(t types.T) *plan.Expr { func makePlan2Decimal128ConstNullExpr() *plan.Expr { return &plan.Expr{ Expr: &plan.Expr_Lit{ - Lit: &Const{ + Lit: &plan.Literal{ Isnull: true, }, }, @@ -121,7 +121,7 @@ func makePlan2Decimal128ConstNullExpr() *plan.Expr { func makePlan2NullConstExprWithType() *plan.Expr { return &plan.Expr{ Expr: &plan.Expr_Lit{ - Lit: &Const{ + Lit: &plan.Literal{ Isnull: true, }, }, @@ -563,7 +563,7 @@ func 
MakePlan2NullTextConstExprWithType(v string) *plan.Expr { } } -func makePlan2CastExpr(ctx context.Context, expr *Expr, targetType Type) (*Expr, error) { +func makePlan2CastExpr(ctx context.Context, expr *plan.Expr, targetType plan.Type) (*plan.Expr, error) { var err error if expr == nil { return nil, moerr.NewInvalidInput(ctx, "nil expression in cast") @@ -598,15 +598,15 @@ func makePlan2CastExpr(ctx context.Context, expr *Expr, targetType Type) (*Expr, return &plan.Expr{ Expr: &plan.Expr_F{ F: &plan.Function{ - Func: &ObjectRef{Obj: fGet.GetEncodedOverloadID(), ObjName: "cast"}, - Args: []*Expr{expr, t}, + Func: &plan.ObjectRef{Obj: fGet.GetEncodedOverloadID(), ObjName: "cast"}, + Args: []*plan.Expr{expr, t}, }, }, Typ: targetType, }, nil } -func funcCastForEnumType(ctx context.Context, expr *Expr, targetType Type) (*Expr, error) { +func funcCastForEnumType(ctx context.Context, expr *plan.Expr, targetType plan.Type) (*plan.Expr, error) { var err error if targetType.Id != int32(types.T_enum) { return expr, nil @@ -617,7 +617,7 @@ func funcCastForEnumType(ctx context.Context, expr *Expr, targetType Type) (*Exp } // bind ast function's args - args := make([]*Expr, len(astArgs)+1) + args := make([]*plan.Expr, len(astArgs)+1) binder := NewDefaultBinder(ctx, nil, nil, targetType, nil) for idx, arg := range astArgs { if idx == len(args)-1 { @@ -698,16 +698,16 @@ func makeTypeByPlan2Expr(expr *plan.Expr) types.Type { return types.New(oid, expr.Typ.Width, expr.Typ.Scale) } -func makeHiddenColTyp() Type { - return Type{ +func makeHiddenColTyp() plan.Type { + return plan.Type{ Id: int32(types.T_varchar), Width: types.MaxVarcharLen, } } // used for Compound primary key column name && clusterby column name -func MakeHiddenColDefByName(name string) *ColDef { - return &ColDef{ +func MakeHiddenColDefByName(name string) *plan.ColDef { + return &plan.ColDef{ Name: name, Hidden: true, Typ: makeHiddenColTyp(), @@ -719,11 +719,11 @@ func MakeHiddenColDefByName(name string) *ColDef { } 
} -func MakeRowIdColDef() *ColDef { - return &ColDef{ +func MakeRowIdColDef() *plan.ColDef { + return &plan.ColDef{ Name: catalog.Row_ID, Hidden: true, - Typ: Type{ + Typ: plan.Type{ Id: int32(types.T_Rowid), }, Default: &plan.Default{ @@ -734,7 +734,7 @@ func MakeRowIdColDef() *ColDef { } } -func isSameColumnType(t1 Type, t2 Type) bool { +func isSameColumnType(t1 plan.Type, t2 plan.Type) bool { if t1.Id != t2.Id { return false } @@ -746,10 +746,10 @@ func isSameColumnType(t1 Type, t2 Type) bool { // GetColDefFromTable Find the target column definition from the predefined // table columns and return its deep copy -func GetColDefFromTable(Cols []*ColDef, hidenColName string) *ColDef { +func GetColDefFromTable(Cols []*plan.ColDef, hidenColName string) *plan.ColDef { for _, coldef := range Cols { if coldef.Name == hidenColName { - return DeepCopyColDef(coldef) + return plan.DeepCopyColDef(coldef) } } panic("Unable to find target column from predefined table columns") diff --git a/pkg/sql/plan/make_test.go b/pkg/sql/planner/make_test.go similarity index 99% rename from pkg/sql/plan/make_test.go rename to pkg/sql/planner/make_test.go index 87215462af5ce..9f492a11c8009 100644 --- a/pkg/sql/plan/make_test.go +++ b/pkg/sql/planner/make_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "testing" diff --git a/pkg/sql/plan/message.go b/pkg/sql/planner/message.go similarity index 97% rename from pkg/sql/plan/message.go rename to pkg/sql/planner/message.go index 07504b67eb6f1..c80c03c3bb708 100644 --- a/pkg/sql/plan/message.go +++ b/pkg/sql/planner/message.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/pb/plan" @@ -83,7 +83,7 @@ func (builder *QueryBuilder) handleMessageFromTopToScan(nodeID int32) { msgHeader := plan.MsgHeader{MsgTag: msgTag, MsgType: int32(message.MsgTopValue)} node.SendMsgList = append(node.SendMsgList, msgHeader) scanNode.RecvMsgList = append(scanNode.RecvMsgList, msgHeader) - scanNode.OrderBy = append(scanNode.OrderBy, DeepCopyOrderBySpec(node.OrderBy[0])) + scanNode.OrderBy = append(scanNode.OrderBy, plan.DeepCopyOrderBySpec(node.OrderBy[0])) } func (builder *QueryBuilder) handleHashMapMessages(nodeID int32) { diff --git a/pkg/sql/plan/meta_scan.go b/pkg/sql/planner/meta_scan.go similarity index 99% rename from pkg/sql/plan/meta_scan.go rename to pkg/sql/planner/meta_scan.go index d0cbe8a01ee37..ee9eb093addb8 100644 --- a/pkg/sql/plan/meta_scan.go +++ b/pkg/sql/planner/meta_scan.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/catalog" diff --git a/pkg/sql/plan/metadata_scan.go b/pkg/sql/planner/metadata_scan.go similarity index 99% rename from pkg/sql/plan/metadata_scan.go rename to pkg/sql/planner/metadata_scan.go index 857d6cf94f761..55ac6c581673f 100644 --- a/pkg/sql/plan/metadata_scan.go +++ b/pkg/sql/planner/metadata_scan.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "strings" diff --git a/pkg/sql/plan/mock.go b/pkg/sql/planner/mock.go similarity index 95% rename from pkg/sql/plan/mock.go rename to pkg/sql/planner/mock.go index 36384838327d4..a7c978568f3b2 100644 --- a/pkg/sql/plan/mock.go +++ b/pkg/sql/planner/mock.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "context" @@ -24,8 +24,8 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/pb/plan" pb "github.com/matrixorigin/matrixone/pkg/pb/statsinfo" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/vm/process" ) @@ -34,8 +34,8 @@ var _ CompilerContext = &MockCompilerContext{} type MockCompilerContext struct { dbs map[string]bool - objects map[string]*ObjectRef - tables map[string]*TableDef + objects map[string]*plan.ObjectRef + tables map[string]*plan.TableDef pks map[string][]int id2name map[uint64]string isDml bool @@ -47,10 +47,10 @@ type MockCompilerContext struct { // Add function fields for test overrides GetAccountNameFunc func() string GetAccountIdFunc func() (uint32, error) - DatabaseExistsFunc func(string, *Snapshot) bool - GetDatabaseIdFunc func(string, *Snapshot) (uint64, error) + DatabaseExistsFunc func(string, *plan.Snapshot) bool + GetDatabaseIdFunc func(string, *plan.Snapshot) (uint64, error) ResolveAccountIdsFunc func([]string) ([]uint32, error) - ResolveFunc func(string, string, *Snapshot) (*ObjectRef, *TableDef) + ResolveFunc func(string, string, *plan.Snapshot) (*plan.ObjectRef, *plan.TableDef) } func (m *MockCompilerContext) GetLowerCaseTableNames() int64 { @@ -64,11 +64,11 @@ func (m *MockCompilerContext) GetViews() []string { func (m *MockCompilerContext) SetViews(views []string) { } -func (m *MockCompilerContext) GetSnapshot() *Snapshot { +func (m *MockCompilerContext) GetSnapshot() *plan.Snapshot { return nil } -func (m *MockCompilerContext) SetSnapshot(snapshot *Snapshot) {} +func (m *MockCompilerContext) SetSnapshot(snapshot *plan.Snapshot) {} func (m *MockCompilerContext) InitExecuteStmtParam(execPlan *plan.Execute) (*plan.Plan, tree.Statement, 
error) { //TODO implement me @@ -80,11 +80,11 @@ func (m *MockCompilerContext) CheckSubscriptionValid(subName, accName string, pu panic("implement me") } -func (m *MockCompilerContext) ResolveIndexTableByRef(ref *ObjectRef, tblName string, snapshot *Snapshot) (*ObjectRef, *TableDef, error) { +func (m *MockCompilerContext) ResolveIndexTableByRef(ref *plan.ObjectRef, tblName string, snapshot *plan.Snapshot) (*plan.ObjectRef, *plan.TableDef, error) { return m.Resolve(DbNameOfObjRef(ref), tblName, snapshot) } -func (m *MockCompilerContext) ResolveSubscriptionTableById(tableId uint64, pubmeta *SubscriptionMeta) (*ObjectRef, *TableDef, error) { +func (m *MockCompilerContext) ResolveSubscriptionTableById(tableId uint64, pubmeta *plan.SubscriptionMeta) (*plan.ObjectRef, *plan.TableDef, error) { return nil, nil, nil } @@ -145,8 +145,8 @@ type index struct { // NewEmptyCompilerContext for test create/drop statement func NewEmptyCompilerContext() *MockCompilerContext { return &MockCompilerContext{ - objects: make(map[string]*ObjectRef), - tables: make(map[string]*TableDef), + objects: make(map[string]*plan.ObjectRef), + tables: make(map[string]*plan.TableDef), ctx: context.Background(), } } @@ -155,8 +155,8 @@ type Schema struct { cols []col pks []int idxs []index - fks []*ForeignKeyDef - clusterby *ClusterByDef + fks []*plan.ForeignKeyDef + clusterby *plan.ClusterByDef outcnt float64 tblId int64 isView bool @@ -705,7 +705,7 @@ func NewMockCompilerContext(isDml bool) *MockCompilerContext { {"__mo_cbkey_003pid005pname", types.T_varchar, true, 65535, 0}, {catalog.Row_ID, types.T_Rowid, false, 16, 0}, }, - clusterby: &ClusterByDef{ + clusterby: &plan.ClusterByDef{ Name: "__mo_cbkey_003pid005pname", }, outcnt: 14, @@ -992,9 +992,9 @@ func NewMockCompilerContext(isDml bool) *MockCompilerContext { } } - objects := make(map[string]*ObjectRef) - tables := make(map[string]*TableDef) - stats := make(map[string]*Stats) + objects := make(map[string]*plan.ObjectRef) + tables := 
make(map[string]*plan.TableDef) + stats := make(map[string]*plan.Stats) pks := make(map[string][]int) id2name := make(map[uint64]string) // build tpch/mo context data(schema) @@ -1005,10 +1005,10 @@ func NewMockCompilerContext(isDml bool) *MockCompilerContext { if tblId == 0 { tblId = int64(tableIdx) } - colDefs := make([]*ColDef, 0, len(table.cols)) + colDefs := make([]*plan.ColDef, 0, len(table.cols)) for idx, col := range table.cols { - colDefs = append(colDefs, &ColDef{ + colDefs = append(colDefs, &plan.ColDef{ ColId: uint64(idx), Typ: plan.Type{ Id: int32(col.Id), @@ -1027,7 +1027,7 @@ func NewMockCompilerContext(isDml bool) *MockCompilerContext { }) } - objects[tableName] = &ObjectRef{ + objects[tableName] = &plan.ObjectRef{ Server: 0, Db: 0, Schema: 0, @@ -1038,12 +1038,12 @@ func NewMockCompilerContext(isDml bool) *MockCompilerContext { ObjName: tableName, } - tableDef := &TableDef{ + tableDef := &plan.TableDef{ TableType: catalog.SystemOrdinaryRel, TblId: uint64(tblId), Name: tableName, Cols: colDefs, - Indexes: make([]*IndexDef, len(table.idxs)), + Indexes: make([]*plan.IndexDef, len(table.idxs)), } if len(table.pks) == 1 { tableDef.Pkey = &plan.PrimaryKeyDef{ @@ -1174,7 +1174,7 @@ func NewMockCompilerContext(isDml bool) *MockCompilerContext { } } -func (m *MockCompilerContext) DatabaseExists(name string, snapshot *Snapshot) bool { +func (m *MockCompilerContext) DatabaseExists(name string, snapshot *plan.Snapshot) bool { if m.DatabaseExistsFunc != nil { return m.DatabaseExistsFunc(name, snapshot) } @@ -1184,7 +1184,7 @@ func (m *MockCompilerContext) DatabaseExists(name string, snapshot *Snapshot) bo return false } -func (m *MockCompilerContext) GetDatabaseId(dbName string, snapshot *Snapshot) (uint64, error) { +func (m *MockCompilerContext) GetDatabaseId(dbName string, snapshot *plan.Snapshot) (uint64, error) { if m.GetDatabaseIdFunc != nil { return m.GetDatabaseIdFunc(dbName, snapshot) } @@ -1203,9 +1203,9 @@ func (m *MockCompilerContext) GetUserName() 
string { return "root" } -func (m *MockCompilerContext) Resolve(dbName string, tableName string, snapshot *Snapshot) (*ObjectRef, *TableDef, error) { +func (m *MockCompilerContext) Resolve(dbName string, tableName string, snapshot *plan.Snapshot) (*plan.ObjectRef, *plan.TableDef, error) { name := strings.ToLower(tableName) - tableDef := DeepCopyTableDef(m.tables[name], true) + tableDef := plan.DeepCopyTableDef(m.tables[name], true) if tableDef != nil && !m.isDml { for i, col := range tableDef.Cols { if col.Typ.Id == int32(types.T_Rowid) { @@ -1228,9 +1228,9 @@ func (m *MockCompilerContext) Resolve(dbName string, tableName string, snapshot return m.objects[name], tableDef, nil } -func (m *MockCompilerContext) ResolveById(tableId uint64, snapshot *Snapshot) (*ObjectRef, *TableDef, error) { +func (m *MockCompilerContext) ResolveById(tableId uint64, snapshot *plan.Snapshot) (*plan.ObjectRef, *plan.TableDef, error) { name := m.id2name[tableId] - tableDef := DeepCopyTableDef(m.tables[name], true) + tableDef := plan.DeepCopyTableDef(m.tables[name], true) if tableDef != nil && !m.isDml { for i, col := range tableDef.Cols { if col.Typ.Id == int32(types.T_Rowid) { @@ -1242,7 +1242,7 @@ func (m *MockCompilerContext) ResolveById(tableId uint64, snapshot *Snapshot) (* return m.objects[name], tableDef, nil } -func (m *MockCompilerContext) Stats(obj *ObjectRef, snapshot *Snapshot) (*pb.StatsInfo, error) { +func (m *MockCompilerContext) Stats(obj *plan.ObjectRef, snapshot *plan.Snapshot) (*pb.StatsInfo, error) { return nil, nil } @@ -1276,7 +1276,7 @@ func (m *MockCompilerContext) GetProcess() *process.Process { return testutil.NewProc(nil) } -func (m *MockCompilerContext) GetQueryResultMeta(uuid string) ([]*ColDef, string, error) { +func (m *MockCompilerContext) GetQueryResultMeta(uuid string) ([]*plan.ColDef, string, error) { return nil, "", nil } @@ -1287,23 +1287,23 @@ func (m *MockCompilerContext) GetBuildingAlterView() (bool, string, string) { return false, "", "" } -func (m 
*MockCompilerContext) GetSubscriptionMeta(dbName string, snapshot *Snapshot) (*SubscriptionMeta, error) { +func (m *MockCompilerContext) GetSubscriptionMeta(dbName string, snapshot *plan.Snapshot) (*plan.SubscriptionMeta, error) { return nil, nil } -func (m *MockCompilerContext) SetQueryingSubscription(*SubscriptionMeta) { +func (m *MockCompilerContext) SetQueryingSubscription(*plan.SubscriptionMeta) { } -func (m *MockCompilerContext) GetQueryingSubscription() *SubscriptionMeta { +func (m *MockCompilerContext) GetQueryingSubscription() *plan.SubscriptionMeta { return nil } func (m *MockCompilerContext) IsPublishing(dbName string) (bool, error) { return false, nil } -func (m *MockCompilerContext) BuildTableDefByMoColumns(dbName, table string) (*TableDef, error) { +func (m *MockCompilerContext) BuildTableDefByMoColumns(dbName, table string) (*plan.TableDef, error) { return nil, nil } -func (m *MockCompilerContext) ResolveSnapshotWithSnapshotName(snapshotName string) (*Snapshot, error) { +func (m *MockCompilerContext) ResolveSnapshotWithSnapshotName(snapshotName string) (*plan.Snapshot, error) { return nil, nil } @@ -1327,7 +1327,7 @@ func NewMockOptimizer(_ bool) *MockOptimizer { } } -func (moc *MockOptimizer) Optimize(stmt tree.Statement) (*Query, error) { +func (moc *MockOptimizer) Optimize(stmt tree.Statement) (*plan.Query, error) { ctx := moc.CurrentContext() query, err := BuildPlan(ctx, stmt, false) if err != nil { diff --git a/pkg/sql/plan/mysql_compatibility.go b/pkg/sql/planner/mysql_compatibility.go similarity index 98% rename from pkg/sql/plan/mysql_compatibility.go rename to pkg/sql/planner/mysql_compatibility.go index b8bde8d82b60e..56ad251c5e0fb 100644 --- a/pkg/sql/plan/mysql_compatibility.go +++ b/pkg/sql/planner/mysql_compatibility.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import "github.com/matrixorigin/matrixone/pkg/pb/plan" diff --git a/pkg/sql/plan/ondup_update_binder.go b/pkg/sql/planner/ondup_update_binder.go similarity index 99% rename from pkg/sql/plan/ondup_update_binder.go rename to pkg/sql/planner/ondup_update_binder.go index ab40868e6aa41..4e45f6bf1557d 100644 --- a/pkg/sql/plan/ondup_update_binder.go +++ b/pkg/sql/planner/ondup_update_binder.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "context" diff --git a/pkg/sql/plan/opt_misc.go b/pkg/sql/planner/opt_misc.go similarity index 97% rename from pkg/sql/plan/opt_misc.go rename to pkg/sql/planner/opt_misc.go index 7c97fe1c71c5c..dbc6b3bd2bbcd 100644 --- a/pkg/sql/plan/opt_misc.go +++ b/pkg/sql/planner/opt_misc.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "strconv" @@ -21,8 +21,8 @@ import ( "github.com/matrixorigin/matrixone/pkg/common/runtime" "github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/pb/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" - "github.com/matrixorigin/matrixone/pkg/sql/plan/rule" + "github.com/matrixorigin/matrixone/pkg/sql/function" + "github.com/matrixorigin/matrixone/pkg/sql/planner/rule" ) func (builder *QueryBuilder) countColRefs(nodeID int32, colRefCnt map[[2]int32]int) { @@ -212,7 +212,7 @@ func (builder *QueryBuilder) canRemoveProject(parentType plan.Node_NodeType, nod return true } -func exprCanRemoveProject(expr *Expr) bool { +func exprCanRemoveProject(expr *plan.Expr) bool { switch ne := expr.Expr.(type) { case *plan.Expr_F: if ne.F.Func.ObjName == "sleep" { @@ -293,7 +293,7 @@ func replaceColumnsForExpr(expr *plan.Expr, projMap map[[2]int32]*plan.Expr) *pl case *plan.Expr_Col: mapID := [2]int32{ne.Col.RelPos, 
ne.Col.ColPos} if projExpr, ok := projMap[mapID]; ok { - return DeepCopyExpr(projExpr) + return plan.DeepCopyExpr(projExpr) } case *plan.Expr_F: @@ -826,7 +826,7 @@ func (builder *QueryBuilder) rewriteEffectlessAggToProject(nodeID int32) { } func makeBetweenExprFromDateFormat(equalFunc *plan.Function, dateformatFunc *plan.Function, intervalStr string, builder *QueryBuilder) *plan.Expr { - dateExpr := DeepCopyExpr(equalFunc.Args[1]) + dateExpr := plan.DeepCopyExpr(equalFunc.Args[1]) if intervalStr == "year" { sval, _ := dateExpr.GetLit().GetValue().(*plan.Literal_Sval) sval.Sval = sval.Sval + "0101" @@ -840,16 +840,16 @@ func makeBetweenExprFromDateFormat(equalFunc *plan.Function, dateformatFunc *pla return nil } interval := MakeIntervalExpr(1, intervalStr) - end, err := bindFuncExprAndConstFold(builder.GetContext(), builder.compCtx.GetProcess(), "+", []*Expr{DeepCopyExpr(begin), interval}) + end, err := bindFuncExprAndConstFold(builder.GetContext(), builder.compCtx.GetProcess(), "+", []*plan.Expr{plan.DeepCopyExpr(begin), interval}) if err != nil { return nil } interval = MakeIntervalExpr(1, "microsecond") - end, err = bindFuncExprAndConstFold(builder.GetContext(), builder.compCtx.GetProcess(), "-", []*Expr{DeepCopyExpr(end), interval}) + end, err = bindFuncExprAndConstFold(builder.GetContext(), builder.compCtx.GetProcess(), "-", []*plan.Expr{plan.DeepCopyExpr(end), interval}) if err != nil { return nil } - args := []*Expr{dateformatFunc.Args[0], begin, end} + args := []*plan.Expr{dateformatFunc.Args[0], begin, end} newFilter, err := bindFuncExprAndConstFold(builder.GetContext(), builder.compCtx.GetProcess(), "between", args) if err != nil { return nil @@ -978,7 +978,7 @@ func (builder *QueryBuilder) optimizeLikeExpr(nodeID int32) { // if no _ and % in the last, we can replace the origin filter replaceOrigin := (index1 == -1) && (index2 == len(str)-1) if !replaceOrigin { - newFilter = DeepCopyExpr(newFilter) + newFilter = plan.DeepCopyExpr(newFilter) newFilters = 
append(newFilters, newFilter) } newFunc := newFilter.GetF() @@ -986,13 +986,13 @@ func (builder *QueryBuilder) optimizeLikeExpr(nodeID int32) { newFunc.Func.Obj = function.PrefixEqualFunctionEncodedID newFunc.Args[1].GetLit().Value.(*plan.Literal_Sval).Sval = newStr if replaceOrigin { - node.BlockFilterList = append(node.BlockFilterList, DeepCopyExpr(newFilter)) + node.BlockFilterList = append(node.BlockFilterList, plan.DeepCopyExpr(newFilter)) } } } if len(newFilters) > 0 { node.FilterList = append(node.FilterList, newFilters...) - node.BlockFilterList = append(node.BlockFilterList, DeepCopyExprList(newFilters)...) + node.BlockFilterList = append(node.BlockFilterList, plan.DeepCopyExprList(newFilters)...) } } @@ -1176,7 +1176,7 @@ func (builder *QueryBuilder) lockTableIfLockNoRowsAtTheEndForDelAndUpdate() (err } if lockTarget != nil { - var lockRows *Expr + var lockRows *plan.Expr pkName := tableDef.Name + "." + tableDef.Pkey.Names[0] checkIsPkColExpr := func(e *plan.Expr) bool { if col_expr, ok := e.Expr.(*plan.Expr_Col); ok { diff --git a/pkg/sql/plan/opt_misc_test.go b/pkg/sql/planner/opt_misc_test.go similarity index 95% rename from pkg/sql/plan/opt_misc_test.go rename to pkg/sql/planner/opt_misc_test.go index 6901b167bd536..8797a4e8b613b 100644 --- a/pkg/sql/plan/opt_misc_test.go +++ b/pkg/sql/planner/opt_misc_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "context" @@ -35,11 +35,11 @@ func TestRemapWindowClause(t *testing.T) { }, } - f := &Expr{ + f := &plan.Expr{ Expr: &plan.Expr_F{ F: &plan.Function{ Func: getFunctionObjRef(1, "n"), - Args: []*Expr{expr}, + Args: []*plan.Expr{expr}, }, }, Typ: plan.Type{}, diff --git a/pkg/sql/plan/optimize.go b/pkg/sql/planner/optimize.go similarity index 92% rename from pkg/sql/plan/optimize.go rename to pkg/sql/planner/optimize.go index fc19014ec56e9..2c625e679ea73 100644 --- a/pkg/sql/plan/optimize.go +++ b/pkg/sql/planner/optimize.go @@ -12,13 +12,13 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan/rule" + "github.com/matrixorigin/matrixone/pkg/sql/planner/rule" ) var defaultRules = []Rule{} @@ -52,7 +52,7 @@ func (opt *BaseOptimizer) CurrentContext() CompilerContext { return opt.ctx } -func (opt *BaseOptimizer) Optimize(stmt tree.Statement, isPrepareStmt bool) (*Query, error) { +func (opt *BaseOptimizer) Optimize(stmt tree.Statement, isPrepareStmt bool) (*plan.Query, error) { pn, err := BuildPlan(opt.ctx, stmt, isPrepareStmt) if err != nil { return nil, err @@ -65,7 +65,7 @@ func (opt *BaseOptimizer) Optimize(stmt tree.Statement, isPrepareStmt bool) (*Qu return opt.optimize() } -func (opt *BaseOptimizer) optimize() (*Query, error) { +func (opt *BaseOptimizer) optimize() (*plan.Query, error) { if len(opt.qry.Steps) == 0 { return opt.qry, nil } @@ -78,7 +78,7 @@ func (opt *BaseOptimizer) optimize() (*Query, error) { return opt.qry, nil } -func (opt *BaseOptimizer) exploreNode(n *Node) { +func (opt *BaseOptimizer) exploreNode(n *plan.Node) { for i := range n.Children { opt.exploreNode(opt.qry.Nodes[n.Children[i]]) } diff --git 
a/pkg/sql/plan/order_binder.go b/pkg/sql/planner/order_binder.go similarity index 99% rename from pkg/sql/plan/order_binder.go rename to pkg/sql/planner/order_binder.go index 599c7465a337c..3c465bb6008b0 100644 --- a/pkg/sql/plan/order_binder.go +++ b/pkg/sql/planner/order_binder.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/common/moerr" diff --git a/pkg/sql/plan/parse_jsonl_tvf.go b/pkg/sql/planner/parse_jsonl_tvf.go similarity index 99% rename from pkg/sql/plan/parse_jsonl_tvf.go rename to pkg/sql/planner/parse_jsonl_tvf.go index b773943d9f8e2..a21923c7545e9 100644 --- a/pkg/sql/plan/parse_jsonl_tvf.go +++ b/pkg/sql/planner/parse_jsonl_tvf.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "encoding/json" diff --git a/pkg/sql/plan/partition_binder.go b/pkg/sql/planner/partition_binder.go similarity index 99% rename from pkg/sql/plan/partition_binder.go rename to pkg/sql/planner/partition_binder.go index 63d353b5c1832..dda3be61a4ea0 100644 --- a/pkg/sql/plan/partition_binder.go +++ b/pkg/sql/planner/partition_binder.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "context" diff --git a/pkg/sql/plan/partition_binder_test.go b/pkg/sql/planner/partition_binder_test.go similarity index 98% rename from pkg/sql/plan/partition_binder_test.go rename to pkg/sql/planner/partition_binder_test.go index 52e95466a55dc..cd150d5ca6661 100644 --- a/pkg/sql/plan/partition_binder_test.go +++ b/pkg/sql/planner/partition_binder_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "context" @@ -249,7 +249,7 @@ func newTestPartitionBinder() *PartitionBinder { builder := NewQueryBuilder(plan.Query_SELECT, mock.CurrentContext(), false, false) bindContext := NewBindContext(builder, nil) - nodeID := builder.appendNode(&Node{ + nodeID := builder.appendNode(&plan.Node{ NodeType: plan.Node_TABLE_SCAN, Stats: nil, ObjRef: nil, @@ -269,17 +269,17 @@ func newTestTableDef( id uint64, columns []string, types []types.T, -) *TableDef { - def := &TableDef{ +) *plan.TableDef { + def := &plan.TableDef{ TblId: id, } for idx, col := range columns { def.Cols = append( def.Cols, - &ColDef{ + &plan.ColDef{ Name: col, - Typ: Type{Id: int32(types[idx])}, + Typ: plan.Type{Id: int32(types[idx])}, }, ) } diff --git a/pkg/sql/plan/plugin.go b/pkg/sql/planner/plugin.go similarity index 96% rename from pkg/sql/plan/plugin.go rename to pkg/sql/planner/plugin.go index 79562edbc1525..5fcb584c3b31f 100644 --- a/pkg/sql/plan/plugin.go +++ b/pkg/sql/planner/plugin.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/common/moerr" @@ -43,7 +43,7 @@ func (builder *QueryBuilder) buildPluginExec(tbl *tree.TableFunction, ctx *BindC return 0, moerr.NewInvalidInput(builder.GetContext(), "Invalid number of arguments (NARGS != 4).") } - colDefs := DeepCopyColDefList(pluginColdefs) + colDefs := plan.DeepCopyColDefList(pluginColdefs) node := &plan.Node{ NodeType: plan.Node_FUNCTION_SCAN, diff --git a/pkg/sql/plan/processlist.go b/pkg/sql/planner/processlist.go similarity index 99% rename from pkg/sql/plan/processlist.go rename to pkg/sql/planner/processlist.go index f0a0f23c15ac0..83e52d92193fb 100644 --- a/pkg/sql/plan/processlist.go +++ b/pkg/sql/planner/processlist.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "strings" diff --git a/pkg/sql/plan/projection_binder.go b/pkg/sql/planner/projection_binder.go similarity index 98% rename from pkg/sql/plan/projection_binder.go rename to pkg/sql/planner/projection_binder.go index 9e5f248a3e5ce..82d848afb678b 100644 --- a/pkg/sql/plan/projection_binder.go +++ b/pkg/sql/planner/projection_binder.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "math" @@ -22,10 +22,10 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" - "github.com/matrixorigin/matrixone/pkg/sql/plan/rule" + "github.com/matrixorigin/matrixone/pkg/sql/planner/rule" ) func NewProjectionBinder(builder *QueryBuilder, ctx *BindContext, havingBinder *HavingBinder) *ProjectionBinder { @@ -313,7 +313,7 @@ func (b *ProjectionBinder) makeFrameConstValue(expr tree.Expr, typ *plan.Type) ( }, nil } -func (b *ProjectionBinder) resetInterval(e *Expr) (*Expr, error) { +func (b *ProjectionBinder) resetInterval(e *plan.Expr) (*plan.Expr, error) { e1 := e.Expr.(*plan.Expr_List).List.List[0] e2 := e.Expr.(*plan.Expr_List).List.List[1] diff --git a/pkg/sql/plan/projection_binder_test.go b/pkg/sql/planner/projection_binder_test.go similarity index 99% rename from pkg/sql/plan/projection_binder_test.go rename to pkg/sql/planner/projection_binder_test.go index d729387eea93d..a74c4bd7cd701 100644 --- a/pkg/sql/plan/projection_binder_test.go +++ b/pkg/sql/planner/projection_binder_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations 
under the License. -package plan +package planner import ( "math" diff --git a/pkg/sql/plan/pushdown.go b/pkg/sql/planner/pushdown.go similarity index 96% rename from pkg/sql/plan/pushdown.go rename to pkg/sql/planner/pushdown.go index fd31c20f2b57a..86558d05c3f61 100644 --- a/pkg/sql/plan/pushdown.go +++ b/pkg/sql/planner/pushdown.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "fmt" @@ -254,7 +254,7 @@ func (builder *QueryBuilder) pushdownFilters(nodeID int32, filters []*plan.Expr, for _, key := range keys { extraFilter := walkThroughDNF(builder.GetContext(), filter, key) if extraFilter != nil { - extraFilters = append(extraFilters, DeepCopyExpr(extraFilter)) + extraFilters = append(extraFilters, plan.DeepCopyExpr(extraFilter)) joinSides = append(joinSides, getJoinSide(extraFilter, leftTags, rightTags, markTag)) } } @@ -273,7 +273,7 @@ func (builder *QueryBuilder) pushdownFilters(nodeID int32, filters []*plan.Expr, switch node.JoinType { case plan.Node_INNER: - leftPushdown = append(leftPushdown, DeepCopyExpr(filter)) + leftPushdown = append(leftPushdown, plan.DeepCopyExpr(filter)) rightPushdown = append(rightPushdown, filter) case plan.Node_LEFT, plan.Node_SEMI, plan.Node_ANTI, plan.Node_SINGLE, plan.Node_MARK: @@ -392,15 +392,15 @@ func (builder *QueryBuilder) pushdownFilters(nodeID int32, filters []*plan.Expr, for _, filter := range filters { down := false if builder.checkExprCanPushdown(filter, builder.qry.Nodes[node.Children[0]]) { - leftPushdown = append(leftPushdown, DeepCopyExpr(filter)) + leftPushdown = append(leftPushdown, plan.DeepCopyExpr(filter)) down = true } if builder.checkExprCanPushdown(filter, builder.qry.Nodes[node.Children[1]]) { - rightPushdown = append(rightPushdown, DeepCopyExpr(filter)) + rightPushdown = append(rightPushdown, plan.DeepCopyExpr(filter)) down = true } if !down { - cantPushdown = append(cantPushdown, 
DeepCopyExpr(filter)) + cantPushdown = append(cantPushdown, plan.DeepCopyExpr(filter)) } } } @@ -438,7 +438,7 @@ func (builder *QueryBuilder) pushdownFilters(nodeID int32, filters []*plan.Expr, var canPushDownRight []*plan.Expr for _, filter := range filters { - canPushdown = append(canPushdown, replaceColRefsForSet(DeepCopyExpr(filter), leftChild.ProjectList)) + canPushdown = append(canPushdown, replaceColRefsForSet(plan.DeepCopyExpr(filter), leftChild.ProjectList)) canPushDownRight = append(canPushDownRight, replaceColRefsForSet(filter, rightChild.ProjectList)) } @@ -503,9 +503,9 @@ func (builder *QueryBuilder) pushdownFilters(nodeID int32, filters []*plan.Expr, selfFilters := make([]*plan.Expr, 0) for _, filter := range filters { if onlyContainsTag(filter, node.BindingTags[0]) { - selfFilters = append(selfFilters, DeepCopyExpr(filter)) + selfFilters = append(selfFilters, plan.DeepCopyExpr(filter)) } else { - downFilters = append(downFilters, DeepCopyExpr(filter)) + downFilters = append(downFilters, plan.DeepCopyExpr(filter)) } } node.FilterList = append(node.FilterList, selfFilters...) 
@@ -581,10 +581,10 @@ func (builder *QueryBuilder) pushdownTopThroughLeftJoin(nodeID int32) { } } - nodePushDown = DeepCopyNode(node) + nodePushDown = plan.DeepCopyNode(node) if nodePushDown.Offset != nil { - newExpr, err := bindFuncExprAndConstFold(builder.GetContext(), builder.compCtx.GetProcess(), "+", []*Expr{nodePushDown.Limit, nodePushDown.Offset}) + newExpr, err := bindFuncExprAndConstFold(builder.GetContext(), builder.compCtx.GetProcess(), "+", []*plan.Expr{nodePushDown.Limit, nodePushDown.Offset}) if err != nil { goto END } @@ -691,7 +691,7 @@ func (builder *QueryBuilder) pushdownVectorIndexTopToTableScan(nodeID int32) { Flag: node.OrderBy[0].Flag, }, }, - Limit: DeepCopyExpr(node.Limit), + Limit: plan.DeepCopyExpr(node.Limit), } if ctxVal := builder.compCtx.GetProcess().Ctx.Value(defines.IvfReaderParam{}); ctxVal != nil { if readerParam, ok := ctxVal.(*plan.IndexReaderParam); ok { diff --git a/pkg/sql/plan/pushdown_test.go b/pkg/sql/planner/pushdown_test.go similarity index 90% rename from pkg/sql/plan/pushdown_test.go rename to pkg/sql/planner/pushdown_test.go index 2fd69420c6ced..0973124a7dc3c 100644 --- a/pkg/sql/plan/pushdown_test.go +++ b/pkg/sql/planner/pushdown_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "testing" @@ -31,7 +31,7 @@ func setupLeftJoinBase(t *testing.T) (*MockCompilerContext, *QueryBuilder, *plan leftTag := builder.genNewBindTag() rightTag := builder.genNewBindTag() - intType := Type{Id: int32(types.T_int64)} + intType := plan.Type{Id: int32(types.T_int64)} leftIDCol := &plan.Expr{ Typ: intType, @@ -62,7 +62,7 @@ func setupLeftJoinBase(t *testing.T) (*MockCompilerContext, *QueryBuilder, *plan } onExpr, err := BindFuncExprImplByPlanExpr(ctx.GetContext(), "=", []*plan.Expr{ - DeepCopyExpr(leftIDCol), DeepCopyExpr(rightIDCol), + plan.DeepCopyExpr(leftIDCol), plan.DeepCopyExpr(rightIDCol), }) require.NoError(t, err) @@ -71,15 +71,15 @@ func setupLeftJoinBase(t *testing.T) (*MockCompilerContext, *QueryBuilder, *plan NodeType: plan.Node_TABLE_SCAN, BindingTags: []int32{leftTag}, ProjectList: []*plan.Expr{ - DeepCopyExpr(leftIDCol), - DeepCopyExpr(leftSpaceCol), + plan.DeepCopyExpr(leftIDCol), + plan.DeepCopyExpr(leftSpaceCol), }, }, { NodeType: plan.Node_TABLE_SCAN, BindingTags: []int32{rightTag}, ProjectList: []*plan.Expr{ - DeepCopyExpr(rightIDCol), + plan.DeepCopyExpr(rightIDCol), }, }, { @@ -88,9 +88,9 @@ func setupLeftJoinBase(t *testing.T) (*MockCompilerContext, *QueryBuilder, *plan Children: []int32{0, 1}, OnList: []*plan.Expr{onExpr}, ProjectList: []*plan.Expr{ - DeepCopyExpr(leftIDCol), - DeepCopyExpr(leftSpaceCol), - DeepCopyExpr(rightIDCol), + plan.DeepCopyExpr(leftIDCol), + plan.DeepCopyExpr(leftSpaceCol), + plan.DeepCopyExpr(rightIDCol), }, }, } @@ -102,7 +102,7 @@ func TestLeftJoinOrFilterKeepsLeftJoin(t *testing.T) { ctx, builder, leftIDCol, rightIDCol, leftSpaceCol := setupLeftJoinBase(t) isNotNullExpr, err := BindFuncExprImplByPlanExpr(ctx.GetContext(), "isnotnull", []*plan.Expr{ - DeepCopyExpr(rightIDCol), + plan.DeepCopyExpr(rightIDCol), }) require.NoError(t, err) @@ -115,7 +115,7 @@ func TestLeftJoinOrFilterKeepsLeftJoin(t *testing.T) { }, } eqExpr, err := 
BindFuncExprImplByPlanExpr(ctx.GetContext(), "=", []*plan.Expr{ - DeepCopyExpr(leftSpaceCol), + plan.DeepCopyExpr(leftSpaceCol), constExpr, }) require.NoError(t, err) @@ -145,7 +145,7 @@ func TestLeftJoinOrFilterWithConstKeepsLeftJoin(t *testing.T) { }, } rightEqConst, err := BindFuncExprImplByPlanExpr(ctx.GetContext(), "=", []*plan.Expr{ - DeepCopyExpr(rightIDCol), + plan.DeepCopyExpr(rightIDCol), rightConst, }) require.NoError(t, err) @@ -159,7 +159,7 @@ func TestLeftJoinOrFilterWithConstKeepsLeftJoin(t *testing.T) { }, } leftEqConst, err := BindFuncExprImplByPlanExpr(ctx.GetContext(), "=", []*plan.Expr{ - DeepCopyExpr(leftSpaceCol), + plan.DeepCopyExpr(leftSpaceCol), leftConst, }) require.NoError(t, err) @@ -179,12 +179,12 @@ func TestLeftJoinOrFilterWithAndKeepsLeftJoin(t *testing.T) { ctx, builder, leftIDCol, rightIDCol, leftSpaceCol := setupLeftJoinBase(t) isNotNullExpr, err := BindFuncExprImplByPlanExpr(ctx.GetContext(), "isnotnull", []*plan.Expr{ - DeepCopyExpr(rightIDCol), + plan.DeepCopyExpr(rightIDCol), }) require.NoError(t, err) leftEquals11, err := BindFuncExprImplByPlanExpr(ctx.GetContext(), "=", []*plan.Expr{ - DeepCopyExpr(leftSpaceCol), + plan.DeepCopyExpr(leftSpaceCol), { Typ: leftIDCol.Typ, Expr: &plan.Expr_Lit{ @@ -203,7 +203,7 @@ func TestLeftJoinOrFilterWithAndKeepsLeftJoin(t *testing.T) { require.NoError(t, err) leftEquals12, err := BindFuncExprImplByPlanExpr(ctx.GetContext(), "=", []*plan.Expr{ - DeepCopyExpr(leftSpaceCol), + plan.DeepCopyExpr(leftSpaceCol), { Typ: leftIDCol.Typ, Expr: &plan.Expr_Lit{ diff --git a/pkg/sql/plan/query_builder.go b/pkg/sql/planner/query_builder.go similarity index 98% rename from pkg/sql/plan/query_builder.go rename to pkg/sql/planner/query_builder.go index 193248c729a64..20e5dec0fa73a 100644 --- a/pkg/sql/plan/query_builder.go +++ b/pkg/sql/planner/query_builder.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "context" @@ -31,10 +31,10 @@ import ( "github.com/matrixorigin/matrixone/pkg/logutil" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/pb/timestamp" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect/mysql" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" "github.com/matrixorigin/matrixone/pkg/sql/util" "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/options" ) @@ -86,7 +86,7 @@ func NewQueryBuilder(queryType plan.Query_StatementType, ctx CompilerContext, is } return &QueryBuilder{ - qry: &Query{ + qry: &plan.Query{ StmtType: queryType, MaxDop: int64(maxDop), }, @@ -98,7 +98,7 @@ func NewQueryBuilder(queryType plan.Query_StatementType, ctx CompilerContext, is nextBindTag: 0, mysqlCompatible: mysqlCompatible, aggSpillMem: aggSpillMem, - tag2Table: make(map[int32]*TableDef), + tag2Table: make(map[int32]*plan.TableDef), tag2NodeID: make(map[int32]int32), isPrepareStatement: isPrepareStatement, deleteNode: make(map[uint64]int32), @@ -255,7 +255,7 @@ func (builder *QueryBuilder) remapSingleColRef(col *plan.ColRef, colMap map[[2]i return nil } -func (builder *QueryBuilder) remapColRefForExpr(expr *Expr, colMap map[[2]int32][2]int32, remapInfo *RemapInfo) error { +func (builder *QueryBuilder) remapColRefForExpr(expr *plan.Expr, colMap map[[2]int32][2]int32, remapInfo *RemapInfo) error { switch ne := expr.Expr.(type) { case *plan.Expr_Col: return builder.remapSingleColRef(ne.Col, colMap, remapInfo) @@ -313,7 +313,7 @@ func (m *ColRefRemapping) String() string { // XXX: It's dangerous to copy binding/message tags if both old and new nodes are eventually used. 
func (builder *QueryBuilder) copyNode(ctx *BindContext, nodeId int32) int32 { node := builder.qry.Nodes[nodeId] - newNode := DeepCopyNode(node) + newNode := plan.DeepCopyNode(node) newNode.Children = make([]int32, 0, len(node.Children)) for _, child := range node.Children { newNode.Children = append(newNode.Children, builder.copyNode(ctx, child)) @@ -348,7 +348,7 @@ func (builder *QueryBuilder) remapAllColRefs(nodeID int32, step int32, colRefCnt } tag := node.BindingTags[0] - newTableDef := DeepCopyTableDef(node.TableDef, false) + newTableDef := plan.DeepCopyTableDef(node.TableDef, false) for i, col := range node.TableDef.Cols { globalRef := [2]int32{tag, int32(i)} @@ -358,12 +358,12 @@ func (builder *QueryBuilder) remapAllColRefs(nodeID int32, step int32, colRefCnt internalRemapping.addColRef(globalRef) - newTableDef.Cols = append(newTableDef.Cols, DeepCopyColDef(col)) + newTableDef.Cols = append(newTableDef.Cols, plan.DeepCopyColDef(col)) } if len(newTableDef.Cols) == 0 { internalRemapping.addColRef([2]int32{tag, 0}) - newTableDef.Cols = append(newTableDef.Cols, DeepCopyColDef(node.TableDef.Cols[0])) + newTableDef.Cols = append(newTableDef.Cols, plan.DeepCopyColDef(node.TableDef.Cols[0])) } node.TableDef = newTableDef @@ -485,7 +485,7 @@ func (builder *QueryBuilder) remapAllColRefs(nodeID int32, step int32, colRefCnt } colTag := node.BindingTags[0] - newTableDef := DeepCopyTableDef(node.TableDef, false) + newTableDef := plan.DeepCopyTableDef(node.TableDef, false) for i, col := range node.TableDef.Cols { globalRef := [2]int32{colTag, int32(i)} @@ -494,7 +494,7 @@ func (builder *QueryBuilder) remapAllColRefs(nodeID int32, step int32, colRefCnt } internalRemapping.addColRef(globalRef) - newTableDef.Cols = append(newTableDef.Cols, DeepCopyColDef(col)) + newTableDef.Cols = append(newTableDef.Cols, plan.DeepCopyColDef(col)) } if len(node.BindingTags) > 1 { @@ -505,7 +505,7 @@ func (builder *QueryBuilder) remapAllColRefs(nodeID int32, step int32, colRefCnt if 
len(newTableDef.Cols) == 0 { internalRemapping.addColRef([2]int32{colTag, 0}) - newTableDef.Cols = append(newTableDef.Cols, DeepCopyColDef(node.TableDef.Cols[0])) + newTableDef.Cols = append(newTableDef.Cols, plan.DeepCopyColDef(node.TableDef.Cols[0])) } node.TableDef = newTableDef @@ -862,7 +862,7 @@ func (builder *QueryBuilder) remapAllColRefs(nodeID int32, step int32, colRefCnt node.ProjectList = append(node.ProjectList, &plan.Expr{ Typ: expr.Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ RelPos: -1, ColPos: int32(idx), Name: builder.nameByColRef[globalRef], @@ -887,10 +887,10 @@ func (builder *QueryBuilder) remapAllColRefs(nodeID int32, step int32, colRefCnt remapping.addColRef(globalRef) - node.ProjectList = append(node.ProjectList, &Expr{ + node.ProjectList = append(node.ProjectList, &plan.Expr{ Typ: expr.Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ RelPos: -2, ColPos: int32(idx) + groupSize, Name: builder.nameByColRef[globalRef], @@ -933,11 +933,11 @@ func (builder *QueryBuilder) remapAllColRefs(nodeID int32, step int32, colRefCnt child := builder.qry.Nodes[node.Children[0]] if child.NodeType == plan.Node_TABLE_SCAN && len(child.FilterList) == 0 && len(node.GroupBy) == 0 && child.Limit == nil && child.Offset == nil { - child.AggList = make([]*Expr, 0, len(node.AggList)) + child.AggList = make([]*plan.Expr, 0, len(node.AggList)) for _, agg := range node.AggList { switch agg.GetF().Func.ObjName { case "starcount", "count", "min", "max": - child.AggList = append(child.AggList, DeepCopyExpr(agg)) + child.AggList = append(child.AggList, plan.DeepCopyExpr(agg)) default: child.AggList = nil } @@ -984,7 +984,7 @@ func (builder *QueryBuilder) remapAllColRefs(nodeID int32, step int32, colRefCnt node.ProjectList = append(node.ProjectList, &plan.Expr{ Typ: expr.Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ RelPos: -1, ColPos: int32(len(node.ProjectList)), Name: builder.nameByColRef[globalRef], @@ -1010,10 +1010,10 @@ 
func (builder *QueryBuilder) remapAllColRefs(nodeID int32, step int32, colRefCnt remapping.addColRef(globalRef) - node.ProjectList = append(node.ProjectList, &Expr{ + node.ProjectList = append(node.ProjectList, &plan.Expr{ Typ: expr.Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ RelPos: -2, ColPos: int32(i) + offsetSize, Name: builder.nameByColRef[globalRef], @@ -1072,7 +1072,7 @@ func (builder *QueryBuilder) remapAllColRefs(nodeID int32, step int32, colRefCnt node.ProjectList = append(node.ProjectList, &plan.Expr{ Typ: node.OrderBy[0].Expr.Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ RelPos: -1, ColPos: int32(idx), Name: builder.nameByColRef[globalRef], @@ -1114,7 +1114,7 @@ func (builder *QueryBuilder) remapAllColRefs(nodeID int32, step int32, colRefCnt node.ProjectList = append(node.ProjectList, &plan.Expr{ Typ: expr.Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ RelPos: -1, ColPos: int32(idx), Name: builder.nameByColRef[globalRef], @@ -1137,7 +1137,7 @@ func (builder *QueryBuilder) remapAllColRefs(nodeID int32, step int32, colRefCnt node.ProjectList = append(node.ProjectList, &plan.Expr{ Typ: node.Timestamp.Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ RelPos: -1, ColPos: int32(idx), Name: builder.nameByColRef[globalRef], @@ -1160,7 +1160,7 @@ func (builder *QueryBuilder) remapAllColRefs(nodeID int32, step int32, colRefCnt node.ProjectList = append(node.ProjectList, &plan.Expr{ Typ: node.Timestamp.Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ RelPos: -1, ColPos: int32(idx), Name: builder.nameByColRef[globalRef], @@ -1239,7 +1239,7 @@ func (builder *QueryBuilder) remapAllColRefs(nodeID int32, step int32, colRefCnt node.ProjectList = append(node.ProjectList, &plan.Expr{ Typ: expr.Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ RelPos: -1, ColPos: int32(l), Name: builder.nameByColRef[globalRef], @@ -1419,7 +1419,7 @@ func (builder *QueryBuilder) 
remapAllColRefs(nodeID int32, step int32, colRefCnt newProjList = append(newProjList, &plan.Expr{ Typ: expr.Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ RelPos: 0, ColPos: int32(i), }, @@ -1458,7 +1458,7 @@ func (builder *QueryBuilder) remapAllColRefs(nodeID int32, step int32, colRefCnt newProjList = append(newProjList, &plan.Expr{ Typ: expr.Typ, Expr: &plan.Expr_Col{ - Col: &ColRef{ + Col: &plan.ColRef{ RelPos: 0, ColPos: childRemapping.globalToLocal[[2]int32{resultTag, int32(i)}][1], // Name: builder.nameByColRef[globalRef], @@ -1528,8 +1528,8 @@ func (builder *QueryBuilder) remapAllColRefs(nodeID int32, step int32, colRefCnt // Rewrite DISTINCT to AGG node.NodeType = plan.Node_AGG preNode := builder.qry.Nodes[node.Children[0]] - node.GroupBy = make([]*Expr, len(preNode.ProjectList)) - node.ProjectList = make([]*Expr, len(preNode.ProjectList)) + node.GroupBy = make([]*plan.Expr, len(preNode.ProjectList)) + node.ProjectList = make([]*plan.Expr, len(preNode.ProjectList)) node.SpillMem = builder.aggSpillMem for i, prjExpr := range preNode.ProjectList { @@ -1970,7 +1970,7 @@ func (builder *QueryBuilder) rewriteStarApproxCount(nodeID int32) { var exprs []*plan.Expr str := child.ObjRef.SchemaName + "." 
+ child.TableDef.Name exprs = append(exprs, &plan.Expr{ - Typ: Type{ + Typ: plan.Type{ Id: int32(types.T_varchar), NotNullable: true, Width: int32(len(str)), @@ -1985,7 +1985,7 @@ func (builder *QueryBuilder) rewriteStarApproxCount(nodeID int32) { }) str = child.TableDef.Cols[0].Name exprs = append(exprs, &plan.Expr{ - Typ: Type{ + Typ: plan.Type{ Id: int32(types.T_varchar), NotNullable: true, Width: int32(len(str)), @@ -2046,7 +2046,7 @@ func (builder *QueryBuilder) removeUnnecessaryProjections(nodeID int32) int32 { return childNodeID } -func (builder *QueryBuilder) createQuery() (*Query, error) { +func (builder *QueryBuilder) createQuery() (*plan.Query, error) { var err error colRefBool := make(map[[2]int32]bool) sinkColRef := make(map[[2]int32]int) @@ -2684,8 +2684,8 @@ func (builder *QueryBuilder) bindRecursiveCte( } // union all statement - var limitExpr *Expr - var offsetExpr *Expr + var limitExpr *plan.Expr + var offsetExpr *plan.Expr if s.Limit != nil { limitBinder := NewLimitBinder(builder, ctx) if s.Limit.Offset != nil { @@ -3048,8 +3048,8 @@ func (builder *QueryBuilder) bindSelect(stmt *tree.Select, ctx *BindContext, isR // bind TIME WINDOW var fillType plan.Node_FillType - var fillVals, fillCols []*Expr - var interval, sliding, ts, wEnd *Expr + var fillVals, fillCols []*plan.Expr + var interval, sliding, ts, wEnd *plan.Expr var boundTimeWindowOrderBy *plan.OrderBySpec if astTimeWindow != nil { if fillType, fillVals, fillCols, interval, sliding, ts, wEnd, boundTimeWindowOrderBy, err = builder.bindTimeWindow( @@ -3072,8 +3072,8 @@ func (builder *QueryBuilder) bindSelect(stmt *tree.Select, ctx *BindContext, isR } // bind limit/offset clause - var boundOffsetExpr *Expr - var boundCountExpr *Expr + var boundOffsetExpr *plan.Expr + var boundCountExpr *plan.Expr var rankOption *plan.RankOption if astLimit != nil || astRankOption != nil { if boundOffsetExpr, boundCountExpr, rankOption, err = builder.bindLimit(ctx, astLimit, astRankOption); err != nil { @@ 
-3120,7 +3120,7 @@ func (builder *QueryBuilder) bindSelect(stmt *tree.Select, ctx *BindContext, isR // This ensures data is grouped together before applying group_concat order for _, groupExpr := range ctx.groups { sortSpecs = append(sortSpecs, &plan.OrderBySpec{ - Expr: DeepCopyExpr(groupExpr), + Expr: plan.DeepCopyExpr(groupExpr), Flag: plan.OrderBySpec_ASC, }) } @@ -3290,7 +3290,7 @@ func (builder *QueryBuilder) bindSelectClause( } } - lockNode = &Node{ + lockNode = &plan.Node{ NodeType: plan.Node_LOCK_OP, Children: []int32{nodeID}, TableDef: tableDef, @@ -3619,8 +3619,8 @@ func (builder *QueryBuilder) bindTimeWindow( helpFunc *helpFunc, ) ( fillType plan.Node_FillType, - fillVals, fillCols []*Expr, - interval, sliding, ts, wEnd *Expr, + fillVals, fillCols []*plan.Expr, + interval, sliding, ts, wEnd *plan.Expr, boundTimeWindowOrderBy *plan.OrderBySpec, err error, ) { @@ -3688,7 +3688,7 @@ func (builder *QueryBuilder) bindTimeWindow( fillType = plan.Node_LINEAR } - var v, castedExpr *Expr + var v, castedExpr *plan.Expr if astTimeWindow.Fill.Val != nil { if v, err = projectionBinder.BindExpr(astTimeWindow.Fill.Val, 0, true); err != nil { return @@ -3784,7 +3784,7 @@ func (builder *QueryBuilder) bindLimit( ctx *BindContext, astLimit *tree.Limit, astRankOption *tree.RankOption, -) (boundOffsetExpr, boundCountExpr *Expr, rankOption *plan.RankOption, err error) { +) (boundOffsetExpr, boundCountExpr *plan.Expr, rankOption *plan.RankOption, err error) { if astLimit != nil { limitBinder := NewLimitBinder(builder, ctx) if astLimit.Offset != nil { @@ -4004,8 +4004,8 @@ func (builder *QueryBuilder) appendTimeWindowNode( boundTimeWindowOrderBy *plan.OrderBySpec, boundTimeWindowGroupBy *plan.Expr, fillType plan.Node_FillType, - fillVals, fillCols []*Expr, - interval, sliding, ts, wEnd *Expr, + fillVals, fillCols []*plan.Expr, + interval, sliding, ts, wEnd *plan.Expr, astTimeWindow *tree.TimeWindow, ) (newNodeID int32, err error) { if ctx.bindingRecurStmt() { @@ -4075,7 
+4075,7 @@ func (builder *QueryBuilder) appendWindowNode( nodeID = builder.appendNode(&plan.Node{ NodeType: plan.Node_WINDOW, Children: []int32{nodeID}, - WinSpecList: []*Expr{w}, + WinSpecList: []*plan.Expr{w}, WindowIdx: int32(i), BindingTags: []int32{ctx.windowTag}, }, ctx) @@ -4527,9 +4527,9 @@ func getSelectTree(s *tree.Select) *tree.Select { func (builder *QueryBuilder) bindView( ctx *BindContext, - tableDef *TableDef, - snapshot *Snapshot, - obj *ObjectRef, + tableDef *plan.TableDef, + snapshot *plan.Snapshot, + obj *plan.ObjectRef, schema, table string, ) (nodeID int32, err error) { viewDefString := tableDef.ViewSql.View @@ -4710,7 +4710,7 @@ func (builder *QueryBuilder) buildTable(stmt tree.TableExpr, ctx *BindContext, p } } - var snapshot *Snapshot + var snapshot *plan.Snapshot if ctx.snapshot != nil { snapshot = ctx.snapshot } @@ -4721,10 +4721,10 @@ func (builder *QueryBuilder) buildTable(stmt tree.TableExpr, ctx *BindContext, p return 0, err } - var subMeta *SubscriptionMeta + var subMeta *plan.SubscriptionMeta subMeta, err = builder.compCtx.GetSubscriptionMeta(schema, snapshot) if err == nil && builder.isSkipResolveTableDef && snapshot == nil && subMeta == nil { - var tableDef *TableDef + var tableDef *plan.TableDef tableDef, err = builder.compCtx.BuildTableDefByMoColumns(schema, table) if err != nil { return 0, err @@ -4767,7 +4767,7 @@ func (builder *QueryBuilder) buildTable(stmt tree.TableExpr, ctx *BindContext, p Type: int32(plan.ExternType_EXTERNAL_TB), TbColToDataCol: tbColToDataCol, } - col := &ColDef{ + col := &plan.ColDef{ Name: catalog.ExternalFilePath, Typ: plan.Type{ Id: int32(types.T_varchar), @@ -5375,7 +5375,7 @@ func parseRankOption(options map[string]string, ctx context.Context) (*plan.Rank return rankOption, nil } -func (builder *QueryBuilder) checkExprCanPushdown(expr *Expr, node *Node) bool { +func (builder *QueryBuilder) checkExprCanPushdown(expr *plan.Expr, node *plan.Node) bool { switch node.NodeType { case 
plan.Node_FUNCTION_SCAN: if onlyContainsTag(expr, node.BindingTags[0]) { @@ -5410,7 +5410,7 @@ func (builder *QueryBuilder) checkExprCanPushdown(expr *Expr, node *Node) bool { } } -func (builder *QueryBuilder) ResolveTsHint(tsExpr *tree.AtTimeStamp) (snapshot *Snapshot, err error) { +func (builder *QueryBuilder) ResolveTsHint(tsExpr *tree.AtTimeStamp) (snapshot *plan.Snapshot, err error) { if tsExpr == nil { return } @@ -5433,9 +5433,9 @@ func (builder *QueryBuilder) ResolveTsHint(tsExpr *tree.AtTimeStamp) (snapshot * return } - var tenant *SnapshotTenant + var tenant *plan.SnapshotTenant if bgSnapshot := builder.compCtx.GetSnapshot(); IsSnapshotValid(bgSnapshot) { - tenant = &SnapshotTenant{ + tenant = &plan.SnapshotTenant{ TenantName: bgSnapshot.Tenant.TenantName, TenantID: bgSnapshot.Tenant.TenantID, } @@ -5456,7 +5456,7 @@ func (builder *QueryBuilder) ResolveTsHint(tsExpr *tree.AtTimeStamp) (snapshot * } if time.Now().UTC().UnixNano()-tsNano <= options.DefaultGCTTL.Nanoseconds() && 0 <= time.Now().UTC().UnixNano()-tsNano { - snapshot = &Snapshot{TS: &timestamp.Timestamp{PhysicalTime: tsNano}, Tenant: tenant} + snapshot = &plan.Snapshot{TS: &timestamp.Timestamp{PhysicalTime: tsNano}, Tenant: tenant} } else { var valid bool if valid, err = builder.compCtx.CheckTimeStampValid(tsNano); err != nil { @@ -5468,7 +5468,7 @@ func (builder *QueryBuilder) ResolveTsHint(tsExpr *tree.AtTimeStamp) (snapshot * return } - snapshot = &Snapshot{TS: &timestamp.Timestamp{PhysicalTime: tsNano}, Tenant: tenant} + snapshot = &plan.Snapshot{TS: &timestamp.Timestamp{PhysicalTime: tsNano}, Tenant: tenant} } } else if tsExpr.Type == tree.ATTIMESTAMPSNAPSHOT { return builder.compCtx.ResolveSnapshotWithSnapshotName(lit.Sval) @@ -5478,14 +5478,14 @@ func (builder *QueryBuilder) ResolveTsHint(tsExpr *tree.AtTimeStamp) (snapshot * return } - snapshot = &Snapshot{TS: &ts, Tenant: tenant} + snapshot = &plan.Snapshot{TS: &ts, Tenant: tenant} } else if tsExpr.Type == tree.ASOFTIMESTAMP { var ts int64 if ts, err =
doResolveTimeStamp(lit.Sval); err != nil { return } tStamp := &timestamp.Timestamp{PhysicalTime: ts} - snapshot = &Snapshot{TS: tStamp, Tenant: tenant} + snapshot = &plan.Snapshot{TS: tStamp, Tenant: tenant} } else { err = moerr.NewInvalidArg(builder.GetContext(), "invalid timestamp hint type", tsExpr.Type.String()) return @@ -5496,19 +5496,19 @@ func (builder *QueryBuilder) ResolveTsHint(tsExpr *tree.AtTimeStamp) (snapshot * err = moerr.NewInvalidArg(builder.GetContext(), "invalid timestamp value", lit.I64Val) return } - snapshot = &Snapshot{TS: &timestamp.Timestamp{PhysicalTime: lit.I64Val}, Tenant: tenant} + snapshot = &plan.Snapshot{TS: &timestamp.Timestamp{PhysicalTime: lit.I64Val}, Tenant: tenant} } else if tsExpr.Type == tree.ATMOTIMESTAMP { if lit.I64Val <= 0 { err = moerr.NewInvalidArg(builder.GetContext(), "invalid timestamp value", lit.I64Val) return } if bgSnapshot := builder.compCtx.GetSnapshot(); builder.isRestoreByTs { - tenant = &SnapshotTenant{ + tenant = &plan.SnapshotTenant{ TenantName: bgSnapshot.Tenant.TenantName, TenantID: bgSnapshot.Tenant.TenantID, } } - snapshot = &Snapshot{TS: &timestamp.Timestamp{PhysicalTime: lit.I64Val}, Tenant: tenant} + snapshot = &plan.Snapshot{TS: &timestamp.Timestamp{PhysicalTime: lit.I64Val}, Tenant: tenant} } else { err = moerr.NewInvalidArg(builder.GetContext(), "invalid timestamp hint for snapshot hint", lit.I64Val) return @@ -5520,7 +5520,7 @@ func (builder *QueryBuilder) ResolveTsHint(tsExpr *tree.AtTimeStamp) (snapshot * return } -func IsSnapshotValid(snapshot *Snapshot) bool { +func IsSnapshotValid(snapshot *plan.Snapshot) bool { if snapshot == nil { return false } diff --git a/pkg/sql/plan/query_builder_test.go b/pkg/sql/planner/query_builder_test.go similarity index 99% rename from pkg/sql/plan/query_builder_test.go rename to pkg/sql/planner/query_builder_test.go index 4ce413b200fe3..38e18912d494c 100644 --- a/pkg/sql/plan/query_builder_test.go +++ b/pkg/sql/planner/query_builder_test.go @@ -12,7 +12,7 @@ // See the License for
the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "context" @@ -35,8 +35,8 @@ func TestBuildTable_AlterView(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() type arg struct { - obj *ObjectRef - table *TableDef + obj *plan.ObjectRef + table *plan.TableDef } store := make(map[string]arg) @@ -44,7 +44,7 @@ func TestBuildTable_AlterView(t *testing.T) { &plan.ObjectRef{}, &plan.TableDef{ TableType: catalog.SystemOrdinaryRel, - Cols: []*ColDef{ + Cols: []*plan.ColDef{ { Name: "a", Typ: plan.Type{ @@ -72,7 +72,7 @@ func TestBuildTable_AlterView(t *testing.T) { ctx := NewMockCompilerContext2(ctrl) ctx.EXPECT().ResolveVariable(gomock.Any(), gomock.Any(), gomock.Any()).Return("", nil).AnyTimes() ctx.EXPECT().Resolve(gomock.Any(), gomock.Any(), gomock.Any()).DoAndReturn( - func(schemaName string, tableName string, snapshot *Snapshot) (*ObjectRef, *TableDef, error) { + func(schemaName string, tableName string, snapshot *plan.Snapshot) (*plan.ObjectRef, *plan.TableDef, error) { if schemaName == "" { schemaName = "db" } @@ -103,7 +103,7 @@ func TestTempTableAliasBindingUsesOriginName(t *testing.T) { ctx := NewMockCompilerContext2(ctrl) ctx.EXPECT().ResolveVariable(gomock.Any(), gomock.Any(), gomock.Any()).Return("", nil).AnyTimes() ctx.EXPECT().Resolve(gomock.Any(), gomock.Any(), gomock.Any()).DoAndReturn( - func(schemaName string, tableName string, snapshot *Snapshot) (*ObjectRef, *TableDef, error) { + func(schemaName string, tableName string, snapshot *plan.Snapshot) (*plan.ObjectRef, *plan.TableDef, error) { if schemaName == "" { schemaName = "db" } diff --git a/pkg/sql/plan/recursive_test.go b/pkg/sql/planner/recursive_test.go similarity index 98% rename from pkg/sql/plan/recursive_test.go rename to pkg/sql/planner/recursive_test.go index 22dc46a6f3c38..f5e950b37f9ab 100644 --- a/pkg/sql/plan/recursive_test.go +++ b/pkg/sql/planner/recursive_test.go @@ -12,7 +12,7 @@ // See the License 
for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "context" diff --git a/pkg/sql/plan/result_scan.go b/pkg/sql/planner/result_scan.go similarity index 99% rename from pkg/sql/plan/result_scan.go rename to pkg/sql/planner/result_scan.go index bbd602dae8ccb..9493d6ca364c5 100644 --- a/pkg/sql/plan/result_scan.go +++ b/pkg/sql/planner/result_scan.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "encoding/json" diff --git a/pkg/sql/plan/rule/constant_fold.go b/pkg/sql/planner/rule/constant_fold.go similarity index 99% rename from pkg/sql/plan/rule/constant_fold.go rename to pkg/sql/planner/rule/constant_fold.go index 8974c04092c1b..45dc22a1852a6 100644 --- a/pkg/sql/plan/rule/constant_fold.go +++ b/pkg/sql/planner/rule/constant_fold.go @@ -21,7 +21,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/vm/process" ) diff --git a/pkg/sql/plan/rule/predicate_pushdown.go b/pkg/sql/planner/rule/predicate_pushdown.go similarity index 98% rename from pkg/sql/plan/rule/predicate_pushdown.go rename to pkg/sql/planner/rule/predicate_pushdown.go index 10856328628f9..2baf772ebfd82 100644 --- a/pkg/sql/plan/rule/predicate_pushdown.go +++ b/pkg/sql/planner/rule/predicate_pushdown.go @@ -16,7 +16,7 @@ package rule import ( "github.com/matrixorigin/matrixone/pkg/pb/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" ) type PredicatePushdown struct { diff --git a/pkg/sql/plan/runtime_filter.go b/pkg/sql/planner/runtime_filter.go similarity index 97% rename from 
pkg/sql/plan/runtime_filter.go rename to pkg/sql/planner/runtime_filter.go index f0a931b16fee6..421c6207b971e 100644 --- a/pkg/sql/plan/runtime_filter.go +++ b/pkg/sql/planner/runtime_filter.go @@ -12,14 +12,14 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/catalog" "github.com/matrixorigin/matrixone/pkg/common/runtime" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/pb/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" ) const ( @@ -198,9 +198,9 @@ func (builder *QueryBuilder) generateRuntimeFilters(nodeID int32) { notOnPk := probeCol.Name != tableDef.Pkey.PkeyColName if convertToCPKey { - leftChild.RuntimeFilterProbeList = append(leftChild.RuntimeFilterProbeList, MakeCPKEYRuntimeFilter(rfTag, 0, DeepCopyExpr(probeExprs[0]), tableDef, notOnPk)) + leftChild.RuntimeFilterProbeList = append(leftChild.RuntimeFilterProbeList, MakeCPKEYRuntimeFilter(rfTag, 0, plan.DeepCopyExpr(probeExprs[0]), tableDef, notOnPk)) } else { - leftChild.RuntimeFilterProbeList = append(leftChild.RuntimeFilterProbeList, MakeRuntimeFilter(rfTag, false, 0, DeepCopyExpr(probeExprs[0]), notOnPk)) + leftChild.RuntimeFilterProbeList = append(leftChild.RuntimeFilterProbeList, MakeRuntimeFilter(rfTag, false, 0, plan.DeepCopyExpr(probeExprs[0]), notOnPk)) } inLimit := GetInFilterCardLimit(sid) diff --git a/pkg/sql/plan/set_binder.go b/pkg/sql/planner/set_binder.go similarity index 99% rename from pkg/sql/plan/set_binder.go rename to pkg/sql/planner/set_binder.go index cd347fd838381..6f645cc364aea 100644 --- a/pkg/sql/plan/set_binder.go +++ b/pkg/sql/planner/set_binder.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/common/moerr" diff --git a/pkg/sql/plan/shuffle.go b/pkg/sql/planner/shuffle.go similarity index 99% rename from pkg/sql/plan/shuffle.go rename to pkg/sql/planner/shuffle.go index dff892e6df738..a3122eb08bd4b 100644 --- a/pkg/sql/plan/shuffle.go +++ b/pkg/sql/planner/shuffle.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "math" diff --git a/pkg/sql/plan/shuffle_algo.go b/pkg/sql/planner/shuffle_algo.go similarity index 99% rename from pkg/sql/plan/shuffle_algo.go rename to pkg/sql/planner/shuffle_algo.go index 98dba292a19a0..cbaa6e8b2850f 100644 --- a/pkg/sql/plan/shuffle_algo.go +++ b/pkg/sql/planner/shuffle_algo.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( pb "github.com/matrixorigin/matrixone/pkg/pb/statsinfo" diff --git a/pkg/sql/plan/shuffle_test.go b/pkg/sql/planner/shuffle_test.go similarity index 99% rename from pkg/sql/plan/shuffle_test.go rename to pkg/sql/planner/shuffle_test.go index 81c10518836a3..be79a171833f5 100644 --- a/pkg/sql/plan/shuffle_test.go +++ b/pkg/sql/planner/shuffle_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "bytes" diff --git a/pkg/sql/plan/stage.go b/pkg/sql/planner/stage.go similarity index 95% rename from pkg/sql/plan/stage.go rename to pkg/sql/planner/stage.go index e06425dd724b3..0df005f913101 100644 --- a/pkg/sql/plan/stage.go +++ b/pkg/sql/planner/stage.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/container/types" @@ -34,7 +34,7 @@ var ( ) func (builder *QueryBuilder) buildStageList(tbl *tree.TableFunction, ctx *BindContext, exprs []*plan.Expr, children []int32) (int32, error) { - colDefs := DeepCopyColDefList(stageListColDefs) + colDefs := plan.DeepCopyColDefList(stageListColDefs) node := &plan.Node{ NodeType: plan.Node_FUNCTION_SCAN, Stats: &plan.Stats{}, diff --git a/pkg/sql/plan/stats.go b/pkg/sql/planner/stats.go similarity index 96% rename from pkg/sql/plan/stats.go rename to pkg/sql/planner/stats.go index 4641a124153f6..0dd479cacc6e8 100644 --- a/pkg/sql/plan/stats.go +++ b/pkg/sql/planner/stats.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "bytes" @@ -34,7 +34,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/pb/plan" pb "github.com/matrixorigin/matrixone/pkg/pb/statsinfo" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/sql/util" v2 "github.com/matrixorigin/matrixone/pkg/util/metric/v2" "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/options" @@ -180,7 +180,7 @@ func NewTableStatsInfo(lenCols int) *TableStatsInfo { return info } -func AdjustNDV(info *TableStatsInfo, tableDef *TableDef, s *pb.StatsInfo) { +func AdjustNDV(info *TableStatsInfo, tableDef *plan.TableDef, s *pb.StatsInfo) { if info.AccurateObjectNumber > 1 { for i, coldef := range tableDef.Cols[:len(tableDef.Cols)-1] { if info.ColumnNDVs[i] > s.TableCnt { @@ -379,7 +379,7 @@ func UpdateStatsInfo(info *TableStatsInfo, tableDef *plan.TableDef, s *pb.StatsI } // cols in one table, return if ndv of multi column is high enough -func isHighNdvCols(cols []int32, tableDef *TableDef, builder *QueryBuilder) bool { +func isHighNdvCols(cols 
[]int32, tableDef *plan.TableDef, builder *QueryBuilder) bool { if tableDef == nil { return false } @@ -400,7 +400,7 @@ func isHighNdvCols(cols []int32, tableDef *TableDef, builder *QueryBuilder) bool return totalNDV > s.TableCnt*highNDVcolumnThreshHold } -func (builder *QueryBuilder) getColNDVRatio(cols []int32, tableDef *TableDef) float64 { +func (builder *QueryBuilder) getColNDVRatio(cols []int32, tableDef *plan.TableDef) float64 { if tableDef == nil { return 0 } @@ -1050,7 +1050,7 @@ func ReCalcNodeStats(nodeID int32, builder *QueryBuilder, recursive bool, leafNo } } - var leftStats, rightStats, childStats *Stats + var leftStats, rightStats, childStats *plan.Stats if len(node.Children) == 1 { childStats = builder.qry.Nodes[node.Children[0]].Stats } else if len(node.Children) == 2 { @@ -1358,7 +1358,7 @@ func ReCalcNodeStats(nodeID int32, builder *QueryBuilder, recursive bool, leafNo } } else { // Slow path: need to fold the expression - limitExpr := DeepCopyExpr(node.Limit) + limitExpr := plan.DeepCopyExpr(node.Limit) if _, ok := limitExpr.Expr.(*plan.Expr_F); ok { if !hasParam(limitExpr) { limitExpr, _ = ConstantFold(batch.EmptyForConstFoldBatch, limitExpr, builder.compCtx.GetProcess(), true, true) @@ -1381,7 +1381,7 @@ func ReCalcNodeStats(nodeID int32, builder *QueryBuilder, recursive bool, leafNo } } else { // Slow path: need to fold the expression - limitExpr := DeepCopyExpr(node.IndexReaderParam.Limit) + limitExpr := plan.DeepCopyExpr(node.IndexReaderParam.Limit) if _, ok := limitExpr.Expr.(*plan.Expr_F); ok { if !hasParam(limitExpr) { limitExpr, _ = ConstantFold(batch.EmptyForConstFoldBatch, limitExpr, builder.compCtx.GetProcess(), true, true) @@ -1398,7 +1398,7 @@ func ReCalcNodeStats(nodeID int32, builder *QueryBuilder, recursive bool, leafNo } } -func computeFunctionScan(name string, exprs []*Expr, nodeStat *Stats) bool { +func computeFunctionScan(name string, exprs []*plan.Expr, nodeStat *plan.Stats) bool { if name != "generate_series" { return false 
} @@ -1429,10 +1429,10 @@ func computeFunctionScan(name string, exprs []*Expr, nodeStat *Stats) bool { return true } -func getCost(start *Expr, end *Expr, step *Expr) (float64, bool) { +func getCost(start *plan.Expr, end *plan.Expr, step *plan.Expr) (float64, bool) { var startNum, endNum, stepNum float64 var flag1, flag2, flag3 bool - getInt32Val := func(e *Expr) (float64, bool) { + getInt32Val := func(e *plan.Expr) (float64, bool) { if s, ok := e.Expr.(*plan.Expr_Lit); ok { if v, ok := s.Lit.Value.(*plan.Literal_I32Val); ok && !s.Lit.Isnull { return float64(v.I32Val), true @@ -1440,7 +1440,7 @@ func getCost(start *Expr, end *Expr, step *Expr) (float64, bool) { } return 0, false } - getInt64Val := func(e *Expr) (float64, bool) { + getInt64Val := func(e *plan.Expr) (float64, bool) { if s, ok := e.Expr.(*plan.Expr_Lit); ok { if v, ok := s.Lit.Value.(*plan.Literal_I64Val); ok && !s.Lit.Isnull { return float64(v.I64Val), true @@ -1493,7 +1493,7 @@ func getCost(start *Expr, end *Expr, step *Expr) (float64, bool) { return ret, true } -func transposeTableScanFilters(proc *process.Process, qry *Query, nodeId int32) { +func transposeTableScanFilters(proc *process.Process, qry *plan.Query, nodeId int32) { node := qry.Nodes[nodeId] if node.NodeType == plan.Node_TABLE_SCAN && len(node.FilterList) > 0 { for i, e := range node.FilterList { @@ -1508,7 +1508,7 @@ func transposeTableScanFilters(proc *process.Process, qry *Query, nodeId int32) } } -func foldTableScanFilters(proc *process.Process, qry *Query, nodeId int32, foldInExpr bool) { +func foldTableScanFilters(proc *process.Process, qry *plan.Query, nodeId int32, foldInExpr bool) { node := qry.Nodes[nodeId] if node.NodeType == plan.Node_TABLE_SCAN && len(node.FilterList) > 0 { for i, e := range node.FilterList { @@ -1605,7 +1605,7 @@ func calcScanStats(node *plan.Node, builder *QueryBuilder) *plan.Stats { if builder.optimizerHints != nil { if builder.optimizerHints.blockFilter == 1 { //always trying to pushdown blockfilters 
if zonemappable if ExprIsZonemappable(builder.GetContext(), node.FilterList[i]) { - copyOfExpr := DeepCopyExpr(node.FilterList[i]) + copyOfExpr := plan.DeepCopyExpr(node.FilterList[i]) copyOfExpr.Selectivity = currentBlockSel blockExprList = append(blockExprList, copyOfExpr) } @@ -1614,7 +1614,7 @@ func calcScanStats(node *plan.Node, builder *QueryBuilder) *plan.Stats { } else { if currentBlockSel < 1 || strings.HasPrefix(node.TableDef.Name, catalog.IndexTableNamePrefix) { if ExprIsZonemappable(builder.GetContext(), node.FilterList[i]) { - copyOfExpr := DeepCopyExpr(node.FilterList[i]) + copyOfExpr := plan.DeepCopyExpr(node.FilterList[i]) copyOfExpr.Selectivity = currentBlockSel blockExprList = append(blockExprList, copyOfExpr) } @@ -1623,7 +1623,7 @@ func calcScanStats(node *plan.Node, builder *QueryBuilder) *plan.Stats { } else { if currentBlockSel < 1 || strings.HasPrefix(node.TableDef.Name, catalog.IndexTableNamePrefix) { if ExprIsZonemappable(builder.GetContext(), node.FilterList[i]) { - copyOfExpr := DeepCopyExpr(node.FilterList[i]) + copyOfExpr := plan.DeepCopyExpr(node.FilterList[i]) copyOfExpr.Selectivity = currentBlockSel blockExprList = append(blockExprList, copyOfExpr) } @@ -1676,7 +1676,7 @@ func shouldReturnMinimalStats(node *plan.Node) bool { return false } -func InternalTable(tableDef *TableDef) bool { +func InternalTable(tableDef *plan.TableDef) bool { switch tableDef.TblId { case catalog.MO_DATABASE_ID, catalog.MO_TABLES_ID, catalog.MO_COLUMNS_ID: return true @@ -1691,7 +1691,7 @@ func InternalTable(tableDef *TableDef) bool { } func DefaultHugeStats() *plan.Stats { - stats := new(Stats) + stats := new(plan.Stats) stats.TableCnt = 100000000 stats.Cost = 100000000 stats.Outcnt = 100000000 @@ -1703,7 +1703,7 @@ func DefaultHugeStats() *plan.Stats { } func DefaultBigStats() *plan.Stats { - stats := new(Stats) + stats := new(plan.Stats) stats.TableCnt = 10000000 stats.Cost = float64(costThresholdForOneCN + 1) stats.Outcnt = 
float64(costThresholdForOneCN + 1) @@ -1719,7 +1719,7 @@ func IsDefaultStats(stats *plan.Stats) bool { } func DefaultStats() *plan.Stats { - stats := new(Stats) + stats := new(plan.Stats) stats.TableCnt = 1000 stats.Cost = 1000 stats.Outcnt = 1000 @@ -1731,7 +1731,7 @@ func DefaultStats() *plan.Stats { } func DefaultMinimalStats() *plan.Stats { - stats := new(Stats) + stats := new(plan.Stats) stats.TableCnt = 100000 stats.Cost = 10 stats.Outcnt = 10 @@ -1830,7 +1830,7 @@ func (builder *QueryBuilder) hasRecursiveScan(node *plan.Node) bool { return false } -func compareStats(stats1, stats2 *Stats) bool { +func compareStats(stats1, stats2 *plan.Stats) bool { // selectivity is first considered to reduce data // when selectivity very close, we first join smaller table if math.Abs(stats1.Selectivity-stats2.Selectivity) > 0.01 { @@ -2081,35 +2081,6 @@ func PrintStats(qry *plan.Query) string { return buf.String() } -func DeepCopyStats(stats *plan.Stats) *plan.Stats { - if stats == nil { - return nil - } - var hashmapStats *plan.HashMapStats - if stats.HashmapStats != nil { - hashmapStats = &plan.HashMapStats{ - HashmapSize: stats.HashmapStats.HashmapSize, - HashOnPK: stats.HashmapStats.HashOnPK, - Shuffle: stats.HashmapStats.Shuffle, - ShuffleColIdx: stats.HashmapStats.ShuffleColIdx, - ShuffleType: stats.HashmapStats.ShuffleType, - ShuffleColMin: stats.HashmapStats.ShuffleColMin, - ShuffleColMax: stats.HashmapStats.ShuffleColMax, - ShuffleMethod: stats.HashmapStats.ShuffleMethod, - } - } - return &plan.Stats{ - BlockNum: stats.BlockNum, - Rowsize: stats.Rowsize, - Cost: stats.Cost, - Outcnt: stats.Outcnt, - TableCnt: stats.TableCnt, - Selectivity: stats.Selectivity, - HashmapStats: hashmapStats, - ForceOneCN: stats.ForceOneCN, - } -} - func getOverlap(s *pb.StatsInfo, colname string) float64 { if s == nil || s.ShuffleRangeMap[colname] == nil { return 1.0 diff --git a/pkg/sql/plan/stats_decimal_test.go b/pkg/sql/planner/stats_decimal_test.go similarity index 99% rename from 
pkg/sql/plan/stats_decimal_test.go rename to pkg/sql/planner/stats_decimal_test.go index 15ee1e6335c7f..83fa11dae54b4 100644 --- a/pkg/sql/plan/stats_decimal_test.go +++ b/pkg/sql/planner/stats_decimal_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "testing" diff --git a/pkg/sql/plan/stats_modulo_test.go b/pkg/sql/planner/stats_modulo_test.go similarity index 99% rename from pkg/sql/plan/stats_modulo_test.go rename to pkg/sql/planner/stats_modulo_test.go index b1996bfc9c53f..e53104e02ecad 100644 --- a/pkg/sql/plan/stats_modulo_test.go +++ b/pkg/sql/planner/stats_modulo_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "math" diff --git a/pkg/sql/plan/stats_test.go b/pkg/sql/planner/stats_test.go similarity index 98% rename from pkg/sql/plan/stats_test.go rename to pkg/sql/planner/stats_test.go index 6434eae398c0c..7d1da9eadd690 100644 --- a/pkg/sql/plan/stats_test.go +++ b/pkg/sql/planner/stats_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "context" @@ -20,9 +20,10 @@ import ( "github.com/matrixorigin/matrixone/pkg/catalog" "github.com/matrixorigin/matrixone/pkg/container/types" + "github.com/matrixorigin/matrixone/pkg/pb/plan" planpb "github.com/matrixorigin/matrixone/pkg/pb/plan" pb "github.com/matrixorigin/matrixone/pkg/pb/statsinfo" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" index2 "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/index" "github.com/stretchr/testify/require" ) @@ -527,10 +528,10 @@ func TestGetExprNdv(t *testing.T) { ColPos: 0, Name: "test_col", } - builder.tag2Table = make(map[int32]*TableDef) - builder.tag2Table[0] = &TableDef{ + builder.tag2Table = make(map[int32]*plan.TableDef) + builder.tag2Table[0] = &plan.TableDef{ Name: "test_table", - Cols: []*ColDef{ + Cols: []*plan.ColDef{ { Name: "test_col", Typ: planpb.Type{Id: int32(types.T_int64)}, diff --git a/pkg/sql/plan/system_view.go b/pkg/sql/planner/system_view.go similarity index 99% rename from pkg/sql/plan/system_view.go rename to pkg/sql/planner/system_view.go index 1736fa212a087..5ff4e49d573c4 100644 --- a/pkg/sql/plan/system_view.go +++ b/pkg/sql/planner/system_view.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/container/types" diff --git a/pkg/sql/plan/table_binder.go b/pkg/sql/planner/table_binder.go similarity index 99% rename from pkg/sql/plan/table_binder.go rename to pkg/sql/planner/table_binder.go index 103c949848f03..e504e72086367 100644 --- a/pkg/sql/plan/table_binder.go +++ b/pkg/sql/planner/table_binder.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/common/moerr" diff --git a/pkg/sql/plan/table_stats.go b/pkg/sql/planner/table_stats.go similarity index 99% rename from pkg/sql/plan/table_stats.go rename to pkg/sql/planner/table_stats.go index 795a0105d6f6b..ecc19f86dcc99 100644 --- a/pkg/sql/plan/table_stats.go +++ b/pkg/sql/planner/table_stats.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/container/types" diff --git a/pkg/sql/plan/tools.go b/pkg/sql/planner/tools.go similarity index 98% rename from pkg/sql/plan/tools.go rename to pkg/sql/planner/tools.go index 266bd44d1d7ff..9c85c4b983dbe 100644 --- a/pkg/sql/plan/tools.go +++ b/pkg/sql/planner/tools.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner const ( moEnumCastIndexToValueFun = "cast_index_to_value" diff --git a/pkg/sql/plan/tools/assert.go b/pkg/sql/planner/tools/assert.go similarity index 84% rename from pkg/sql/plan/tools/assert.go rename to pkg/sql/planner/tools/assert.go index 57b8d9e24ddb6..eb13657affa68 100644 --- a/pkg/sql/plan/tools/assert.go +++ b/pkg/sql/planner/tools/assert.go @@ -19,25 +19,25 @@ import ( "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/common/runtime" - plan2 "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/parsers" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" ) func AssertPlan(ctx context.Context, sql string, pattern *MatchPattern) error { runtime.SetupServiceBasedRuntime("", runtime.DefaultRuntime()) - mock := 
plan.NewMockOptimizer(false) + mock := planner.NewMockOptimizer(false) one, err := parsers.ParseOne(context.Background(), dialect.MYSQL, sql, 1) if err != nil { return err } - actual, err := plan.BuildPlan(mock.CurrentContext(), one, false) + actual, err := planner.BuildPlan(mock.CurrentContext(), one, false) if err != nil { return err } query := actual.GetQuery() - if query.GetStmtType() != plan2.Query_SELECT { + if query.GetStmtType() != plan.Query_SELECT { return moerr.NewInternalError(ctx, "support select query plan only") } res, err := MatchSteps(ctx, query, pattern) diff --git a/pkg/sql/plan/tools/check.go b/pkg/sql/planner/tools/check.go similarity index 93% rename from pkg/sql/plan/tools/check.go rename to pkg/sql/planner/tools/check.go index dd3bdd7dc23e2..67c66d7a52865 100644 --- a/pkg/sql/plan/tools/check.go +++ b/pkg/sql/planner/tools/check.go @@ -19,15 +19,15 @@ import ( "strings" "github.com/matrixorigin/matrixone/pkg/common/moerr" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" ) type ExprChecker struct { Aliases UnorderedMap[string, string] } -func (checker *ExprChecker) Check(astExpr tree.Expr, expr *plan2.Expr) (bool, error) { +func (checker *ExprChecker) Check(astExpr tree.Expr, expr *plan.Expr) (bool, error) { switch exprImpl := astExpr.(type) { case *tree.NumVal: switch exprImpl.ValType { @@ -75,7 +75,7 @@ func (checker *ExprChecker) Check(astExpr tree.Expr, expr *plan2.Expr) (bool, er } } -func (checker *ExprChecker) checkFuncExpr(astExpr *tree.FuncExpr, expr *plan2.Expr) (bool, error) { +func (checker *ExprChecker) checkFuncExpr(astExpr *tree.FuncExpr, expr *plan.Expr) (bool, error) { fun := expr.GetF() if len(astExpr.Exprs) != len(fun.GetArgs()) { return false, nil @@ -98,7 +98,7 @@ func (checker *ExprChecker) checkFuncExpr(astExpr *tree.FuncExpr, expr *plan2.Ex return true, nil } -func (checker *ExprChecker) 
checkBinaryExpr(astExpr *tree.BinaryExpr, expr *plan2.Expr) (bool, error) { +func (checker *ExprChecker) checkBinaryExpr(astExpr *tree.BinaryExpr, expr *plan.Expr) (bool, error) { switch astExpr.Op { case tree.PLUS: fun := expr.GetF() @@ -123,7 +123,7 @@ func (checker *ExprChecker) checkBinaryExpr(astExpr *tree.BinaryExpr, expr *plan } } -func (checker *ExprChecker) checkComparisonExpr(astExpr *tree.ComparisonExpr, expr *plan2.Expr) (bool, error) { +func (checker *ExprChecker) checkComparisonExpr(astExpr *tree.ComparisonExpr, expr *plan.Expr) (bool, error) { switch astExpr.Op { case tree.EQUAL: fun := expr.GetF() diff --git a/pkg/sql/plan/tools/helper.go b/pkg/sql/planner/tools/helper.go similarity index 100% rename from pkg/sql/plan/tools/helper.go rename to pkg/sql/planner/tools/helper.go diff --git a/pkg/sql/plan/tools/match_pattern.go b/pkg/sql/planner/tools/match_pattern.go similarity index 89% rename from pkg/sql/plan/tools/match_pattern.go rename to pkg/sql/planner/tools/match_pattern.go index 03a59808444e7..7dc33131b0e7e 100644 --- a/pkg/sql/plan/tools/match_pattern.go +++ b/pkg/sql/planner/tools/match_pattern.go @@ -19,11 +19,11 @@ import ( "strings" "github.com/matrixorigin/matrixone/pkg/common/moerr" - plan2 "github.com/matrixorigin/matrixone/pkg/pb/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" ) -func TNode(nodeType plan2.Node_NodeType, children ...*MatchPattern) *MatchPattern { +func TNode(nodeType plan.Node_NodeType, children ...*MatchPattern) *MatchPattern { return TAny(children...).With(&NodeMatcher{NodeType: nodeType}) } @@ -39,12 +39,12 @@ func TAnyTree(children ...*MatchPattern) *MatchPattern { return TAny(children...).MatchAnyTree() } -func TAnyNot(nodeType plan2.Node_NodeType, children ...*MatchPattern) *MatchPattern { +func TAnyNot(nodeType plan.Node_NodeType, children ...*MatchPattern) *MatchPattern { return 
TAny(children...).With(&NodeMatcher{NodeType: nodeType, Not: true}) } func TTableScanWithoutColRef(tableName string) *MatchPattern { - return TNode(plan2.Node_TABLE_SCAN).With(&TableScanMatcher{ + return TNode(plan.Node_TABLE_SCAN).With(&TableScanMatcher{ TableName: tableName, }) } @@ -74,7 +74,7 @@ func TOutput(outputs []string, child *MatchPattern) *MatchPattern { } func TOutputWithoutOutputs(child *MatchPattern) *MatchPattern { - return TNode(plan2.Node_PROJECT, child) + return TNode(plan.Node_PROJECT, child) } func TStrictOutput(outputs []string, child *MatchPattern) *MatchPattern { @@ -83,7 +83,7 @@ func TStrictOutput(outputs []string, child *MatchPattern) *MatchPattern { } func TProjectWithoutAssignments(child *MatchPattern) *MatchPattern { - return TNode(plan2.Node_PROJECT, child) + return TNode(plan.Node_PROJECT, child) } func TProject(assigns UnorderedMap[string, *ExprMatcher], child *MatchPattern) *MatchPattern { @@ -108,16 +108,16 @@ func TExpr(e string) *ExprMatcher { return NewExprMatcher(e) } -func TJoin(joinTyp plan2.Node_JoinType, +func TJoin(joinTyp plan.Node_JoinType, onConds []string, filters []string, left, right *MatchPattern) *MatchPattern { - return TNode(plan2.Node_JOIN, left, right).With( + return TNode(plan.Node_JOIN, left, right).With( NewJoinMatcher(joinTyp, onConds, filters)) } func TAggr(aggrs UnorderedMap[string, *AggrFuncMatcher], children ...*MatchPattern) *MatchPattern { - ret := TNode(plan2.Node_AGG, children...) + ret := TNode(plan.Node_AGG, children...) 
for k, matcher := range aggrs { ret.WithAlias(k, matcher) @@ -148,7 +148,7 @@ func (pattern *MatchPattern) WithAlias(alias string, matcher RValueMatcher) *Mat } func (pattern *MatchPattern) WithExactAssignedOutputs(expectedAliases []RValueMatcher) *MatchPattern { - fun := func(builder *plan.QueryBuilder, node *plan2.Node) []VarRef { + fun := func(builder *planner.QueryBuilder, node *plan.Node) []VarRef { ret := make([]VarRef, 0) for _, expr := range node.ProjectList { col := expr.GetCol() @@ -176,7 +176,7 @@ func (pattern *MatchPattern) WithExactAssignedOutputs(expectedAliases []RValueMa } func (pattern *MatchPattern) WithExactAssignments(expectedAliases []RValueMatcher) *MatchPattern { - fun := func(builder *plan.QueryBuilder, node *plan2.Node) []VarRef { + fun := func(builder *planner.QueryBuilder, node *plan.Node) []VarRef { ret := make([]VarRef, 0) for _, expr := range node.ProjectList { col := expr.GetCol() @@ -211,8 +211,8 @@ func (pattern *MatchPattern) WithOutputs(aliases ...string) *MatchPattern { func (pattern *MatchPattern) WithExactOutputs(outputs ...string) *MatchPattern { pattern.Matchers = append(pattern.Matchers, &SymbolsMatcher{ - GetFunc: func(builder *plan.QueryBuilder, node *plan2.Node) []VarRef { - AssertFunc(node.NodeType == plan2.Node_PROJECT, "must be project node") + GetFunc: func(builder *planner.QueryBuilder, node *plan.Node) []VarRef { + AssertFunc(node.NodeType == plan.Node_PROJECT, "must be project node") ret := make([]VarRef, 0) for _, expr := range node.ProjectList { col := expr.GetCol() @@ -232,7 +232,7 @@ func (pattern *MatchPattern) IsEnd() bool { return len(pattern.Children) == 0 } -func SimpleMatch(pattern *MatchPattern, node *plan2.Node) []*MatchingState { +func SimpleMatch(pattern *MatchPattern, node *plan.Node) []*MatchingState { states := make([]*MatchingState, 0) if pattern.AnyTree { if len(node.Children) > 1 { @@ -259,7 +259,7 @@ func SimpleMatch(pattern *MatchPattern, node *plan2.Node) []*MatchingState { return states } 
-func SimpleMatchMatchers(pattern *MatchPattern, node *plan2.Node) bool { +func SimpleMatchMatchers(pattern *MatchPattern, node *plan.Node) bool { for _, matcher := range pattern.Matchers { if !matcher.SimpleMatch(node) { return false @@ -270,7 +270,7 @@ func SimpleMatchMatchers(pattern *MatchPattern, node *plan2.Node) bool { func DeepMatch( ctx context.Context, - node *plan2.Node, + node *plan.Node, pattern *MatchPattern, aliases UnorderedMap[string, string]) (*MatchResult, error) { newAliases := make(UnorderedMap[string, string]) @@ -347,7 +347,7 @@ func (state *MatchingState) IsEnd() bool { return true } -func MatchSteps(ctx context.Context, query *plan2.Query, pattern *MatchPattern) (*MatchResult, error) { +func MatchSteps(ctx context.Context, query *plan.Query, pattern *MatchPattern) (*MatchResult, error) { //fmt.Println(pattern) res, err := Match(ctx, query.Nodes, query.Nodes[query.Steps[0]], pattern) if err != nil { @@ -356,7 +356,7 @@ func MatchSteps(ctx context.Context, query *plan2.Query, pattern *MatchPattern) return res, nil } -func Match(ctx context.Context, nodes []*plan2.Node, node *plan2.Node, pattern *MatchPattern) (*MatchResult, error) { +func Match(ctx context.Context, nodes []*plan.Node, node *plan.Node, pattern *MatchPattern) (*MatchResult, error) { states := SimpleMatch(pattern, node) if len(states) == 0 { return FailMatched(), nil @@ -397,8 +397,8 @@ func Match(ctx context.Context, nodes []*plan2.Node, node *plan2.Node, pattern * } func MatchChildren(ctx context.Context, - nodes []*plan2.Node, - node *plan2.Node, + nodes []*plan.Node, + node *plan.Node, state *MatchingState) (*MatchResult, error) { if len(node.Children) != len(state.Patterns) { return nil, moerr.NewInternalError(ctx, "patterns count != children count") @@ -422,7 +422,7 @@ func MatchChildren(ctx context.Context, } func MatchLeaf(ctx context.Context, - node *plan2.Node, + node *plan.Node, pattern *MatchPattern, states []*MatchingState) (*MatchResult, error) { res := 
FailMatched() diff --git a/pkg/sql/plan/tools/match_pattern_test.go b/pkg/sql/planner/tools/match_pattern_test.go similarity index 96% rename from pkg/sql/plan/tools/match_pattern_test.go rename to pkg/sql/planner/tools/match_pattern_test.go index 6d615ea9d03d0..0f66f19deed56 100644 --- a/pkg/sql/plan/tools/match_pattern_test.go +++ b/pkg/sql/planner/tools/match_pattern_test.go @@ -18,15 +18,14 @@ import ( "context" "testing" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/stretchr/testify/assert" - - plan2 "github.com/matrixorigin/matrixone/pkg/pb/plan" ) func Test_output(t *testing.T) { p := - TNode(plan2.Node_PROJECT, - TNode(plan2.Node_TABLE_SCAN).WithAlias( + TNode(plan.Node_PROJECT, + TNode(plan.Node_TABLE_SCAN).WithAlias( "L_ORDERKEY", TColumnRef("lineitem", "l_orderkey"), ), @@ -186,7 +185,7 @@ func Test_tableScan(t *testing.T) { func Test_joinMatcher(t *testing.T) { p := TAnyTree( - TJoin(plan2.Node_INNER, + TJoin(plan.Node_INNER, []string{"LINEITEM_OK = ORDERS_OK"}, nil, TTableScanWithoutColRef("lineitem"). @@ -206,7 +205,7 @@ func Test_joinMatcher(t *testing.T) { func Test_selfJoin(t *testing.T) { p := TAnyTree( - TJoin(plan2.Node_INNER, + TJoin(plan.Node_INNER, []string{"L_ORDERS_OK = R_ORDERS_OK"}, nil, TTableScanWithoutColRef("orders"). 
@@ -245,8 +244,8 @@ func Test_aggr(t *testing.T) { func Test_aliasDoesNotExists(t *testing.T) { p := - TNode(plan2.Node_PROJECT, - TNode(plan2.Node_TABLE_SCAN).WithAlias("ORDERKEY", + TNode(plan.Node_PROJECT, + TNode(plan.Node_TABLE_SCAN).WithAlias("ORDERKEY", TColumnRef("lineitem", "xxxxx"))) err := AssertPlan(context.Background(), "SELECT l_orderkey FROM lineitem", p) @@ -317,7 +316,7 @@ func Test_strictProjectExtraSymbols(t *testing.T) { func Test_duplicateAliases(t *testing.T) { p := TAnyTree( - TJoin(plan2.Node_INNER, + TJoin(plan.Node_INNER, []string{"ORDERS_OK", "LINEITEM_OK"}, nil, TAnyTree( diff --git a/pkg/sql/plan/tools/matcher.go b/pkg/sql/planner/tools/matcher.go similarity index 90% rename from pkg/sql/plan/tools/matcher.go rename to pkg/sql/planner/tools/matcher.go index 075a56e646335..cd43ee369dcb9 100644 --- a/pkg/sql/plan/tools/matcher.go +++ b/pkg/sql/planner/tools/matcher.go @@ -20,11 +20,11 @@ import ( "strings" "github.com/matrixorigin/matrixone/pkg/common/moerr" - plan2 "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/parsers" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" ) var _ Matcher = new(NodeMatcher) @@ -40,7 +40,7 @@ var _ RValueMatcher = new(ExprMatcher) var _ RValueMatcher = new(AggrFuncMatcher) type NodeMatcher struct { - NodeType plan2.Node_NodeType + NodeType plan.Node_NodeType Not bool } @@ -51,7 +51,7 @@ func (matcher *NodeMatcher) String() string { return matcher.NodeType.String() } -func (matcher *NodeMatcher) SimpleMatch(node *plan2.Node) bool { +func (matcher *NodeMatcher) SimpleMatch(node *plan.Node) bool { if matcher.Not { return node.NodeType != matcher.NodeType } @@ -75,7 +75,7 @@ func (matcher *TableScanMatcher) String() string { } func (matcher 
*TableScanMatcher) SimpleMatch(node *plan.Node) bool { - return node.NodeType == plan2.Node_TABLE_SCAN + return node.NodeType == plan.Node_TABLE_SCAN } func (matcher *TableScanMatcher) DeepMatch(ctx context.Context, node *plan.Node, aliases UnorderedMap[string, string]) (*MatchResult, error) { @@ -128,8 +128,8 @@ func (matcher *ColumnRef) String() string { return fmt.Sprintf("Column %s:%s", matcher.TableName, matcher.ColumnName) } -func (matcher *ColumnRef) GetAssignedVar(node *plan2.Node, aliases UnorderedMap[string, string]) (*VarRef, error) { - if node.NodeType != plan2.Node_TABLE_SCAN { +func (matcher *ColumnRef) GetAssignedVar(node *plan.Node, aliases UnorderedMap[string, string]) (*VarRef, error) { + if node.NodeType != plan.Node_TABLE_SCAN { return nil, nil } @@ -148,7 +148,7 @@ func (matcher *ColumnRef) GetAssignedVar(node *plan2.Node, aliases UnorderedMap[ return nil, nil } -type GetFunc func(*plan.QueryBuilder, *plan2.Node) []VarRef +type GetFunc func(*planner.QueryBuilder, *plan.Node) []VarRef type SymbolsMatcher struct { GetFunc GetFunc @@ -228,7 +228,7 @@ func (matcher *OutputMatcher) String() string { } func (matcher *OutputMatcher) SimpleMatch(node *plan.Node) bool { - return node.NodeType == plan2.Node_PROJECT + return node.NodeType == plan.Node_PROJECT } func (matcher *OutputMatcher) DeepMatch(ctx context.Context, node *plan.Node, aliases UnorderedMap[string, string]) (*MatchResult, error) { @@ -279,9 +279,9 @@ func NewExprMatcher(sql string) *ExprMatcher { } } -func (matcher *ExprMatcher) GetAssignedVar(node *plan2.Node, aliases UnorderedMap[string, string]) (*VarRef, error) { +func (matcher *ExprMatcher) GetAssignedVar(node *plan.Node, aliases UnorderedMap[string, string]) (*VarRef, error) { var res *VarRef - checkedExprs := make([]*plan2.Expr, 0) + checkedExprs := make([]*plan.Expr, 0) for _, expr := range node.ProjectList { eChecker := &ExprChecker{ Aliases: aliases, @@ -322,14 +322,14 @@ func (matcher *ExprMatcher) String() string { } type 
JoinMatcher struct { - JoinTyp plan2.Node_JoinType + JoinTyp plan.Node_JoinType OnCondsStr []string OnConds []tree.Expr FiltersStr []string Filters []tree.Expr } -func NewJoinMatcher(joinType plan2.Node_JoinType, conds []string, filters []string) *JoinMatcher { +func NewJoinMatcher(joinType plan.Node_JoinType, conds []string, filters []string) *JoinMatcher { ret := &JoinMatcher{ JoinTyp: joinType, OnCondsStr: conds, @@ -345,12 +345,12 @@ func NewJoinMatcher(joinType plan2.Node_JoinType, conds []string, filters []stri return ret } -func (matcher *JoinMatcher) SimpleMatch(node *plan2.Node) bool { - return node.NodeType == plan2.Node_JOIN && +func (matcher *JoinMatcher) SimpleMatch(node *plan.Node) bool { + return node.NodeType == plan.Node_JOIN && node.JoinType == matcher.JoinTyp } -func (matcher *JoinMatcher) DeepMatch(ctx context.Context, node *plan2.Node, aliases UnorderedMap[string, string]) (*MatchResult, error) { +func (matcher *JoinMatcher) DeepMatch(ctx context.Context, node *plan.Node, aliases UnorderedMap[string, string]) (*MatchResult, error) { if !matcher.SimpleMatch(node) { return FailMatched(), nil } @@ -418,8 +418,8 @@ func NewAggrFuncMatcher(s string) *AggrFuncMatcher { return ret } -func (matcher *AggrFuncMatcher) GetAssignedVar(node *plan2.Node, aliases UnorderedMap[string, string]) (*VarRef, error) { - if node.NodeType != plan2.Node_AGG { +func (matcher *AggrFuncMatcher) GetAssignedVar(node *plan.Node, aliases UnorderedMap[string, string]) (*VarRef, error) { + if node.NodeType != plan.Node_AGG { return nil, nil } var res *VarRef diff --git a/pkg/sql/plan/tools/types.go b/pkg/sql/planner/tools/types.go similarity index 91% rename from pkg/sql/plan/tools/types.go rename to pkg/sql/planner/tools/types.go index 8cd55b4ad8ca1..e106daeecfaa0 100644 --- a/pkg/sql/plan/tools/types.go +++ b/pkg/sql/planner/tools/types.go @@ -18,7 +18,7 @@ import ( "context" "strings" - plan2 "github.com/matrixorigin/matrixone/pkg/pb/plan" + 
"github.com/matrixorigin/matrixone/pkg/pb/plan" ) type MatchResult struct { @@ -73,21 +73,21 @@ func FailMatched() *MatchResult { type Matcher interface { // SimpleMatch check the intuitive properties about Node like type, datatype, etc. - SimpleMatch(*plan2.Node) bool + SimpleMatch(*plan.Node) bool // DeepMatch check the internal structure about Node - DeepMatch(context.Context, *plan2.Node, UnorderedMap[string, string]) (*MatchResult, error) + DeepMatch(context.Context, *plan.Node, UnorderedMap[string, string]) (*MatchResult, error) String() string } type VarRef struct { Name string - Type plan2.Type + Type plan.Type } type RValueMatcher interface { - GetAssignedVar(*plan2.Node, UnorderedMap[string, string]) (*VarRef, error) + GetAssignedVar(*plan.Node, UnorderedMap[string, string]) (*VarRef, error) String() string } diff --git a/pkg/sql/plan/tpch/ddl.sql b/pkg/sql/planner/tpch/ddl.sql similarity index 100% rename from pkg/sql/plan/tpch/ddl.sql rename to pkg/sql/planner/tpch/ddl.sql diff --git a/pkg/sql/plan/tpch/q1.sql b/pkg/sql/planner/tpch/q1.sql similarity index 100% rename from pkg/sql/plan/tpch/q1.sql rename to pkg/sql/planner/tpch/q1.sql diff --git a/pkg/sql/plan/tpch/q10.sql b/pkg/sql/planner/tpch/q10.sql similarity index 100% rename from pkg/sql/plan/tpch/q10.sql rename to pkg/sql/planner/tpch/q10.sql diff --git a/pkg/sql/plan/tpch/q11.sql b/pkg/sql/planner/tpch/q11.sql similarity index 100% rename from pkg/sql/plan/tpch/q11.sql rename to pkg/sql/planner/tpch/q11.sql diff --git a/pkg/sql/plan/tpch/q12.sql b/pkg/sql/planner/tpch/q12.sql similarity index 100% rename from pkg/sql/plan/tpch/q12.sql rename to pkg/sql/planner/tpch/q12.sql diff --git a/pkg/sql/plan/tpch/q13.sql b/pkg/sql/planner/tpch/q13.sql similarity index 100% rename from pkg/sql/plan/tpch/q13.sql rename to pkg/sql/planner/tpch/q13.sql diff --git a/pkg/sql/plan/tpch/q14.sql b/pkg/sql/planner/tpch/q14.sql similarity index 100% rename from pkg/sql/plan/tpch/q14.sql rename to 
pkg/sql/planner/tpch/q14.sql diff --git a/pkg/sql/plan/tpch/q15.sql b/pkg/sql/planner/tpch/q15.sql similarity index 100% rename from pkg/sql/plan/tpch/q15.sql rename to pkg/sql/planner/tpch/q15.sql diff --git a/pkg/sql/plan/tpch/q16.sql b/pkg/sql/planner/tpch/q16.sql similarity index 100% rename from pkg/sql/plan/tpch/q16.sql rename to pkg/sql/planner/tpch/q16.sql diff --git a/pkg/sql/plan/tpch/q17.sql b/pkg/sql/planner/tpch/q17.sql similarity index 100% rename from pkg/sql/plan/tpch/q17.sql rename to pkg/sql/planner/tpch/q17.sql diff --git a/pkg/sql/plan/tpch/q18.sql b/pkg/sql/planner/tpch/q18.sql similarity index 100% rename from pkg/sql/plan/tpch/q18.sql rename to pkg/sql/planner/tpch/q18.sql diff --git a/pkg/sql/plan/tpch/q19.sql b/pkg/sql/planner/tpch/q19.sql similarity index 100% rename from pkg/sql/plan/tpch/q19.sql rename to pkg/sql/planner/tpch/q19.sql diff --git a/pkg/sql/plan/tpch/q2.sql b/pkg/sql/planner/tpch/q2.sql similarity index 100% rename from pkg/sql/plan/tpch/q2.sql rename to pkg/sql/planner/tpch/q2.sql diff --git a/pkg/sql/plan/tpch/q20.sql b/pkg/sql/planner/tpch/q20.sql similarity index 100% rename from pkg/sql/plan/tpch/q20.sql rename to pkg/sql/planner/tpch/q20.sql diff --git a/pkg/sql/plan/tpch/q21.sql b/pkg/sql/planner/tpch/q21.sql similarity index 100% rename from pkg/sql/plan/tpch/q21.sql rename to pkg/sql/planner/tpch/q21.sql diff --git a/pkg/sql/plan/tpch/q22.sql b/pkg/sql/planner/tpch/q22.sql similarity index 100% rename from pkg/sql/plan/tpch/q22.sql rename to pkg/sql/planner/tpch/q22.sql diff --git a/pkg/sql/plan/tpch/q3.sql b/pkg/sql/planner/tpch/q3.sql similarity index 100% rename from pkg/sql/plan/tpch/q3.sql rename to pkg/sql/planner/tpch/q3.sql diff --git a/pkg/sql/plan/tpch/q4.sql b/pkg/sql/planner/tpch/q4.sql similarity index 100% rename from pkg/sql/plan/tpch/q4.sql rename to pkg/sql/planner/tpch/q4.sql diff --git a/pkg/sql/plan/tpch/q5.sql b/pkg/sql/planner/tpch/q5.sql similarity index 100% rename from 
pkg/sql/plan/tpch/q5.sql rename to pkg/sql/planner/tpch/q5.sql diff --git a/pkg/sql/plan/tpch/q6.sql b/pkg/sql/planner/tpch/q6.sql similarity index 100% rename from pkg/sql/plan/tpch/q6.sql rename to pkg/sql/planner/tpch/q6.sql diff --git a/pkg/sql/plan/tpch/q7.sql b/pkg/sql/planner/tpch/q7.sql similarity index 100% rename from pkg/sql/plan/tpch/q7.sql rename to pkg/sql/planner/tpch/q7.sql diff --git a/pkg/sql/plan/tpch/q8.sql b/pkg/sql/planner/tpch/q8.sql similarity index 100% rename from pkg/sql/plan/tpch/q8.sql rename to pkg/sql/planner/tpch/q8.sql diff --git a/pkg/sql/plan/tpch/q9.sql b/pkg/sql/planner/tpch/q9.sql similarity index 100% rename from pkg/sql/plan/tpch/q9.sql rename to pkg/sql/planner/tpch/q9.sql diff --git a/pkg/sql/plan/tpch/simple.sql b/pkg/sql/planner/tpch/simple.sql similarity index 100% rename from pkg/sql/plan/tpch/simple.sql rename to pkg/sql/planner/tpch/simple.sql diff --git a/pkg/sql/plan/tpch_test.go b/pkg/sql/planner/tpch_test.go similarity index 99% rename from pkg/sql/plan/tpch_test.go rename to pkg/sql/planner/tpch_test.go index 079b858db4114..4a02f2362f5e5 100644 --- a/pkg/sql/plan/tpch_test.go +++ b/pkg/sql/planner/tpch_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "context" diff --git a/pkg/sql/plan/types.go b/pkg/sql/planner/types.go similarity index 82% rename from pkg/sql/plan/types.go rename to pkg/sql/planner/types.go index 7f77e201262d9..d5d2b311acf5a 100644 --- a/pkg/sql/plan/types.go +++ b/pkg/sql/planner/types.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "context" @@ -22,8 +22,8 @@ import ( "github.com/matrixorigin/matrixone/pkg/pb/plan" pb "github.com/matrixorigin/matrixone/pkg/pb/statsinfo" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" "github.com/matrixorigin/matrixone/pkg/vm/process" ) @@ -44,56 +44,25 @@ const ( AliasAfterColumn ) -type TableDefType = plan.TableDef_DefType -type TableDef = plan.TableDef -type ColDef = plan.ColDef -type ObjectRef = plan.ObjectRef -type ColRef = plan.ColRef -type Stats = plan.Stats -type Const = plan.Literal -type MaxValue = plan.MaxValue -type Expr = plan.Expr -type Node = plan.Node -type RowsetData = plan.RowsetData -type Query = plan.Query -type Plan = plan.Plan -type Type = plan.Type -type Plan_Query = plan.Plan_Query -type Property = plan.Property -type TableDef_DefType_Properties = plan.TableDef_DefType_Properties -type PropertiesDef = plan.PropertiesDef -type ViewDef = plan.ViewDef -type ClusterByDef = plan.ClusterByDef -type OrderBySpec = plan.OrderBySpec -type FkColName = plan.FkColName -type ForeignKeyDef = plan.ForeignKeyDef -type ClusterTable = plan.ClusterTable -type PrimaryKeyDef = plan.PrimaryKeyDef -type IndexDef = plan.IndexDef -type SubscriptionMeta = plan.SubscriptionMeta -type Snapshot = plan.Snapshot -type SnapshotTenant = plan.SnapshotTenant -type ExternAttr = plan.ExternAttr - type CompilerContext interface { // Default database/schema in context DefaultDatabase() string // check if database exist - DatabaseExists(name string, snapshot *Snapshot) bool + DatabaseExists(name string, snapshot *plan.Snapshot) bool // get table definition by database/schema - Resolve(schemaName string, tableName string, snapshot *Snapshot) (*ObjectRef, *TableDef, error) + Resolve(schemaName string, tableName string, snapshot *plan.Snapshot) (*plan.ObjectRef, *plan.TableDef, error) // get index table definition by 
an ObjectRef, will skip unnecessary subscription check - ResolveIndexTableByRef(ref *ObjectRef, tblName string, snapshot *Snapshot) (*ObjectRef, *TableDef, error) + ResolveIndexTableByRef(ref *plan.ObjectRef, tblName string, snapshot *plan.Snapshot) (*plan.ObjectRef, *plan.TableDef, error) // get table definition by table id - ResolveById(tableId uint64, snapshot *Snapshot) (*ObjectRef, *TableDef, error) + ResolveById(tableId uint64, snapshot *plan.Snapshot) (*plan.ObjectRef, *plan.TableDef, error) // get the value of variable ResolveVariable(varName string, isSystemVar, isGlobalVar bool) (interface{}, error) // get the list of the account id ResolveAccountIds(accountNames []string) ([]uint32, error) // get the relevant information of udf - ResolveUdf(name string, args []*Expr) (*function.Udf, error) + ResolveUdf(name string, args []*plan.Expr) (*function.Udf, error) // get needed info for stats by table, NOTE: Stats May indirectly access the file service - Stats(obj *ObjectRef, snapshot *Snapshot) (*pb.StatsInfo, error) + Stats(obj *plan.ObjectRef, snapshot *plan.Snapshot) (*pb.StatsInfo, error) // get origin sql string of the root GetRootSql() string // get username of current session @@ -106,25 +75,25 @@ type CompilerContext interface { // SetContext set raw context.Context SetContext(ctx context.Context) // GetDatabaseId Get database id - GetDatabaseId(dbName string, snapshot *Snapshot) (uint64, error) + GetDatabaseId(dbName string, snapshot *plan.Snapshot) (uint64, error) GetProcess() *process.Process - GetQueryResultMeta(uuid string) ([]*ColDef, string, error) + GetQueryResultMeta(uuid string) ([]*plan.ColDef, string, error) SetBuildingAlterView(yesOrNo bool, dbName, viewName string) // is building the alter view or not // return: yes or no, dbName, viewName GetBuildingAlterView() (bool, string, string) GetStatsCache() *StatsCache - GetSubscriptionMeta(dbName string, snapshot *Snapshot) (*SubscriptionMeta, error) + GetSubscriptionMeta(dbName string, snapshot 
*plan.Snapshot) (*plan.SubscriptionMeta, error) CheckSubscriptionValid(subName, accName string, pubName string) error - SetQueryingSubscription(meta *SubscriptionMeta) - GetQueryingSubscription() *SubscriptionMeta + SetQueryingSubscription(meta *plan.SubscriptionMeta) + GetQueryingSubscription() *plan.SubscriptionMeta IsPublishing(dbName string) (bool, error) - BuildTableDefByMoColumns(dbName, table string) (*TableDef, error) - ResolveSubscriptionTableById(tableId uint64, pubmeta *SubscriptionMeta) (*ObjectRef, *TableDef, error) + BuildTableDefByMoColumns(dbName, table string) (*plan.TableDef, error) + ResolveSubscriptionTableById(tableId uint64, pubmeta *plan.SubscriptionMeta) (*plan.ObjectRef, *plan.TableDef, error) - ResolveSnapshotWithSnapshotName(snapshotName string) (*Snapshot, error) + ResolveSnapshotWithSnapshotName(snapshotName string) (*plan.Snapshot, error) CheckTimeStampValid(ts int64) (bool, error) //InitExecuteStmtParam replaces the plan of the EXECUTE by the plan generated by the PREPARE. @@ -133,8 +102,8 @@ type CompilerContext interface { // the statement generated by the PREPARE InitExecuteStmtParam(execPlan *plan.Execute) (*plan.Plan, tree.Statement, error) - GetSnapshot() *Snapshot - SetSnapshot(snapshot *Snapshot) + GetSnapshot() *plan.Snapshot + SetSnapshot(snapshot *plan.Snapshot) GetViews() []string SetViews(views []string) @@ -142,18 +111,18 @@ type CompilerContext interface { } type Optimizer interface { - Optimize(stmt tree.Statement) (*Query, error) + Optimize(stmt tree.Statement) (*plan.Query, error) CurrentContext() CompilerContext } type Rule interface { - Match(*Node) bool // rule match? - Apply(*Node, *Query, *process.Process) // apply the rule + Match(*plan.Node) bool // rule match? 
+ Apply(*plan.Node, *plan.Query, *process.Process) // apply the rule } // BaseOptimizer is base optimizer, capable of handling only a few simple rules type BaseOptimizer struct { - qry *Query + qry *plan.Query rules []Rule ctx CompilerContext } @@ -172,7 +141,7 @@ type QueryBuilder struct { protectedScans map[int32]int projectSpecialGuards map[int32]*specialIndexGuard - tag2Table map[int32]*TableDef + tag2Table map[int32]*plan.TableDef tag2NodeID map[int32]int32 nextBindTag int32 @@ -227,7 +196,7 @@ type CTERef struct { isRecursive bool ast *tree.CTE maskedCTEs map[string]bool - snapshot *Snapshot + snapshot *plan.Snapshot } type CteBindState struct { @@ -330,7 +299,7 @@ type BindContext struct { // Used to generate a Sort node before the Agg node instead of using window function. groupConcatOrderBys []*plan.OrderBySpec - snapshot *Snapshot + snapshot *plan.Snapshot // all view keys(dbName#viewName) views []string //view in binding or already bound @@ -385,13 +354,13 @@ type baseBinder struct { type DefaultBinder struct { baseBinder - typ Type + typ plan.Type cols []string } type UpdateBinder struct { baseBinder - cols []*ColDef + cols []*plan.ColDef } type OndupUpdateBinder struct { @@ -484,7 +453,7 @@ const ( // for Decimal type, we need colDef to get the scale type OriginTableMessageForFuzzy struct { ParentTableName string - ParentUniqueCols []*ColDef + ParentUniqueCols []*plan.ColDef } type MultiTableIndex struct { diff --git a/pkg/sql/plan/types_mock_test.go b/pkg/sql/planner/types_mock_test.go similarity index 93% rename from pkg/sql/plan/types_mock_test.go rename to pkg/sql/planner/types_mock_test.go index 3587aee7499f4..fb6034159ecce 100644 --- a/pkg/sql/plan/types_mock_test.go +++ b/pkg/sql/planner/types_mock_test.go @@ -13,10 +13,10 @@ // limitations under the License. // Code generated by MockGen. DO NOT EDIT. -// Source: ../../../pkg/sql/plan/types.go +// Source: ../../../pkg/sql/planner/types.go -// Package plan is a generated GoMock package. 
-package plan +// Package planner is a generated GoMock package. +package planner import ( context "context" @@ -25,8 +25,8 @@ import ( gomock "github.com/golang/mock/gomock" plan "github.com/matrixorigin/matrixone/pkg/pb/plan" statsinfo "github.com/matrixorigin/matrixone/pkg/pb/statsinfo" + function "github.com/matrixorigin/matrixone/pkg/sql/function" tree "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - function "github.com/matrixorigin/matrixone/pkg/sql/plan/function" process "github.com/matrixorigin/matrixone/pkg/vm/process" ) @@ -54,10 +54,10 @@ func (m *MockCompilerContext2) EXPECT() *MockCompilerContext2MockRecorder { } // BuildTableDefByMoColumns mocks base method. -func (m *MockCompilerContext2) BuildTableDefByMoColumns(dbName, table string) (*TableDef, error) { +func (m *MockCompilerContext2) BuildTableDefByMoColumns(dbName, table string) (*plan.TableDef, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "BuildTableDefByMoColumns", dbName, table) - ret0, _ := ret[0].(*TableDef) + ret0, _ := ret[0].(*plan.TableDef) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -98,7 +98,7 @@ func (mr *MockCompilerContext2MockRecorder) CheckTimeStampValid(ts interface{}) } // DatabaseExists mocks base method. -func (m *MockCompilerContext2) DatabaseExists(name string, snapshot *Snapshot) bool { +func (m *MockCompilerContext2) DatabaseExists(name string, snapshot *plan.Snapshot) bool { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "DatabaseExists", name, snapshot) ret0, _ := ret[0].(bool) @@ -134,16 +134,17 @@ func (m *MockCompilerContext2) GetAccountId() (uint32, error) { return ret0, ret1 } -func (m *MockCompilerContext2) GetAccountName() string { - return "" -} - // GetAccountId indicates an expected call of GetAccountId. 
func (mr *MockCompilerContext2MockRecorder) GetAccountId() *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAccountId", reflect.TypeOf((*MockCompilerContext2)(nil).GetAccountId)) } +// GetAccountName mocks base method. +func (m *MockCompilerContext2) GetAccountName() string { + return "" +} + // GetBuildingAlterView mocks base method. func (m *MockCompilerContext2) GetBuildingAlterView() (bool, string, string) { m.ctrl.T.Helper() @@ -175,7 +176,7 @@ func (mr *MockCompilerContext2MockRecorder) GetContext() *gomock.Call { } // GetDatabaseId mocks base method. -func (m *MockCompilerContext2) GetDatabaseId(dbName string, snapshot *Snapshot) (uint64, error) { +func (m *MockCompilerContext2) GetDatabaseId(dbName string, snapshot *plan.Snapshot) (uint64, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetDatabaseId", dbName, snapshot) ret0, _ := ret[0].(uint64) @@ -218,10 +219,10 @@ func (mr *MockCompilerContext2MockRecorder) GetProcess() *gomock.Call { } // GetQueryResultMeta mocks base method. -func (m *MockCompilerContext2) GetQueryResultMeta(uuid string) ([]*ColDef, string, error) { +func (m *MockCompilerContext2) GetQueryResultMeta(uuid string) ([]*plan.ColDef, string, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetQueryResultMeta", uuid) - ret0, _ := ret[0].([]*ColDef) + ret0, _ := ret[0].([]*plan.ColDef) ret1, _ := ret[1].(string) ret2, _ := ret[2].(error) return ret0, ret1, ret2 @@ -234,10 +235,10 @@ func (mr *MockCompilerContext2MockRecorder) GetQueryResultMeta(uuid interface{}) } // GetQueryingSubscription mocks base method. 
-func (m *MockCompilerContext2) GetQueryingSubscription() *SubscriptionMeta { +func (m *MockCompilerContext2) GetQueryingSubscription() *plan.SubscriptionMeta { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetQueryingSubscription") - ret0, _ := ret[0].(*SubscriptionMeta) + ret0, _ := ret[0].(*plan.SubscriptionMeta) return ret0 } @@ -262,10 +263,10 @@ func (mr *MockCompilerContext2MockRecorder) GetRootSql() *gomock.Call { } // GetSnapshot mocks base method. -func (m *MockCompilerContext2) GetSnapshot() *Snapshot { +func (m *MockCompilerContext2) GetSnapshot() *plan.Snapshot { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetSnapshot") - ret0, _ := ret[0].(*Snapshot) + ret0, _ := ret[0].(*plan.Snapshot) return ret0 } @@ -290,10 +291,10 @@ func (mr *MockCompilerContext2MockRecorder) GetStatsCache() *gomock.Call { } // GetSubscriptionMeta mocks base method. -func (m *MockCompilerContext2) GetSubscriptionMeta(dbName string, snapshot *Snapshot) (*SubscriptionMeta, error) { +func (m *MockCompilerContext2) GetSubscriptionMeta(dbName string, snapshot *plan.Snapshot) (*plan.SubscriptionMeta, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetSubscriptionMeta", dbName, snapshot) - ret0, _ := ret[0].(*SubscriptionMeta) + ret0, _ := ret[0].(*plan.SubscriptionMeta) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -364,11 +365,11 @@ func (mr *MockCompilerContext2MockRecorder) IsPublishing(dbName interface{}) *go } // Resolve mocks base method. 
-func (m *MockCompilerContext2) Resolve(schemaName, tableName string, snapshot *Snapshot) (*ObjectRef, *TableDef, error) { +func (m *MockCompilerContext2) Resolve(schemaName, tableName string, snapshot *plan.Snapshot) (*plan.ObjectRef, *plan.TableDef, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "Resolve", schemaName, tableName, snapshot) - ret0, _ := ret[0].(*ObjectRef) - ret1, _ := ret[1].(*TableDef) + ret0, _ := ret[0].(*plan.ObjectRef) + ret1, _ := ret[1].(*plan.TableDef) ret2, _ := ret[2].(error) return ret0, ret1, ret2 } @@ -395,11 +396,11 @@ func (mr *MockCompilerContext2MockRecorder) ResolveAccountIds(accountNames inter } // ResolveById mocks base method. -func (m *MockCompilerContext2) ResolveById(tableId uint64, snapshot *Snapshot) (*ObjectRef, *TableDef, error) { +func (m *MockCompilerContext2) ResolveById(tableId uint64, snapshot *plan.Snapshot) (*plan.ObjectRef, *plan.TableDef, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "ResolveById", tableId, snapshot) - ret0, _ := ret[0].(*ObjectRef) - ret1, _ := ret[1].(*TableDef) + ret0, _ := ret[0].(*plan.ObjectRef) + ret1, _ := ret[1].(*plan.TableDef) ret2, _ := ret[2].(error) return ret0, ret1, ret2 } @@ -411,11 +412,11 @@ func (mr *MockCompilerContext2MockRecorder) ResolveById(tableId, snapshot interf } // ResolveIndexTableByRef mocks base method. 
-func (m *MockCompilerContext2) ResolveIndexTableByRef(ref *ObjectRef, tblName string, snapshot *Snapshot) (*ObjectRef, *TableDef, error) { +func (m *MockCompilerContext2) ResolveIndexTableByRef(ref *plan.ObjectRef, tblName string, snapshot *plan.Snapshot) (*plan.ObjectRef, *plan.TableDef, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "ResolveIndexTableByRef", ref, tblName, snapshot) - ret0, _ := ret[0].(*ObjectRef) - ret1, _ := ret[1].(*TableDef) + ret0, _ := ret[0].(*plan.ObjectRef) + ret1, _ := ret[1].(*plan.TableDef) ret2, _ := ret[2].(error) return ret0, ret1, ret2 } @@ -427,10 +428,10 @@ func (mr *MockCompilerContext2MockRecorder) ResolveIndexTableByRef(ref, tblName, } // ResolveSnapshotWithSnapshotName mocks base method. -func (m *MockCompilerContext2) ResolveSnapshotWithSnapshotName(snapshotName string) (*Snapshot, error) { +func (m *MockCompilerContext2) ResolveSnapshotWithSnapshotName(snapshotName string) (*plan.Snapshot, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "ResolveSnapshotWithSnapshotName", snapshotName) - ret0, _ := ret[0].(*Snapshot) + ret0, _ := ret[0].(*plan.Snapshot) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -442,11 +443,11 @@ func (mr *MockCompilerContext2MockRecorder) ResolveSnapshotWithSnapshotName(snap } // ResolveSubscriptionTableById mocks base method. 
-func (m *MockCompilerContext2) ResolveSubscriptionTableById(tableId uint64, pubmeta *SubscriptionMeta) (*ObjectRef, *TableDef, error) { +func (m *MockCompilerContext2) ResolveSubscriptionTableById(tableId uint64, pubmeta *plan.SubscriptionMeta) (*plan.ObjectRef, *plan.TableDef, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "ResolveSubscriptionTableById", tableId, pubmeta) - ret0, _ := ret[0].(*ObjectRef) - ret1, _ := ret[1].(*TableDef) + ret0, _ := ret[0].(*plan.ObjectRef) + ret1, _ := ret[1].(*plan.TableDef) ret2, _ := ret[2].(error) return ret0, ret1, ret2 } @@ -458,7 +459,7 @@ func (mr *MockCompilerContext2MockRecorder) ResolveSubscriptionTableById(tableId } // ResolveUdf mocks base method. -func (m *MockCompilerContext2) ResolveUdf(name string, args []*Expr) (*function.Udf, error) { +func (m *MockCompilerContext2) ResolveUdf(name string, args []*plan.Expr) (*function.Udf, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "ResolveUdf", name, args) ret0, _ := ret[0].(*function.Udf) @@ -512,7 +513,7 @@ func (mr *MockCompilerContext2MockRecorder) SetContext(ctx interface{}) *gomock. } // SetQueryingSubscription mocks base method. -func (m *MockCompilerContext2) SetQueryingSubscription(meta *SubscriptionMeta) { +func (m *MockCompilerContext2) SetQueryingSubscription(meta *plan.SubscriptionMeta) { m.ctrl.T.Helper() m.ctrl.Call(m, "SetQueryingSubscription", meta) } @@ -524,7 +525,7 @@ func (mr *MockCompilerContext2MockRecorder) SetQueryingSubscription(meta interfa } // SetSnapshot mocks base method. -func (m *MockCompilerContext2) SetSnapshot(snapshot *Snapshot) { +func (m *MockCompilerContext2) SetSnapshot(snapshot *plan.Snapshot) { m.ctrl.T.Helper() m.ctrl.Call(m, "SetSnapshot", snapshot) } @@ -548,7 +549,7 @@ func (mr *MockCompilerContext2MockRecorder) SetViews(views interface{}) *gomock. } // Stats mocks base method. 
-func (m *MockCompilerContext2) Stats(obj *ObjectRef, snapshot *Snapshot) (*statsinfo.StatsInfo, error) { +func (m *MockCompilerContext2) Stats(obj *plan.ObjectRef, snapshot *plan.Snapshot) (*statsinfo.StatsInfo, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "Stats", obj, snapshot) ret0, _ := ret[0].(*statsinfo.StatsInfo) @@ -600,10 +601,10 @@ func (mr *MockOptimizer2MockRecorder) CurrentContext() *gomock.Call { } // Optimize mocks base method. -func (m *MockOptimizer2) Optimize(stmt tree.Statement) (*Query, error) { +func (m *MockOptimizer2) Optimize(stmt tree.Statement) (*plan.Query, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "Optimize", stmt) - ret0, _ := ret[0].(*Query) + ret0, _ := ret[0].(*plan.Query) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -638,7 +639,7 @@ func (m *MockRule) EXPECT() *MockRuleMockRecorder { } // Apply mocks base method. -func (m *MockRule) Apply(arg0 *Node, arg1 *Query, arg2 *process.Process) { +func (m *MockRule) Apply(arg0 *plan.Node, arg1 *plan.Query, arg2 *process.Process) { m.ctrl.T.Helper() m.ctrl.Call(m, "Apply", arg0, arg1, arg2) } @@ -650,7 +651,7 @@ func (mr *MockRuleMockRecorder) Apply(arg0, arg1, arg2 interface{}) *gomock.Call } // Match mocks base method. -func (m *MockRule) Match(arg0 *Node) bool { +func (m *MockRule) Match(arg0 *plan.Node) bool { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "Match", arg0) ret0, _ := ret[0].(bool) diff --git a/pkg/sql/plan/unnest.go b/pkg/sql/planner/unnest.go similarity index 97% rename from pkg/sql/plan/unnest.go rename to pkg/sql/planner/unnest.go index 0964c2a5f8bff..ddbe15aa066e5 100644 --- a/pkg/sql/plan/unnest.go +++ b/pkg/sql/planner/unnest.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/container/types" @@ -81,7 +81,7 @@ var ( ) func (builder *QueryBuilder) buildUnnest(tbl *tree.TableFunction, ctx *BindContext, exprs []*plan.Expr, children []int32) (int32, error) { - colDefs := DeepCopyColDefList(defaultColDefs) + colDefs := plan.DeepCopyColDefList(defaultColDefs) colName := findColName(tbl.Func) node := &plan.Node{ NodeType: plan.Node_FUNCTION_SCAN, diff --git a/pkg/sql/plan/update_binder.go b/pkg/sql/planner/update_binder.go similarity index 96% rename from pkg/sql/plan/update_binder.go rename to pkg/sql/planner/update_binder.go index 8863239dd33df..ddd7cf9cc6e1e 100644 --- a/pkg/sql/plan/update_binder.go +++ b/pkg/sql/planner/update_binder.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "context" @@ -23,7 +23,7 @@ import ( ) // use for on duplicate key update clause: eg: insert into t1 values(1,1),(2,2) on duplicate key update a = a + abs(b), b = values(b)-2 -func NewUpdateBinder(sysCtx context.Context, builder *QueryBuilder, ctx *BindContext, cols []*ColDef) *UpdateBinder { +func NewUpdateBinder(sysCtx context.Context, builder *QueryBuilder, ctx *BindContext, cols []*plan.ColDef) *UpdateBinder { b := &UpdateBinder{cols: cols} b.sysCtx = sysCtx b.builder = builder @@ -47,7 +47,7 @@ func (b *UpdateBinder) BindColRef(astExpr *tree.UnresolvedName, depth int32, isR func (b *UpdateBinder) bindColRef(astExpr *tree.UnresolvedName, _ int32, _ bool) (expr *plan.Expr, err error) { col := astExpr.ColName() idx := -1 - var typ *Type + var typ *plan.Type for i, c := range b.cols { if c.Name == col { idx = i diff --git a/pkg/sql/plan/utils.go b/pkg/sql/planner/utils.go similarity index 94% rename from pkg/sql/plan/utils.go rename to pkg/sql/planner/utils.go index 84775261d1f99..6a19ae6f8d0e1 100644 --- a/pkg/sql/plan/utils.go +++ b/pkg/sql/planner/utils.go @@ -12,7 
+12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "bytes" @@ -36,11 +36,11 @@ import ( "github.com/matrixorigin/matrixone/pkg/logutil" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect" "github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect/mysql" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" - "github.com/matrixorigin/matrixone/pkg/sql/plan/rule" + "github.com/matrixorigin/matrixone/pkg/sql/planner/rule" "github.com/matrixorigin/matrixone/pkg/sql/util" "github.com/matrixorigin/matrixone/pkg/stage" "github.com/matrixorigin/matrixone/pkg/stage/stageutil" @@ -82,20 +82,10 @@ func hasParam(expr *plan.Expr) bool { return true case *plan.Expr_F: - for _, arg := range exprImpl.F.Args { - if hasParam(arg) { - return true - } - } - return false + return slices.ContainsFunc(exprImpl.F.Args, hasParam) case *plan.Expr_List: - for _, arg := range exprImpl.List.List { - if hasParam(arg) { - return true - } - } - return false + return slices.ContainsFunc(exprImpl.List.List, hasParam) default: return false @@ -108,20 +98,10 @@ func hasCorrCol(expr *plan.Expr) bool { return true case *plan.Expr_F: - for _, arg := range exprImpl.F.Args { - if hasCorrCol(arg) { - return true - } - } - return false + return slices.ContainsFunc(exprImpl.F.Args, hasCorrCol) case *plan.Expr_List: - for _, arg := range exprImpl.List.List { - if hasCorrCol(arg) { - return true - } - } - return false + return slices.ContainsFunc(exprImpl.List.List, hasCorrCol) default: return false @@ -134,20 +114,10 @@ func hasSubquery(expr *plan.Expr) bool { return true case *plan.Expr_F: - for _, arg := range exprImpl.F.Args { - if hasSubquery(arg) { - return true - } - } - return 
false + return slices.ContainsFunc(exprImpl.F.Args, hasSubquery) case *plan.Expr_List: - for _, arg := range exprImpl.List.List { - if hasSubquery(arg) { - return true - } - } - return false + return slices.ContainsFunc(exprImpl.List.List, hasSubquery) default: return false @@ -160,20 +130,14 @@ func HasTag(expr *plan.Expr, tag int32) bool { return exprImpl.Col.RelPos == tag case *plan.Expr_F: - for _, arg := range exprImpl.F.Args { - if HasTag(arg, tag) { - return true - } - } - return false + return slices.ContainsFunc(exprImpl.F.Args, func(arg *plan.Expr) bool { + return HasTag(arg, tag) + }) case *plan.Expr_List: - for _, arg := range exprImpl.List.List { - if HasTag(arg, tag) { - return true - } - } - return false + return slices.ContainsFunc(exprImpl.List.List, func(arg *plan.Expr) bool { + return HasTag(arg, tag) + }) default: return false @@ -273,7 +237,7 @@ func replaceColRefs(expr *plan.Expr, tag int32, projects []*plan.Expr) *plan.Exp case *plan.Expr_Col: colRef := exprImpl.Col if colRef.RelPos == tag { - expr = DeepCopyExpr(projects[colRef.ColPos]) + expr = plan.DeepCopyExpr(projects[colRef.ColPos]) } case *plan.Expr_W: replaceColRefs(exprImpl.W.WindowFunc, tag, projects) @@ -296,7 +260,7 @@ func replaceColRefsForSet(expr *plan.Expr, projects []*plan.Expr) *plan.Expr { } case *plan.Expr_Col: - expr = DeepCopyExpr(projects[exprImpl.Col.ColPos]) + expr = plan.DeepCopyExpr(projects[exprImpl.Col.ColPos]) } return expr @@ -572,7 +536,7 @@ func deduceNewFilterList(filters, onList []*plan.Expr) []*plan.Expr { for _, filter := range filters { col := extractColRefInFilter(filter) if col != nil { - newExpr := DeepCopyExpr(filter) + newExpr := plan.DeepCopyExpr(filter) if substituteMatchColumn(newExpr, col1, col2) { newFilters = append(newFilters, newExpr) } @@ -603,7 +567,7 @@ func canMergeToBetweenAnd(expr1, expr2 *plan.Expr) bool { return false } -func extractColRefAndLiteralsInFilter(expr *plan.Expr) (col *ColRef, litType types.T, literals []*Const, colFnName 
string, hasDynamicParam bool) { +func extractColRefAndLiteralsInFilter(expr *plan.Expr) (col *plan.ColRef, litType types.T, literals []*plan.Literal, colFnName string, hasDynamicParam bool) { fn := expr.GetF() if fn == nil || len(fn.Args) == 0 { return @@ -636,11 +600,11 @@ func extractColRefAndLiteralsInFilter(expr *plan.Expr) (col *ColRef, litType typ return } litType = types.T(fn.Args[0].Typ.Id) - literals = []*Const{lit} + literals = []*plan.Literal{lit} - case "between": + case "between", "in_range": litType = types.T(fn.Args[0].Typ.Id) - literals = []*Const{fn.Args[1].GetLit(), fn.Args[2].GetLit()} + literals = []*plan.Literal{fn.Args[1].GetLit(), fn.Args[2].GetLit()} } return @@ -672,7 +636,7 @@ func extractColRefAndLiteralsInFilter(expr *plan.Expr) (col *ColRef, litType typ // // Returns the column reference if the expression contains exactly one unique column reference, // nil otherwise. -func extractColRefInFilter(expr *plan.Expr) *ColRef { +func extractColRefInFilter(expr *plan.Expr) *plan.ColRef { switch exprImpl := expr.Expr.(type) { case *plan.Expr_Col: return exprImpl.Col @@ -709,12 +673,12 @@ func extractColRefInFilter(expr *plan.Expr) *ColRef { // for col1=col2 and col3 = col4, trying to deduce new pred // for example , if col1 and col3 are the same, then we can deduce that col2=col4 -func deduceTranstivity(expr *plan.Expr, col1, col2, col3, col4 *ColRef) (bool, *plan.Expr) { +func deduceTranstivity(expr *plan.Expr, col1, col2, col3, col4 *plan.ColRef) (bool, *plan.Expr) { if col1.ColRefString() == col3.ColRefString() || col1.ColRefString() == col4.ColRefString() || col2.ColRefString() == col3.ColRefString() || col2.ColRefString() == col4.ColRefString() { - retExpr := DeepCopyExpr(expr) + retExpr := plan.DeepCopyExpr(expr) substituteMatchColumn(retExpr, col3, col4) return true, retExpr } @@ -722,7 +686,7 @@ func deduceTranstivity(expr *plan.Expr, col1, col2, col3, col4 *ColRef) (bool, * } // if match col1 in expr, substitute it to col2. 
and othterwise -func substituteMatchColumn(expr *plan.Expr, onPredCol1, onPredCol2 *ColRef) bool { +func substituteMatchColumn(expr *plan.Expr, onPredCol1, onPredCol2 *plan.ColRef) bool { var ret bool switch exprImpl := expr.Expr.(type) { case *plan.Expr_Col: @@ -748,7 +712,7 @@ func substituteMatchColumn(expr *plan.Expr, onPredCol1, onPredCol2 *ColRef) bool return ret } -func checkStrictJoinPred(onPred *plan.Expr) (bool, *ColRef, *ColRef) { +func checkStrictJoinPred(onPred *plan.Expr) (bool, *plan.ColRef, *plan.ColRef) { //onPred must be equality, children must be column name switch onPredImpl := onPred.Expr.(type) { case *plan.Expr_F: @@ -756,7 +720,7 @@ func checkStrictJoinPred(onPred *plan.Expr) (bool, *ColRef, *ColRef) { return false, nil, nil } args := onPredImpl.F.Args - var col1, col2 *ColRef + var col1, col2 *plan.ColRef switch child1 := args[0].Expr.(type) { case *plan.Expr_Col: col1 = child1.Col @@ -817,7 +781,7 @@ func rejectsNull(filter *plan.Expr, proc *process.Process) bool { return true // in is always null rejecting } - filter = replaceColRefWithNull(DeepCopyExpr(filter)) + filter = replaceColRefWithNull(plan.DeepCopyExpr(filter)) filter, err := ConstantFold(batch.EmptyForConstFoldBatch, filter, proc, false, true) if err != nil { @@ -1462,8 +1426,8 @@ func unwindTupleComparison(ctx context.Context, nonEqOp, op string, leftExprs, r } expr, err := BindFuncExprImplByPlanExpr(ctx, nonEqOp, []*plan.Expr{ - DeepCopyExpr(leftExprs[idx]), - DeepCopyExpr(rightExprs[idx]), + plan.DeepCopyExpr(leftExprs[idx]), + plan.DeepCopyExpr(rightExprs[idx]), }) if err != nil { return nil, err @@ -2120,7 +2084,7 @@ func ReadDir(param *tree.ExternParam) (fileList []string, fileSize []int64, err // GetUniqueColAndIdxFromTableDef // if get table: t1(a int primary key, b int, c int, d int, unique key(b,c)); // return : []map[string]int { {'a'=1}, {'b'=2,'c'=3} } -func GetUniqueColAndIdxFromTableDef(tableDef *TableDef) ([]map[string]int, map[string]bool) { +func 
GetUniqueColAndIdxFromTableDef(tableDef *plan.TableDef) ([]map[string]int, map[string]bool) { uniqueCols := make([]map[string]int, 0, len(tableDef.Cols)) uniqueColNames := make(map[string]bool) if tableDef.Pkey != nil && !onlyHasHiddenPrimaryKey(tableDef) { @@ -2149,16 +2113,16 @@ func GetUniqueColAndIdxFromTableDef(tableDef *TableDef) ([]map[string]int, map[s // if get table: t1(a int primary key, b int, c int, d int, unique key(b,c)); // uniqueCols is: []map[string]int { {'a'=1}, {'b'=2,'c'=3} } // we will get expr like: 'leftTag.a = rightTag.a or (leftTag.b = rightTag.b and leftTag.c = rightTag. c) -func GenUniqueColJoinExpr(ctx context.Context, tableDef *TableDef, uniqueCols []map[string]int, leftTag int32, rightTag int32) (*Expr, error) { - var checkExpr *Expr +func GenUniqueColJoinExpr(ctx context.Context, tableDef *plan.TableDef, uniqueCols []map[string]int, leftTag int32, rightTag int32) (*plan.Expr, error) { + var checkExpr *plan.Expr var err error for i, uniqueColMap := range uniqueCols { - var condExpr *Expr + var condExpr *plan.Expr condIdx := int(0) for _, colIdx := range uniqueColMap { col := tableDef.Cols[colIdx] - leftExpr := &Expr{ + leftExpr := &plan.Expr{ Typ: col.Typ, Expr: &plan.Expr_Col{ Col: &plan.ColRef{ @@ -2176,14 +2140,14 @@ func GenUniqueColJoinExpr(ctx context.Context, tableDef *TableDef, uniqueCols [] }, }, } - eqExpr, err := BindFuncExprImplByPlanExpr(ctx, "=", []*Expr{leftExpr, rightExpr}) + eqExpr, err := BindFuncExprImplByPlanExpr(ctx, "=", []*plan.Expr{leftExpr, rightExpr}) if err != nil { return nil, err } if condIdx == 0 { condExpr = eqExpr } else { - condExpr, err = BindFuncExprImplByPlanExpr(ctx, "and", []*Expr{condExpr, eqExpr}) + condExpr, err = BindFuncExprImplByPlanExpr(ctx, "and", []*plan.Expr{condExpr, eqExpr}) if err != nil { return nil, err } @@ -2194,7 +2158,7 @@ func GenUniqueColJoinExpr(ctx context.Context, tableDef *TableDef, uniqueCols [] if i == 0 { checkExpr = condExpr } else { - checkExpr, err = 
BindFuncExprImplByPlanExpr(ctx, "or", []*Expr{checkExpr, condExpr}) + checkExpr, err = BindFuncExprImplByPlanExpr(ctx, "or", []*plan.Expr{checkExpr, condExpr}) if err != nil { return nil, err } @@ -2208,16 +2172,16 @@ func GenUniqueColJoinExpr(ctx context.Context, tableDef *TableDef, uniqueCols [] // if get table: t1(a int primary key, b int, c int, d int, unique key(b,c)); // we get batch like [1,2,3,4, origin_a, origin_b, origin_c, origin_d, row_id ....]。 // we get expr like: []*Expr{ 1=origin_a , (2 = origin_b and 3 = origin_c) } -func GenUniqueColCheckExpr(ctx context.Context, tableDef *TableDef, uniqueCols []map[string]int, colCount int) ([]*Expr, error) { - checkExpr := make([]*Expr, len(uniqueCols)) +func GenUniqueColCheckExpr(ctx context.Context, tableDef *plan.TableDef, uniqueCols []map[string]int, colCount int) ([]*plan.Expr, error) { + checkExpr := make([]*plan.Expr, len(uniqueCols)) for i, uniqueColMap := range uniqueCols { - var condExpr *Expr + var condExpr *plan.Expr condIdx := int(0) for _, colIdx := range uniqueColMap { col := tableDef.Cols[colIdx] // insert values - leftExpr := &Expr{ + leftExpr := &plan.Expr{ Typ: col.Typ, Expr: &plan.Expr_Col{ Col: &plan.ColRef{ @@ -2235,14 +2199,14 @@ func GenUniqueColCheckExpr(ctx context.Context, tableDef *TableDef, uniqueCols [ }, }, } - eqExpr, err := BindFuncExprImplByPlanExpr(ctx, "=", []*Expr{leftExpr, rightExpr}) + eqExpr, err := BindFuncExprImplByPlanExpr(ctx, "=", []*plan.Expr{leftExpr, rightExpr}) if err != nil { return nil, err } if condIdx == 0 { condExpr = eqExpr } else { - condExpr, err = BindFuncExprImplByPlanExpr(ctx, "and", []*Expr{condExpr, eqExpr}) + condExpr, err = BindFuncExprImplByPlanExpr(ctx, "and", []*plan.Expr{condExpr, eqExpr}) if err != nil { return nil, err } @@ -2254,7 +2218,7 @@ func GenUniqueColCheckExpr(ctx context.Context, tableDef *TableDef, uniqueCols [ return checkExpr, nil } -func onlyContainsTag(filter *Expr, tag int32) bool { +func onlyContainsTag(filter *plan.Expr, tag 
int32) bool { switch ex := filter.Expr.(type) { case *plan.Expr_Col: return ex.Col.RelPos == tag @@ -2416,7 +2380,7 @@ func doFormatExprInConsole(expr *plan.Expr, out *bytes.Buffer, depth int, option } // databaseIsValid checks whether the database exists or not. -func databaseIsValid(dbName string, ctx CompilerContext, snapshot *Snapshot) (string, error) { +func databaseIsValid(dbName string, ctx CompilerContext, snapshot *plan.Snapshot) (string, error) { connectDBFirst := false if len(dbName) == 0 { connectDBFirst = true @@ -2476,16 +2440,11 @@ func detectedExprWhetherTimeRelated(expr *plan.Expr) bool { } // current_timestamp() + 1 - for _, arg := range ef.F.Args { - if detectedExprWhetherTimeRelated(arg) { - return true - } - } + return slices.ContainsFunc(ef.F.Args, detectedExprWhetherTimeRelated) } - return false } -func ResetPreparePlan(ctx CompilerContext, preparePlan *Plan) ([]*plan.ObjectRef, []int32, error) { +func ResetPreparePlan(ctx CompilerContext, preparePlan *plan.Plan) ([]*plan.ObjectRef, []int32, error) { // dcl tcl is not support var schemas []*plan.ObjectRef var paramTypes []int32 @@ -2572,20 +2531,10 @@ func HasMoCtrl(expr *plan.Expr) bool { if exprImpl.F.Func.ObjName == "mo_ctl" || exprImpl.F.Func.ObjName == "fault_inject" { return true } - for _, arg := range exprImpl.F.Args { - if HasMoCtrl(arg) { - return true - } - } - return false + return slices.ContainsFunc(exprImpl.F.Args, HasMoCtrl) case *plan.Expr_List: - for _, arg := range exprImpl.List.List { - if HasMoCtrl(arg) { - return true - } - } - return false + return slices.ContainsFunc(exprImpl.List.List, HasMoCtrl) default: return false @@ -2600,7 +2549,7 @@ func IsFkSelfRefer(fkDbName, fkTableName, curDbName, curTableName string) bool { // HasFkSelfReferOnly checks the foreign key referencing itself only. // If there is no children tables, it also returns true // the tbleId 0 is special. it always denotes the table itself. 
-func HasFkSelfReferOnly(tableDef *TableDef) bool { +func HasFkSelfReferOnly(tableDef *plan.TableDef) bool { for _, tbl := range tableDef.RefChildTbls { if tbl != 0 { return false @@ -2609,7 +2558,7 @@ func HasFkSelfReferOnly(tableDef *TableDef) bool { return true } -func IsFalseExpr(e *Expr) bool { +func IsFalseExpr(e *plan.Expr) bool { if e == nil || e.GetTyp().Id != int32(types.T_bool) || e.GetLit() == nil { return false } @@ -2618,7 +2567,7 @@ func IsFalseExpr(e *Expr) bool { } return false } -func MakeFalseExpr() *Expr { +func MakeFalseExpr() *plan.Expr { return &plan.Expr{ Typ: plan.Type{ Id: int32(types.T_bool), @@ -2632,7 +2581,7 @@ func MakeFalseExpr() *Expr { } } -func MakeCPKEYRuntimeFilter(tag int32, upperlimit int32, expr *Expr, tableDef *plan.TableDef, notOnPk bool) *plan.RuntimeFilterSpec { +func MakeCPKEYRuntimeFilter(tag int32, upperlimit int32, expr *plan.Expr, tableDef *plan.TableDef, notOnPk bool) *plan.RuntimeFilterSpec { cpkeyIdx, ok := tableDef.Name2ColIndex[catalog.CPrimaryKeyColName] if !ok { panic("fail to convert runtime filter to composite primary key!") @@ -2649,7 +2598,7 @@ func MakeCPKEYRuntimeFilter(tag int32, upperlimit int32, expr *Expr, tableDef *p } } -func MakeSerialRuntimeFilter(ctx context.Context, tag int32, matchPrefix bool, upperlimit int32, expr *Expr, notOnPk bool) *plan.RuntimeFilterSpec { +func MakeSerialRuntimeFilter(ctx context.Context, tag int32, matchPrefix bool, upperlimit int32, expr *plan.Expr, notOnPk bool) *plan.RuntimeFilterSpec { serialExpr, _ := BindFuncExprImplByPlanExpr(ctx, "serial", []*plan.Expr{expr}) return &plan.RuntimeFilterSpec{ Tag: tag, @@ -2660,7 +2609,7 @@ func MakeSerialRuntimeFilter(ctx context.Context, tag int32, matchPrefix bool, u } } -func MakeRuntimeFilter(tag int32, matchPrefix bool, upperlimit int32, expr *Expr, notOnPk bool) *plan.RuntimeFilterSpec { +func MakeRuntimeFilter(tag int32, matchPrefix bool, upperlimit int32, expr *plan.Expr, notOnPk bool) *plan.RuntimeFilterSpec { return 
&plan.RuntimeFilterSpec{ Tag: tag, UpperLimit: upperlimit, @@ -2670,7 +2619,7 @@ func MakeRuntimeFilter(tag int32, matchPrefix bool, upperlimit int32, expr *Expr } } -func MakeIntervalExpr(num int64, str string) *Expr { +func MakeIntervalExpr(num int64, str string) *plan.Expr { arg0 := makePlan2Int64ConstExprWithType(num) arg1 := makePlan2StringConstExprWithType(str, false) return &plan.Expr{ @@ -2679,13 +2628,13 @@ func MakeIntervalExpr(num int64, str string) *Expr { }, Expr: &plan.Expr_List{ List: &plan.ExprList{ - List: []*Expr{arg0, arg1}, + List: []*plan.Expr{arg0, arg1}, }, }, } } -func GetColExpr(typ Type, relpos int32, colpos int32) *plan.Expr { +func GetColExpr(typ plan.Type, relpos int32, colpos int32) *plan.Expr { return &plan.Expr{ Typ: typ, Expr: &plan.Expr_Col{ @@ -2697,7 +2646,7 @@ func GetColExpr(typ Type, relpos int32, colpos int32) *plan.Expr { } } -func MakeSerialExtractExpr(ctx context.Context, fromExpr *Expr, origType Type, serialIdx int64) (*Expr, error) { +func MakeSerialExtractExpr(ctx context.Context, fromExpr *plan.Expr, origType plan.Type, serialIdx int64) (*plan.Expr, error) { return BindFuncExprImplByPlanExpr(ctx, "serial_extract", []*plan.Expr{ fromExpr, { @@ -2719,7 +2668,7 @@ func MakeSerialExtractExpr(ctx context.Context, fromExpr *Expr, origType Type, s }) } -func MakeInExpr(ctx context.Context, left *Expr, length int32, data []byte, matchPrefix bool) *Expr { +func MakeInExpr(ctx context.Context, left *plan.Expr, length int32, data []byte, matchPrefix bool) *plan.Expr { rightArg := &plan.Expr{ Typ: left.Typ, Expr: &plan.Expr_Vec{ @@ -2763,7 +2712,7 @@ func MakeInExpr(ctx context.Context, left *Expr, length int32, data []byte, matc } // FillValuesOfParamsInPlan replaces the params by their values -func FillValuesOfParamsInPlan(ctx context.Context, preparePlan *Plan, paramVals []any) (*Plan, error) { +func FillValuesOfParamsInPlan(ctx context.Context, preparePlan *plan.Plan, paramVals []any) (*plan.Plan, error) { copied := 
preparePlan switch pp := copied.Plan.(type) { @@ -2787,8 +2736,8 @@ func FillValuesOfParamsInPlan(ctx context.Context, preparePlan *Plan, paramVals return copied, nil } -func replaceParamVals(ctx context.Context, plan0 *Plan, paramVals []any) error { - params := make([]*Expr, len(paramVals)) +func replaceParamVals(ctx context.Context, plan0 *plan.Plan, paramVals []any) error { + params := make([]*plan.Expr, len(paramVals)) for i, val := range paramVals { if val == nil { pc := &plan.Literal{ @@ -2826,7 +2775,7 @@ func (builder *QueryBuilder) addNameByColRef(tag int32, tableDef *plan.TableDef) } } -func GetRowSizeFromTableDef(tableDef *TableDef, ignoreHiddenKey bool) float64 { +func GetRowSizeFromTableDef(tableDef *plan.TableDef, ignoreHiddenKey bool) float64 { size := int32(0) for _, col := range tableDef.Cols { if col.Hidden && ignoreHiddenKey { @@ -2886,7 +2835,7 @@ func Find[T ~string | ~int, S any](data map[T]S, val T) bool { return false } -func containGrouping(expr *Expr) bool { +func containGrouping(expr *plan.Expr) bool { var ret bool switch exprImpl := expr.Expr.(type) { @@ -2902,7 +2851,7 @@ func containGrouping(expr *Expr) bool { return ret } -func checkGrouping(ctx context.Context, expr *Expr) error { +func checkGrouping(ctx context.Context, expr *plan.Expr) error { if containGrouping(expr) { return moerr.NewSyntaxError(ctx, "aggregate function grouping not allowed in WHERE clause") } @@ -2912,7 +2861,7 @@ func checkGrouping(ctx context.Context, expr *Expr) error { // a > current_time() + 1 and b < ? + c and d > ? 
+ 2 // => // a > foldVal1 and b < foldVal2 + c and d > foldVal3 -func ReplaceFoldExpr(proc *process.Process, expr *Expr, exes *[]colexec.ExpressionExecutor) (bool, error) { +func ReplaceFoldExpr(proc *process.Process, expr *plan.Expr, exes *[]colexec.ExpressionExecutor) (bool, error) { allCanFold := true var err error @@ -2988,7 +2937,7 @@ func ReplaceFoldExpr(proc *process.Process, expr *Expr, exes *[]colexec.Expressi } } -func EvalFoldExpr(proc *process.Process, expr *Expr, executors *[]colexec.ExpressionExecutor) (err error) { +func EvalFoldExpr(proc *process.Process, expr *plan.Expr, executors *[]colexec.ExpressionExecutor) (err error) { switch ef := expr.Expr.(type) { case *plan.Expr_Fold: var vec *vector.Vector @@ -3034,7 +2983,7 @@ func EvalFoldExpr(proc *process.Process, expr *Expr, executors *[]colexec.Expres return nil } -func HasFoldExprForList(exprs []*Expr) bool { +func HasFoldExprForList(exprs []*plan.Expr) bool { for _, e := range exprs { hasFoldExpr := HasFoldValExpr(e) if hasFoldExpr { @@ -3044,7 +2993,7 @@ func HasFoldExprForList(exprs []*Expr) bool { return false } -func HasFoldValExpr(expr *Expr) bool { +func HasFoldValExpr(expr *plan.Expr) bool { switch ef := expr.Expr.(type) { case *plan.Expr_Fold: return true @@ -3193,7 +3142,7 @@ func getConstantBytes(vec *vector.Vector, transAll bool, row uint64) (ret []byte // } // DbNameOfObjRef return subscription name of ObjectRef if exists, to avoid the mismatching of account id and db name -func DbNameOfObjRef(objRef *ObjectRef) string { +func DbNameOfObjRef(objRef *plan.ObjectRef) string { if objRef.SubscriptionName == "" { return objRef.SchemaName } @@ -3215,7 +3164,7 @@ func doResolveTimeStamp(timeStamp string) (ts int64, err error) { return ts, nil } -func onlyHasHiddenPrimaryKey(tableDef *TableDef) bool { +func onlyHasHiddenPrimaryKey(tableDef *plan.TableDef) bool { if tableDef == nil { return false } diff --git a/pkg/sql/plan/utils_test.go b/pkg/sql/planner/utils_test.go similarity index 99% 
rename from pkg/sql/plan/utils_test.go rename to pkg/sql/planner/utils_test.go index 85a23ae02959b..cb11977c29247 100644 --- a/pkg/sql/plan/utils_test.go +++ b/pkg/sql/planner/utils_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "context" diff --git a/pkg/sql/plan/visit_plan.go b/pkg/sql/planner/visit_plan.go similarity index 91% rename from pkg/sql/plan/visit_plan.go rename to pkg/sql/planner/visit_plan.go index 643e5344cc733..09ec2737a04a8 100644 --- a/pkg/sql/plan/visit_plan.go +++ b/pkg/sql/planner/visit_plan.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "context" @@ -22,19 +22,19 @@ import ( ) type VisitPlanRule interface { - MatchNode(*Node) bool + MatchNode(*plan.Node) bool IsApplyExpr() bool - ApplyNode(*Node) error - ApplyExpr(*Expr) (*Expr, error) + ApplyNode(*plan.Node) error + ApplyExpr(*plan.Expr) (*plan.Expr, error) } type VisitPlan struct { - plan *Plan + plan *plan.Plan isUpdatePlan bool rules []VisitPlanRule } -func NewVisitPlan(pl *Plan, rules []VisitPlanRule) *VisitPlan { +func NewVisitPlan(pl *plan.Plan, rules []VisitPlanRule) *VisitPlan { return &VisitPlan{ plan: pl, isUpdatePlan: false, @@ -42,7 +42,7 @@ func NewVisitPlan(pl *Plan, rules []VisitPlanRule) *VisitPlan { } } -func (vq *VisitPlan) visitNode(ctx context.Context, qry *Query, node *Node, idx int32) error { +func (vq *VisitPlan) visitNode(ctx context.Context, qry *plan.Query, node *plan.Node, idx int32) error { for i := range node.Children { if err := vq.visitNode(ctx, qry, qry.Nodes[node.Children[i]], node.Children[i]); err != nil { return err @@ -66,7 +66,7 @@ func (vq *VisitPlan) visitNode(ctx context.Context, qry *Query, node *Node, idx return nil } -func (vq *VisitPlan) exploreNode(ctx context.Context, rule VisitPlanRule, node *Node, _ 
int32) error { +func (vq *VisitPlan) exploreNode(ctx context.Context, rule VisitPlanRule, node *plan.Node, _ int32) error { var err error if node.Limit != nil { node.Limit, err = rule.ApplyExpr(node.Limit) @@ -143,7 +143,7 @@ func (vq *VisitPlan) exploreNode(ctx context.Context, rule VisitPlanRule, node * }, } - applyAndResetType := func(e *Expr) (*Expr, error) { + applyAndResetType := func(e *plan.Expr) (*plan.Expr, error) { oldType := e.Typ e, err = rule.ApplyExpr(e) if err != nil { @@ -196,7 +196,7 @@ func (vq *VisitPlan) exploreNode(ctx context.Context, rule VisitPlanRule, node * func (vq *VisitPlan) Visit(ctx context.Context) error { switch pl := vq.plan.Plan.(type) { - case *Plan_Query: + case *plan.Plan_Query: qry := pl.Query vq.isUpdatePlan = (pl.Query.StmtType == plan.Query_UPDATE) diff --git a/pkg/sql/plan/visit_plan_rule.go b/pkg/sql/planner/visit_plan_rule.go similarity index 91% rename from pkg/sql/plan/visit_plan_rule.go rename to pkg/sql/planner/visit_plan_rule.go index b7f44145ca623..f0becea52cab8 100644 --- a/pkg/sql/plan/visit_plan_rule.go +++ b/pkg/sql/planner/visit_plan_rule.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package plan +package planner import ( "context" @@ -42,7 +42,7 @@ func NewGetParamRule() *GetParamRule { } } -func (rule *GetParamRule) MatchNode(node *Node) bool { +func (rule *GetParamRule) MatchNode(node *plan.Node) bool { if node.NodeType == plan.Node_TABLE_SCAN || node.NodeType == plan.Node_INSERT { rule.schemas = append(rule.schemas, &plan.ObjectRef{ Server: int64(node.TableDef.Version), //we use this unused field to store table's version @@ -75,7 +75,7 @@ func (rule *GetParamRule) IsApplyExpr() bool { return true } -func (rule *GetParamRule) ApplyNode(node *Node) error { +func (rule *GetParamRule) ApplyNode(node *plan.Node) error { return nil } @@ -134,7 +134,7 @@ func NewResetParamOrderRule(params map[int]int) *ResetParamOrderRule { } } -func (rule *ResetParamOrderRule) MatchNode(_ *Node) bool { +func (rule *ResetParamOrderRule) MatchNode(_ *plan.Node) bool { return false } @@ -142,7 +142,7 @@ func (rule *ResetParamOrderRule) IsApplyExpr() bool { return true } -func (rule *ResetParamOrderRule) ApplyNode(node *Node) error { +func (rule *ResetParamOrderRule) ApplyNode(node *plan.Node) error { return nil } @@ -170,17 +170,17 @@ func (rule *ResetParamOrderRule) ApplyExpr(e *plan.Expr) (*plan.Expr, error) { type ResetParamRefRule struct { ctx context.Context - params []*Expr + params []*plan.Expr } -func NewResetParamRefRule(ctx context.Context, params []*Expr) *ResetParamRefRule { +func NewResetParamRefRule(ctx context.Context, params []*plan.Expr) *ResetParamRefRule { return &ResetParamRefRule{ ctx: ctx, params: params, } } -func (rule *ResetParamRefRule) MatchNode(_ *Node) bool { +func (rule *ResetParamRefRule) MatchNode(_ *plan.Node) bool { return false } @@ -188,7 +188,7 @@ func (rule *ResetParamRefRule) IsApplyExpr() bool { return true } -func (rule *ResetParamRefRule) ApplyNode(node *Node) error { +func (rule *ResetParamRefRule) ApplyNode(node *plan.Node) error { return nil } diff --git a/pkg/sql/plan/where_binder.go b/pkg/sql/planner/where_binder.go 
similarity index 99% rename from pkg/sql/plan/where_binder.go rename to pkg/sql/planner/where_binder.go index bdfe76d814686..74a9080e76a31 100644 --- a/pkg/sql/plan/where_binder.go +++ b/pkg/sql/planner/where_binder.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plan +package planner import ( "github.com/matrixorigin/matrixone/pkg/common/moerr" diff --git a/pkg/sql/util/composite_clusterby_util.go b/pkg/sql/util/composite_clusterby_util.go index 87c9e769ebdaa..a904030ad460a 100644 --- a/pkg/sql/util/composite_clusterby_util.go +++ b/pkg/sql/util/composite_clusterby_util.go @@ -22,7 +22,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/catalog" "github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/container/vector" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/vm/process" ) diff --git a/pkg/sql/util/eval_expr_util.go b/pkg/sql/util/eval_expr_util.go index 3c9c723a50710..1b1cf620ac0db 100644 --- a/pkg/sql/util/eval_expr_util.go +++ b/pkg/sql/util/eval_expr_util.go @@ -30,8 +30,8 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/datalink" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" "github.com/matrixorigin/matrixone/pkg/vm/process" "golang.org/x/exp/constraints" ) diff --git a/pkg/sql/util/index_util.go b/pkg/sql/util/index_util.go index 3c8d049e143b0..7ae8cacf11352 100644 --- a/pkg/sql/util/index_util.go +++ b/pkg/sql/util/index_util.go @@ -20,14 +20,13 @@ import ( "github.com/cespare/xxhash/v2" "github.com/google/uuid" - "github.com/matrixorigin/matrixone/pkg/catalog" 
"github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/container/nulls" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/vm/process" ) diff --git a/pkg/sql/util/index_util_test.go b/pkg/sql/util/index_util_test.go index 1bd950a342a17..a1e84f279d7f6 100644 --- a/pkg/sql/util/index_util_test.go +++ b/pkg/sql/util/index_util_test.go @@ -18,15 +18,14 @@ import ( "context" "testing" - "github.com/stretchr/testify/require" - "github.com/matrixorigin/matrixone/pkg/catalog" "github.com/matrixorigin/matrixone/pkg/container/nulls" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/vm/process" + "github.com/stretchr/testify/require" ) func TestBuildIndexTableName(t *testing.T) { diff --git a/pkg/sql/util/util.go b/pkg/sql/util/util.go index 744887c0e941f..4649d3c57aae4 100644 --- a/pkg/sql/util/util.go +++ b/pkg/sql/util/util.go @@ -276,7 +276,7 @@ type AccountIdResolver func(accountName string) (uint32, error) // - If account_id is not found, keeps the original account = 'xxx' condition // // For non-system accounts: -// - No conversion is performed because account_id filter is already added in tablescan during plan building. QueryBuiler::buildTable pkg/sql/plan/query_builder.go:4889 +// - No conversion is performed because account_id filter is already added in tablescan during plan building. 
QueryBuilder::buildTable pkg/sql/planner/query_builder.go:4889 // // tableAliasMap is a map of table names/aliases that refer to statement_info or metric table. If nil, only checks for unqualified column names // or table name "statement_info"/"metric". This map is used to verify if table-qualified column names (e.g., s.account, m.account) refer to statement_info or metric. diff --git a/pkg/tests/dml/dml_test.go b/pkg/tests/dml/dml_test.go index 49376bad692cf..b0ce310cd5cfa 100644 --- a/pkg/tests/dml/dml_test.go +++ b/pkg/tests/dml/dml_test.go @@ -27,12 +27,11 @@ import ( "time" _ "github.com/go-sql-driver/mysql" - "github.com/stretchr/testify/require" - "github.com/matrixorigin/matrixone/pkg/embed" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/tests/testutils" "github.com/matrixorigin/matrixone/pkg/util/executor" + "github.com/stretchr/testify/require" ) func TestDeleteAndSelect(t *testing.T) { @@ -74,8 +73,8 @@ func TestDeleteAndSelect(t *testing.T) { require.NoError(t, err) res.Close() - plan.SetForceScanOnMultiCN(true) - defer plan.SetForceScanOnMultiCN(false) + planner.SetForceScanOnMultiCN(true) + defer planner.SetForceScanOnMultiCN(false) //select * from t where a > 24500; res, err = exec.Exec( ctx, diff --git a/pkg/tests/txn/cluster_mo_ctl_test.go b/pkg/tests/txn/cluster_mo_ctl_test.go index 80e04d276d1ea..8ef2e268d64c5 100644 --- a/pkg/tests/txn/cluster_mo_ctl_test.go +++ b/pkg/tests/txn/cluster_mo_ctl_test.go @@ -22,7 +22,7 @@ import ( "github.com/lni/goutils/leaktest" "github.com/matrixorigin/matrixone/pkg/pb/timestamp" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function/ctl" + "github.com/matrixorigin/matrixone/pkg/sql/function/ctl" "github.com/matrixorigin/matrixone/pkg/util/json" "github.com/stretchr/testify/require" ) diff --git a/pkg/testutil/testengine/testengine.go b/pkg/testutil/testengine/testengine.go index 21778baa95f76..97f57d309366b 100644
--- a/pkg/testutil/testengine/testengine.go +++ b/pkg/testutil/testengine/testengine.go @@ -17,16 +17,15 @@ package testengine import ( "context" - "github.com/matrixorigin/matrixone/pkg/catalog" - "github.com/matrixorigin/matrixone/pkg/defines" - "github.com/google/uuid" + "github.com/matrixorigin/matrixone/pkg/catalog" "github.com/matrixorigin/matrixone/pkg/clusterservice" "github.com/matrixorigin/matrixone/pkg/common/mpool" "github.com/matrixorigin/matrixone/pkg/common/runtime" + "github.com/matrixorigin/matrixone/pkg/defines" "github.com/matrixorigin/matrixone/pkg/pb/metadata" "github.com/matrixorigin/matrixone/pkg/pb/timestamp" - "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/txn/storage/memorystorage" "github.com/matrixorigin/matrixone/pkg/vm/engine" @@ -38,7 +37,7 @@ func New( ) ( eng engine.Engine, client client.TxnClient, - compilerContext plan.CompilerContext, + compilerContext planner.CompilerContext, ) { ctx = defines.AttachAccountId(ctx, catalog.System_Account) ck := runtime.ServiceRuntime("").Clock() diff --git a/pkg/txn/storage/tae/storage_debug.go b/pkg/txn/storage/tae/storage_debug.go index 95263501220f6..1611ac96229f2 100644 --- a/pkg/txn/storage/tae/storage_debug.go +++ b/pkg/txn/storage/tae/storage_debug.go @@ -16,14 +16,14 @@ package taestorage import ( "context" - "github.com/matrixorigin/matrixone/pkg/vm/engine/cmd_util" "github.com/fagongzi/util/protoc" "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/pb/api" "github.com/matrixorigin/matrixone/pkg/pb/txn" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function/ctl" + "github.com/matrixorigin/matrixone/pkg/sql/function/ctl" + "github.com/matrixorigin/matrixone/pkg/vm/engine/cmd_util" ) func (s *taeStorage) Debug(ctx context.Context, diff --git 
a/pkg/vm/engine/disttae/cache/catalog.go b/pkg/vm/engine/disttae/cache/catalog.go index dede5c359fcc4..500c369275da8 100644 --- a/pkg/vm/engine/disttae/cache/catalog.go +++ b/pkg/vm/engine/disttae/cache/catalog.go @@ -19,13 +19,6 @@ import ( "strings" "sync" - "go.uber.org/zap" - - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" - "github.com/matrixorigin/matrixone/pkg/sql/util" - - "github.com/tidwall/btree" - "github.com/matrixorigin/matrixone/pkg/catalog" "github.com/matrixorigin/matrixone/pkg/compress" "github.com/matrixorigin/matrixone/pkg/container/batch" @@ -35,7 +28,11 @@ import ( "github.com/matrixorigin/matrixone/pkg/pb/api" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/pb/timestamp" + "github.com/matrixorigin/matrixone/pkg/sql/planner" + "github.com/matrixorigin/matrixone/pkg/sql/util" "github.com/matrixorigin/matrixone/pkg/vm/engine" + "github.com/tidwall/btree" + "go.uber.org/zap" ) func NewCatalog() *CatalogCache { @@ -827,10 +824,10 @@ func getTableDef(tblItem *TableItem, coldefs []engine.TableDef) (*plan.TableDef, } if primarykey != nil && primarykey.PkeyColName == catalog.CPrimaryKeyColName { - primarykey.CompPkeyCol = plan2.GetColDefFromTable(cols, catalog.CPrimaryKeyColName) + primarykey.CompPkeyCol = planner.GetColDefFromTable(cols, catalog.CPrimaryKeyColName) } if clusterByDef != nil && util.JudgeIsCompositeClusterByColumn(clusterByDef.Name) { - clusterByDef.CompCbkeyCol = plan2.GetColDefFromTable(cols, clusterByDef.Name) + clusterByDef.CompCbkeyCol = planner.GetColDefFromTable(cols, clusterByDef.Name) } return &plan.TableDef{ diff --git a/pkg/vm/engine/disttae/change_handle.go b/pkg/vm/engine/disttae/change_handle.go index 67e3fce609007..2e8036090b218 100644 --- a/pkg/vm/engine/disttae/change_handle.go +++ b/pkg/vm/engine/disttae/change_handle.go @@ -17,6 +17,7 @@ package disttae import ( "context" "fmt" + "sync" "time" "github.com/matrixorigin/matrixone/pkg/common/moerr" @@ -28,14 +29,13 @@ 
import ( "github.com/matrixorigin/matrixone/pkg/logutil" "github.com/matrixorigin/matrixone/pkg/objectio" "github.com/matrixorigin/matrixone/pkg/objectio/ioutil" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/vm/engine" "github.com/matrixorigin/matrixone/pkg/vm/engine/cmd_util" "github.com/matrixorigin/matrixone/pkg/vm/engine/disttae/logtailreplay" "github.com/matrixorigin/matrixone/pkg/vm/engine/readutil" "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/db/checkpoint" "go.uber.org/zap" - "sync" ) const DefaultLoadParallism = 20 @@ -353,7 +353,7 @@ func (h *CheckpointChangesHandle) Next( bat := batch.NewWithSize(len(tblDef.Cols)) for i, col := range tblDef.Cols { bat.Attrs = append(bat.Attrs, col.Name) - typ := plan2.ExprType2Type(&col.Typ) + typ := planner.ExprType2Type(&col.Typ) bat.Vecs[i] = vector.NewVec(typ) } return bat diff --git a/pkg/vm/engine/disttae/db.go b/pkg/vm/engine/disttae/db.go index d4dc3eb24f037..636b1d3e21ea9 100644 --- a/pkg/vm/engine/disttae/db.go +++ b/pkg/vm/engine/disttae/db.go @@ -20,8 +20,6 @@ import ( "strings" "time" - "go.uber.org/zap" - "github.com/matrixorigin/matrixone/pkg/catalog" "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/common/mpool" @@ -32,7 +30,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/logutil" "github.com/matrixorigin/matrixone/pkg/pb/api" "github.com/matrixorigin/matrixone/pkg/pb/txn" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function/ctl" + "github.com/matrixorigin/matrixone/pkg/sql/function/ctl" "github.com/matrixorigin/matrixone/pkg/util/fault" "github.com/matrixorigin/matrixone/pkg/vm/engine/cmd_util" "github.com/matrixorigin/matrixone/pkg/vm/engine/disttae/cache" @@ -40,6 +38,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/db/checkpoint" "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/logtail" 
"github.com/matrixorigin/matrixone/pkg/vm/process" + "go.uber.org/zap" ) // tryAdjustSysTablesCreatedTimeWithBatch analyzes the mo_tables batch and tries to adjust the created time of the system tables. diff --git a/pkg/vm/engine/disttae/engine_test.go b/pkg/vm/engine/disttae/engine_test.go index bc43ae8d16097..6079720edb66b 100644 --- a/pkg/vm/engine/disttae/engine_test.go +++ b/pkg/vm/engine/disttae/engine_test.go @@ -83,12 +83,12 @@ func TestTransaction(t *testing.T) { _, _ = txn.getRow(ctx, 0, 0, nil, nil, MakeFunctionExprForTest(">", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(20), + planner.MakePlan2Int64ConstExprWithType(20), }), nil) _, _ = txn.getRows(ctx, 0, 0, nil, nil, MakeFunctionExprForTest(">", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(20), + planner.MakePlan2Int64ConstExprWithType(20), }), nil) } diff --git a/pkg/vm/engine/disttae/logtailreplay/rows_iter.go b/pkg/vm/engine/disttae/logtailreplay/rows_iter.go index 0c13a336a77aa..38560337c1116 100644 --- a/pkg/vm/engine/disttae/logtailreplay/rows_iter.go +++ b/pkg/vm/engine/disttae/logtailreplay/rows_iter.go @@ -21,7 +21,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/logutil" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/vm/engine/readutil" "github.com/tidwall/btree" ) diff --git a/pkg/vm/engine/disttae/logtailreplay/rows_iter_test.go b/pkg/vm/engine/disttae/logtailreplay/rows_iter_test.go index 1e62f609caeed..d08a585154790 100644 --- a/pkg/vm/engine/disttae/logtailreplay/rows_iter_test.go +++ b/pkg/vm/engine/disttae/logtailreplay/rows_iter_test.go @@ -20,17 +20,15 @@ import ( "math/rand" "testing" - "github.com/matrixorigin/matrixone/pkg/vm/engine/readutil" - - "github.com/stretchr/testify/require" - "github.com/tidwall/btree" - 
"github.com/matrixorigin/matrixone/pkg/common/mpool" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/objectio" "github.com/matrixorigin/matrixone/pkg/pb/api" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" + "github.com/matrixorigin/matrixone/pkg/vm/engine/readutil" + "github.com/stretchr/testify/require" + "github.com/tidwall/btree" ) func TestPartitionStateRowsIter(t *testing.T) { diff --git a/pkg/vm/engine/disttae/mo_table_stats.go b/pkg/vm/engine/disttae/mo_table_stats.go index bcf6dbe308c3c..ebba8d2d93acd 100644 --- a/pkg/vm/engine/disttae/mo_table_stats.go +++ b/pkg/vm/engine/disttae/mo_table_stats.go @@ -44,8 +44,8 @@ import ( "github.com/matrixorigin/matrixone/pkg/pb/timestamp" "github.com/matrixorigin/matrixone/pkg/pb/txn" "github.com/matrixorigin/matrixone/pkg/predefine" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function/ctl" + "github.com/matrixorigin/matrixone/pkg/sql/function" + "github.com/matrixorigin/matrixone/pkg/sql/function/ctl" ie "github.com/matrixorigin/matrixone/pkg/util/internalExecutor" v2 "github.com/matrixorigin/matrixone/pkg/util/metric/v2" "github.com/matrixorigin/matrixone/pkg/vm/engine" diff --git a/pkg/vm/engine/disttae/stats.go b/pkg/vm/engine/disttae/stats.go index 1c056c14cd3e2..587aec4a447b0 100644 --- a/pkg/vm/engine/disttae/stats.go +++ b/pkg/vm/engine/disttae/stats.go @@ -31,12 +31,13 @@ import ( "github.com/matrixorigin/matrixone/pkg/pb/api" "github.com/matrixorigin/matrixone/pkg/pb/gossip" "github.com/matrixorigin/matrixone/pkg/pb/logtail" + "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/pb/query" pb "github.com/matrixorigin/matrixone/pkg/pb/statsinfo" "github.com/matrixorigin/matrixone/pkg/pb/timestamp" 
"github.com/matrixorigin/matrixone/pkg/perfcounter" "github.com/matrixorigin/matrixone/pkg/queryservice/client" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" v2 "github.com/matrixorigin/matrixone/pkg/util/metric/v2" "github.com/matrixorigin/matrixone/pkg/util/trace/impl/motrace/statistic" "github.com/matrixorigin/matrixone/pkg/vm/engine/disttae/logtailreplay" @@ -146,7 +147,7 @@ type updateStatsRequest struct { // The following fields are needed to update the stats. // tableDef is the main table definition. - tableDef *plan2.TableDef + tableDef *plan.TableDef partitionState *logtailreplay.PartitionState fs fileservice.FileService @@ -160,7 +161,7 @@ type updateStatsRequest struct { } func newUpdateStatsRequest( - tableDef *plan2.TableDef, + tableDef *plan.TableDef, partitionState *logtailreplay.PartitionState, fs fileservice.FileService, ts types.TS, @@ -656,7 +657,7 @@ func (gs *GlobalStats) PatchStats(key pb.StatsInfoKey, patch *PatchArgs) error { stats := gs.mu.statsInfoMap[key] if stats == nil { // Create new stats if not exists - stats = plan2.NewStatsInfo() + stats = planner.NewStatsInfo() gs.mu.statsInfoMap[key] = stats } @@ -810,7 +811,7 @@ func (gs *GlobalStats) coordinateStatsUpdate(wrapKey pb.StatsInfoKeyWithContext) broadcastWithoutUpdate() return } - stats := plan2.NewStatsInfo() + stats := planner.NewStatsInfo() newCtx := perfcounter.AttachS3RequestKey(wrapKey.Ctx, crs) updated, samplingRatio = gs.executeStatsUpdate(newCtx, ps, wrapKey.Key, stats) @@ -863,7 +864,7 @@ func (gs *GlobalStats) RefreshWithMode(ctx context.Context, key pb.StatsInfoKey, } // Create stats info - stats := plan2.NewStatsInfo() + stats := planner.NewStatsInfo() approxObjectNum := int64(ps.ApproxDataObjectsNum()) lastActualObjectCnt := gs.GetBaseObjectCnt(key) @@ -981,7 +982,7 @@ func getMinMaxValueByFloat64(typ types.Type, buf []byte) float64 { dec := types.DecodeDecimal128(buf) return 
types.Decimal128ToFloat64(dec, typ.Scale) //case types.T_char, types.T_varchar, types.T_text: - //return float64(plan2.ByteSliceToUint64(buf)), true + //return float64(planner.ByteSliceToUint64(buf)), true default: panic("unsupported type") } @@ -1033,7 +1034,7 @@ func calcSamplingRatio(approxObjectNum int64) float64 { // get ndv, minval , maxval, datatype from zonemap. Retrieve all columns except for rowid, return accurate number of objects // Returns the actual sampling ratio (sampledObjects / totalObjects). func collectTableStats( - ctx context.Context, req *updateStatsRequest, info *plan2.TableStatsInfo, executor ConcurrentExecutor, + ctx context.Context, req *updateStatsRequest, info *planner.TableStatsInfo, executor ConcurrentExecutor, ) (float64, error) { start := time.Now() defer func() { @@ -1113,7 +1114,7 @@ func collectTableStats( columnMeta := meta.MustGetColumn(uint16(col.Seqnum)) info.NullCnts[idx] = int64(columnMeta.NullCnt()) info.ColumnZMs[idx] = columnMeta.ZoneMap().Clone() - info.DataTypes[idx] = plan2.ExprType2Type(&col.Typ) + info.DataTypes[idx] = planner.ExprType2Type(&col.Typ) columnNDV := float64(columnMeta.Ndv()) info.ColumnNDVs[idx] = columnNDV info.MaxNDVs[idx] = columnNDV @@ -1125,14 +1126,14 @@ func collectTableStats( if info.ColumnNDVs[idx] > 100 || info.ColumnNDVs[idx] > 0.1*float64(meta.BlockHeader().Rows()) { switch info.DataTypes[idx].Oid { case types.T_int64, types.T_int32, types.T_int16, types.T_uint64, types.T_uint32, types.T_uint16, types.T_time, types.T_timestamp, types.T_date, types.T_datetime, types.T_year, types.T_decimal64, types.T_decimal128: - info.ShuffleRanges[idx] = plan2.NewShuffleRange(false) + info.ShuffleRanges[idx] = planner.NewShuffleRange(false) if info.ColumnZMs[idx].IsInited() { minValue := getMinMaxValueByFloat64(info.DataTypes[idx], info.ColumnZMs[idx].GetMinBuf()) maxValue := getMinMaxValueByFloat64(info.DataTypes[idx], info.ColumnZMs[idx].GetMaxBuf()) info.ShuffleRanges[idx].Update(minValue, maxValue, 
int64(meta.BlockHeader().Rows()), int64(columnMeta.NullCnt())) } case types.T_varchar, types.T_char, types.T_text: - info.ShuffleRanges[idx] = plan2.NewShuffleRange(true) + info.ShuffleRanges[idx] = planner.NewShuffleRange(true) if info.ColumnZMs[idx].IsInited() { info.ShuffleRanges[idx].UpdateString(info.ColumnZMs[idx].GetMinBuf(), info.ColumnZMs[idx].GetMaxBuf(), int64(meta.BlockHeader().Rows()), int64(columnMeta.NullCnt())) } @@ -1189,7 +1190,7 @@ func collectTableStats( if info.ColumnNDVs[idx] > 100 || info.ColumnNDVs[idx] > 0.1*float64(info.TableRowCount) { switch info.DataTypes[idx].Oid { case types.T_int64, types.T_int32, types.T_int16, types.T_uint64, types.T_uint32, types.T_uint16, types.T_time, types.T_timestamp, types.T_date, types.T_datetime, types.T_year, types.T_decimal64, types.T_decimal128: - info.ShuffleRanges[idx] = plan2.NewShuffleRange(false) + info.ShuffleRanges[idx] = planner.NewShuffleRange(false) // Initialize with accumulated ZoneMap if available if info.ColumnZMs[idx].IsInited() { minValue := getMinMaxValueByFloat64(info.DataTypes[idx], info.ColumnZMs[idx].GetMinBuf()) @@ -1198,7 +1199,7 @@ func collectTableStats( info.ShuffleRanges[idx].Update(minValue, maxValue, int64(info.TableRowCount), info.NullCnts[idx]) } case types.T_varchar, types.T_char, types.T_text: - info.ShuffleRanges[idx] = plan2.NewShuffleRange(true) + info.ShuffleRanges[idx] = planner.NewShuffleRange(true) if info.ColumnZMs[idx].IsInited() { info.ShuffleRanges[idx].UpdateString(info.ColumnZMs[idx].GetMinBuf(), info.ColumnZMs[idx].GetMaxBuf(), int64(info.TableRowCount), info.NullCnts[idx]) } @@ -1281,7 +1282,7 @@ func CollectAndCalculateStats(ctx context.Context, req *updateStatsRequest, exec v2.TxnStatementUpdateStatsDurationHistogram.Observe(time.Since(start).Seconds()) }() lenCols := len(req.tableDef.Cols) - 1 /* row-id */ - info := plan2.NewTableStatsInfo(lenCols) + info := planner.NewTableStatsInfo(lenCols) if req.approxObjectNum == 0 { return 1.0, nil } @@ -1292,8 
+1293,8 @@ func CollectAndCalculateStats(ctx context.Context, req *updateStatsRequest, exec if err != nil { return 0, err } - plan2.UpdateStatsInfo(info, baseTableDef, req.statsInfo) - plan2.AdjustNDV(info, baseTableDef, req.statsInfo) + planner.UpdateStatsInfo(info, baseTableDef, req.statsInfo) + planner.AdjustNDV(info, baseTableDef, req.statsInfo) for i, coldef := range baseTableDef.Cols[:len(baseTableDef.Cols)-1] { colName := coldef.Name diff --git a/pkg/vm/engine/disttae/stats_collect_test.go b/pkg/vm/engine/disttae/stats_collect_test.go index 0942d06937709..1cbad33d2d50f 100644 --- a/pkg/vm/engine/disttae/stats_collect_test.go +++ b/pkg/vm/engine/disttae/stats_collect_test.go @@ -19,12 +19,11 @@ import ( "testing" "github.com/lni/goutils/leaktest" - "github.com/stretchr/testify/assert" - "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/objectio" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/index" + "github.com/stretchr/testify/assert" ) // TestCollectTableStats_ShuffleRanges_DynamicCreation tests that ShuffleRanges @@ -39,7 +38,7 @@ func TestCollectTableStats_ShuffleRanges_DynamicCreation(t *testing.T) { // Expected: ShuffleRanges should be created when processing the second object lenCols := 1 - info := plan2.NewTableStatsInfo(lenCols) + info := planner.NewTableStatsInfo(lenCols) info.DataTypes[0] = types.New(types.T_int64, 0, 0) // Simulate first object: 50 rows, NDV=5 (doesn't meet condition: 5 <= 100 && 5 <= 0.1*50=5, but 5 > 5 is false) @@ -111,7 +110,7 @@ func TestCollectTableStats_ShuffleRanges_DynamicCreation(t *testing.T) { // Simulate the fix: Create ShuffleRanges if nil and condition is met if info.ShuffleRanges[0] == nil && shouldCreate { - info.ShuffleRanges[0] = plan2.NewShuffleRange(false) + info.ShuffleRanges[0] = planner.NewShuffleRange(false) if info.ColumnZMs[0].IsInited() { 
minValue := getMinMaxValueByFloat64(info.DataTypes[0], info.ColumnZMs[0].GetMinBuf()) maxValue := getMinMaxValueByFloat64(info.DataTypes[0], info.ColumnZMs[0].GetMaxBuf()) @@ -127,7 +126,7 @@ func TestCollectTableStats_ShuffleRanges_DynamicCreation(t *testing.T) { t.Run("create_on_third_object_when_first_two_dont_meet_condition", func(t *testing.T) { // Scenario: First two objects are small, third object makes cumulative stats meet condition lenCols := 1 - info := plan2.NewTableStatsInfo(lenCols) + info := planner.NewTableStatsInfo(lenCols) info.DataTypes[0] = types.New(types.T_int64, 0, 0) // First object: 30 rows, NDV=2 (doesn't meet: 2 > 100 (false) || 2 > 0.1*30=3 (false)) @@ -159,7 +158,7 @@ func TestCollectTableStats_ShuffleRanges_DynamicCreation(t *testing.T) { // Simulate the fix if info.ShuffleRanges[0] == nil && shouldCreate { - info.ShuffleRanges[0] = plan2.NewShuffleRange(false) + info.ShuffleRanges[0] = planner.NewShuffleRange(false) } assert.NotNil(t, info.ShuffleRanges[0], "ShuffleRanges should be created on third object") }) @@ -167,7 +166,7 @@ func TestCollectTableStats_ShuffleRanges_DynamicCreation(t *testing.T) { t.Run("dont_create_when_never_meets_condition", func(t *testing.T) { // Scenario: Multiple small objects that never meet the condition lenCols := 1 - info := plan2.NewTableStatsInfo(lenCols) + info := planner.NewTableStatsInfo(lenCols) info.DataTypes[0] = types.New(types.T_int64, 0, 0) // Process 10 objects, each with 10 rows and NDV=10 @@ -176,7 +175,7 @@ func TestCollectTableStats_ShuffleRanges_DynamicCreation(t *testing.T) { info.ColumnNDVs[0] += 10 shouldCreate := info.ColumnNDVs[0] > 100 || info.ColumnNDVs[0] > 0.1*float64(info.TableRowCount) if info.ShuffleRanges[0] == nil && shouldCreate { - info.ShuffleRanges[0] = plan2.NewShuffleRange(false) + info.ShuffleRanges[0] = planner.NewShuffleRange(false) } } @@ -186,7 +185,7 @@ func TestCollectTableStats_ShuffleRanges_DynamicCreation(t *testing.T) { shouldCreate := info.ColumnNDVs[0] > 
100 || info.ColumnNDVs[0] > 0.1*float64(info.TableRowCount) assert.True(t, shouldCreate, "Should meet condition: 100 > 10") if info.ShuffleRanges[0] == nil && shouldCreate { - info.ShuffleRanges[0] = plan2.NewShuffleRange(false) + info.ShuffleRanges[0] = planner.NewShuffleRange(false) } assert.NotNil(t, info.ShuffleRanges[0], "ShuffleRanges should be created when NDV=100 and rows=100") }) @@ -204,7 +203,7 @@ func TestCollectTableStats_ColumnSize_OriginSize(t *testing.T) { // This test simulates that behavior with realistic data lenCols := 1 - info := plan2.NewTableStatsInfo(lenCols) + info := planner.NewTableStatsInfo(lenCols) // Simulate first object: OriginSize = 4000 bytes // This matches the real logic: info.ColumnSize[idx] = int64(columnMeta.Location().OriginSize()) @@ -217,7 +216,7 @@ func TestCollectTableStats_ColumnSize_OriginSize(t *testing.T) { t.Run("subsequent_objects_accumulate_origin_size", func(t *testing.T) { // This test verifies that subsequent objects accumulate OriginSize() lenCols := 1 - info := plan2.NewTableStatsInfo(lenCols) + info := planner.NewTableStatsInfo(lenCols) // First object firstObjectOriginSize := int64(4000) @@ -238,7 +237,7 @@ func TestCollectTableStats_ColumnSize_OriginSize(t *testing.T) { t.Run("consistent_calculation_for_all_objects", func(t *testing.T) { // Verify that first and subsequent objects use the same calculation (OriginSize) lenCols := 1 - info := plan2.NewTableStatsInfo(lenCols) + info := planner.NewTableStatsInfo(lenCols) objects := []int64{4000, 8000, 12000, 16000} for i, size := range objects { @@ -266,7 +265,7 @@ func TestCollectTableStats_NDV_IndependentOfZoneMap(t *testing.T) { t.Run("ndv_accumulated_when_zonemap_not_initialized", func(t *testing.T) { // Scenario: Object has valid NDV but ZoneMap is not initialized (e.g., all NULL values) lenCols := 1 - info := plan2.NewTableStatsInfo(lenCols) + info := planner.NewTableStatsInfo(lenCols) info.DataTypes[0] = types.New(types.T_int64, 0, 0) // First object: 
NDV=50, ZoneMap not initialized @@ -305,7 +304,7 @@ func TestCollectTableStats_NDV_IndependentOfZoneMap(t *testing.T) { t.Run("ndv_accumulated_for_multiple_objects_with_mixed_zonemap_status", func(t *testing.T) { lenCols := 1 - info := plan2.NewTableStatsInfo(lenCols) + info := planner.NewTableStatsInfo(lenCols) info.DataTypes[0] = types.New(types.T_int64, 0, 0) // Object 1: NDV=30, ZoneMap initialized @@ -348,7 +347,7 @@ func TestCollectTableStats_MaxMinObjectRowCount(t *testing.T) { t.Run("max_object_row_count_tracking", func(t *testing.T) { lenCols := 2 - info := plan2.NewTableStatsInfo(lenCols) + info := planner.NewTableStatsInfo(lenCols) // First object: 100 rows firstObjectRows := uint32(100) @@ -388,7 +387,7 @@ func TestCollectTableStats_MaxMinObjectRowCount(t *testing.T) { t.Run("ndvin_max_min_object_tracking", func(t *testing.T) { // Test that NDVinMaxObject and NDVinMinObject are updated based on MaxObjectRowCount/MinObjectRowCount lenCols := 1 - info := plan2.NewTableStatsInfo(lenCols) + info := planner.NewTableStatsInfo(lenCols) info.DataTypes[0] = types.New(types.T_int64, 0, 0) // First object: 100 rows, NDV=50 @@ -430,7 +429,7 @@ func TestCollectTableStats_MaxMinObjectRowCount(t *testing.T) { t.Run("same_row_count_different_ndv", func(t *testing.T) { // Test handling when multiple objects have the same row count but different NDV lenCols := 1 - info := plan2.NewTableStatsInfo(lenCols) + info := planner.NewTableStatsInfo(lenCols) info.DataTypes[0] = types.New(types.T_int64, 0, 0) // First object: 100 rows, NDV=50 @@ -460,7 +459,7 @@ func TestCollectTableStats_ShuffleRanges_StringType(t *testing.T) { t.Run("create_string_shuffle_range_on_second_object", func(t *testing.T) { lenCols := 1 - info := plan2.NewTableStatsInfo(lenCols) + info := planner.NewTableStatsInfo(lenCols) info.DataTypes[0] = types.New(types.T_varchar, 100, 0) // First object: 50 rows, NDV=50 (doesn't meet condition) @@ -493,7 +492,7 @@ func 
TestCollectTableStats_ShuffleRanges_StringType(t *testing.T) { // Simulate the fix: Create ShuffleRanges for string type shouldCreate := info.ColumnNDVs[0] > 100 || info.ColumnNDVs[0] > 0.1*float64(info.TableRowCount) if info.ShuffleRanges[0] == nil && shouldCreate { - info.ShuffleRanges[0] = plan2.NewShuffleRange(true) + info.ShuffleRanges[0] = planner.NewShuffleRange(true) if info.ColumnZMs[0].IsInited() { info.ShuffleRanges[0].UpdateString( info.ColumnZMs[0].GetMinBuf(), @@ -515,7 +514,7 @@ func TestCollectTableStats_MultipleColumns(t *testing.T) { t.Run("multiple_columns_shuffle_range_creation", func(t *testing.T) { lenCols := 3 - info := plan2.NewTableStatsInfo(lenCols) + info := planner.NewTableStatsInfo(lenCols) info.DataTypes[0] = types.New(types.T_int64, 0, 0) info.DataTypes[1] = types.New(types.T_varchar, 100, 0) info.DataTypes[2] = types.New(types.T_int32, 0, 0) @@ -540,10 +539,10 @@ func TestCollectTableStats_MultipleColumns(t *testing.T) { if info.ShuffleRanges[idx] == nil && shouldCreate { if idx == 1 { // String type - info.ShuffleRanges[idx] = plan2.NewShuffleRange(true) + info.ShuffleRanges[idx] = planner.NewShuffleRange(true) } else { // Numeric types - info.ShuffleRanges[idx] = plan2.NewShuffleRange(false) + info.ShuffleRanges[idx] = planner.NewShuffleRange(false) } } } @@ -561,7 +560,7 @@ func TestCollectTableStats_EdgeCases(t *testing.T) { t.Run("exactly_100_ndv", func(t *testing.T) { // Test boundary: NDV exactly 100 lenCols := 1 - info := plan2.NewTableStatsInfo(lenCols) + info := planner.NewTableStatsInfo(lenCols) info.DataTypes[0] = types.New(types.T_int64, 0, 0) info.TableRowCount = 1000 @@ -581,7 +580,7 @@ func TestCollectTableStats_EdgeCases(t *testing.T) { t.Run("exactly_10_percent_ndv", func(t *testing.T) { // Test boundary: NDV exactly 10% of rows lenCols := 1 - info := plan2.NewTableStatsInfo(lenCols) + info := planner.NewTableStatsInfo(lenCols) info.DataTypes[0] = types.New(types.T_int64, 0, 0) info.TableRowCount = 1000 @@ -599,7 
+598,7 @@ func TestCollectTableStats_EdgeCases(t *testing.T) { t.Run("very_large_ndv_small_rows", func(t *testing.T) { // Test: Very large NDV but small number of rows lenCols := 1 - info := plan2.NewTableStatsInfo(lenCols) + info := planner.NewTableStatsInfo(lenCols) info.DataTypes[0] = types.New(types.T_int64, 0, 0) info.TableRowCount = 50 @@ -611,7 +610,7 @@ func TestCollectTableStats_EdgeCases(t *testing.T) { t.Run("very_large_rows_small_ndv", func(t *testing.T) { // Test: Very large number of rows but small NDV lenCols := 1 - info := plan2.NewTableStatsInfo(lenCols) + info := planner.NewTableStatsInfo(lenCols) info.DataTypes[0] = types.New(types.T_int64, 0, 0) info.TableRowCount = 100000 @@ -628,7 +627,7 @@ func TestCollectTableStats_EdgeCases(t *testing.T) { t.Run("zero_rows", func(t *testing.T) { // Test: Zero rows edge case lenCols := 1 - info := plan2.NewTableStatsInfo(lenCols) + info := planner.NewTableStatsInfo(lenCols) info.DataTypes[0] = types.New(types.T_int64, 0, 0) info.TableRowCount = 0 @@ -650,7 +649,7 @@ func TestCollectTableStats_ShuffleRanges_DynamicCreation_Real(t *testing.T) { // to verify that ShuffleRanges can be created dynamically in subsequent objects lenCols := 1 - info := plan2.NewTableStatsInfo(lenCols) + info := planner.NewTableStatsInfo(lenCols) info.DataTypes[0] = types.New(types.T_int64, 0, 0) var init bool @@ -679,7 +678,7 @@ func TestCollectTableStats_ShuffleRanges_DynamicCreation_Real(t *testing.T) { // Real logic from collectTableStats line 675 if info.ColumnNDVs[idx] > 100 || info.ColumnNDVs[idx] > 0.1*float64(rows) { - info.ShuffleRanges[idx] = plan2.NewShuffleRange(false) + info.ShuffleRanges[idx] = planner.NewShuffleRange(false) if info.ColumnZMs[idx].IsInited() { minValue := getMinMaxValueByFloat64(info.DataTypes[idx], info.ColumnZMs[idx].GetMinBuf()) maxValue := getMinMaxValueByFloat64(info.DataTypes[idx], info.ColumnZMs[idx].GetMaxBuf()) @@ -733,7 +732,7 @@ func TestCollectTableStats_ShuffleRanges_DynamicCreation_Real(t 
*testing.T) { // Real logic from collectTableStats lines 735-758 (THE FIX) if info.ShuffleRanges[idx] == nil { if info.ColumnNDVs[idx] > 100 || info.ColumnNDVs[idx] > 0.1*float64(info.TableRowCount) { - info.ShuffleRanges[idx] = plan2.NewShuffleRange(false) + info.ShuffleRanges[idx] = planner.NewShuffleRange(false) if info.ColumnZMs[idx].IsInited() { minValue := getMinMaxValueByFloat64(info.DataTypes[idx], info.ColumnZMs[idx].GetMinBuf()) maxValue := getMinMaxValueByFloat64(info.DataTypes[idx], info.ColumnZMs[idx].GetMaxBuf()) @@ -772,7 +771,7 @@ func TestCollectTableStats_ShuffleRanges_DynamicCreation_Real(t *testing.T) { t.Run("create_on_third_object_when_first_two_dont_meet_condition", func(t *testing.T) { lenCols := 1 - info := plan2.NewTableStatsInfo(lenCols) + info := planner.NewTableStatsInfo(lenCols) info.DataTypes[0] = types.New(types.T_int64, 0, 0) var init bool @@ -786,7 +785,7 @@ func TestCollectTableStats_ShuffleRanges_DynamicCreation_Real(t *testing.T) { info.ColumnSize[0] = 2400 // Condition: 2 > 100 (false) || 2 > 0.1*30=3 (false) -> false if info.ColumnNDVs[0] > 100 || info.ColumnNDVs[0] > 0.1*float64(30) { - info.ShuffleRanges[0] = plan2.NewShuffleRange(false) + info.ShuffleRanges[0] = planner.NewShuffleRange(false) } } assert.Nil(t, info.ShuffleRanges[0], "Should not create after first object") @@ -801,7 +800,7 @@ func TestCollectTableStats_ShuffleRanges_DynamicCreation_Real(t *testing.T) { // Condition: 5 > 100 (false) || 5 > 0.1*70=7 (false) -> false if info.ShuffleRanges[0] == nil { if info.ColumnNDVs[0] > 100 || info.ColumnNDVs[0] > 0.1*float64(info.TableRowCount) { - info.ShuffleRanges[0] = plan2.NewShuffleRange(false) + info.ShuffleRanges[0] = planner.NewShuffleRange(false) } } assert.Nil(t, info.ShuffleRanges[0], "Should not create after second object") @@ -816,7 +815,7 @@ func TestCollectTableStats_ShuffleRanges_DynamicCreation_Real(t *testing.T) { // Condition: 905 > 100 (true) -> true if info.ShuffleRanges[0] == nil { if 
info.ColumnNDVs[0] > 100 || info.ColumnNDVs[0] > 0.1*float64(info.TableRowCount) { - info.ShuffleRanges[0] = plan2.NewShuffleRange(false) + info.ShuffleRanges[0] = planner.NewShuffleRange(false) } } assert.NotNil(t, info.ShuffleRanges[0], "ShuffleRanges should be created on third object") @@ -833,7 +832,7 @@ func TestCollectTableStats_NDV_IndependentOfZoneMap_Real(t *testing.T) { // to verify that NDV is accumulated even when ZoneMap is not initialized lenCols := 1 - info := plan2.NewTableStatsInfo(lenCols) + info := planner.NewTableStatsInfo(lenCols) info.DataTypes[0] = types.New(types.T_int64, 0, 0) var init bool diff --git a/pkg/vm/engine/disttae/stats_test.go b/pkg/vm/engine/disttae/stats_test.go index e7401529f687c..a7d231bfccc55 100644 --- a/pkg/vm/engine/disttae/stats_test.go +++ b/pkg/vm/engine/disttae/stats_test.go @@ -25,9 +25,6 @@ import ( "time" "github.com/lni/goutils/leaktest" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/matrixorigin/matrixone/pkg/catalog" "github.com/matrixorigin/matrixone/pkg/clusterservice" "github.com/matrixorigin/matrixone/pkg/common/mpool" @@ -36,9 +33,11 @@ import ( "github.com/matrixorigin/matrixone/pkg/lockservice" "github.com/matrixorigin/matrixone/pkg/objectio" "github.com/matrixorigin/matrixone/pkg/pb/statsinfo" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/vm/engine/disttae/cache" "github.com/matrixorigin/matrixone/pkg/vm/engine/disttae/logtailreplay" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func runTest( @@ -129,7 +128,7 @@ func TestUpdateStats(t *testing.T) { DatabaseID: 1000, TableID: 1001, } - stats := plan2.NewStatsInfo() + stats := planner.NewStatsInfo() ps := logtailreplay.NewPartitionState("", true, 1001, false) updated, _ := e.globalStats.executeStatsUpdate(ctx, ps, k, stats) assert.False(t, updated) @@ -149,7 +148,7 @@ func 
TestUpdateStats(t *testing.T) { DatabaseID: did, TableID: tid, } - stats := plan2.NewStatsInfo() + stats := planner.NewStatsInfo() ps := logtailreplay.NewPartitionState("", true, tid, false) updated, _ := e.globalStats.executeStatsUpdate(ctx, ps, k, stats) assert.False(t, updated) @@ -169,7 +168,7 @@ func TestUpdateStats(t *testing.T) { DatabaseID: did, TableID: tid, } - stats := plan2.NewStatsInfo() + stats := planner.NewStatsInfo() ps := logtailreplay.NewPartitionState("", true, tid, false) updated, _ := e.globalStats.executeStatsUpdate(ctx, ps, k, stats) assert.True(t, updated) diff --git a/pkg/vm/engine/disttae/table_meta_reader.go b/pkg/vm/engine/disttae/table_meta_reader.go index f0d40b6838136..a15b6155126f7 100644 --- a/pkg/vm/engine/disttae/table_meta_reader.go +++ b/pkg/vm/engine/disttae/table_meta_reader.go @@ -27,7 +27,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/objectio" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/vm/engine" "github.com/matrixorigin/matrixone/pkg/vm/engine/disttae/logtailreplay" "github.com/matrixorigin/matrixone/pkg/vm/engine/readutil" @@ -154,8 +154,8 @@ func (r *TableMetaReader) Read( outBatch.CleanOnlyData() if isTombstone { - pkCol := plan2.PkColByTableDef(r.table.tableDef) - pkType := plan2.ExprType2Type(&pkCol.Typ) + pkCol := planner.PkColByTableDef(r.table.tableDef) + pkType := planner.ExprType2Type(&pkCol.Typ) seqnums = []uint16{0, 1} colTypes = []types.Type{types.T_Rowid.ToType(), pkType} diff --git a/pkg/vm/engine/disttae/transfer_util.go b/pkg/vm/engine/disttae/transfer_util.go index 51e1386064577..b803bcb71ec55 100644 --- a/pkg/vm/engine/disttae/transfer_util.go +++ b/pkg/vm/engine/disttae/transfer_util.go @@ -25,7 +25,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/objectio" 
"github.com/matrixorigin/matrixone/pkg/objectio/ioutil" "github.com/matrixorigin/matrixone/pkg/objectio/mergeutil" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/vm/engine" "github.com/matrixorigin/matrixone/pkg/vm/engine/readutil" "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/containers" @@ -117,7 +117,7 @@ func ConstructCNTombstoneObjectsTransferFlow( end.ToTimestamp(), readutil.WithColumns( []uint16{0, 1}, - []types.Type{types.T_Rowid.ToType(), plan2.ExprType2Type(&pkCol.Typ)}, + []types.Type{types.T_Rowid.ToType(), planner.ExprType2Type(&pkCol.Typ)}, ), ) @@ -176,7 +176,7 @@ type TransferFlow struct { } func (flow *TransferFlow) fillDefaults() { - pkType := plan2.ExprType2Type(&flow.table.tableDef.Cols[flow.table.primaryIdx].Typ) + pkType := planner.ExprType2Type(&flow.table.tableDef.Cols[flow.table.primaryIdx].Typ) if flow.buffer == nil { attrs, attrTypes := objectio.GetTombstoneSchema( pkType, flow.hiddenSelection, diff --git a/pkg/vm/engine/disttae/txn.go b/pkg/vm/engine/disttae/txn.go index f56aaf55b7def..b4c734b02ff6c 100644 --- a/pkg/vm/engine/disttae/txn.go +++ b/pkg/vm/engine/disttae/txn.go @@ -39,7 +39,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/pb/timestamp" "github.com/matrixorigin/matrixone/pkg/pb/txn" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/txn/trace" v2 "github.com/matrixorigin/matrixone/pkg/util/metric/v2" @@ -841,9 +841,9 @@ func (txn *Transaction) dumpDeleteBatchLocked( return err } - pkCol = plan2.PkColByTableDef(tbl.GetTableDef(txn.proc.Ctx)) + pkCol = planner.PkColByTableDef(tbl.GetTableDef(txn.proc.Ctx)) s3Writer = colexec.NewCNS3TombstoneWriter( - txn.proc.GetMPool(), fs, plan2.ExprType2Type(&pkCol.Typ), -1, + 
txn.proc.GetMPool(), fs, planner.ExprType2Type(&pkCol.Typ), -1, ) for i := 0; i < len(mp[tbKey]); i++ { diff --git a/pkg/vm/engine/disttae/txn_table.go b/pkg/vm/engine/disttae/txn_table.go index 545ed84321de7..0ec0da4f34106 100644 --- a/pkg/vm/engine/disttae/txn_table.go +++ b/pkg/vm/engine/disttae/txn_table.go @@ -44,7 +44,7 @@ import ( pb "github.com/matrixorigin/matrixone/pkg/pb/statsinfo" "github.com/matrixorigin/matrixone/pkg/shardservice" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/sql/util" "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/txn/trace" @@ -918,7 +918,7 @@ func (tbl *txnTable) getObjList(ctx context.Context, rangesParam engine.RangesPa tbl.db.op.SnapshotTS(), func(obj objectio.ObjectEntry, isCommitted bool) (err2 error) { //if need to shuffle objects - if plan2.ShouldSkipObjByShuffle(rangesParam.Rsp, &obj.ObjectStats) { + if planner.ShouldSkipObjByShuffle(rangesParam.Rsp, &obj.ObjectStats) { return } objRelData.AppendObj(&obj.ObjectStats) @@ -991,8 +991,8 @@ func (tbl *txnTable) doRanges(ctx context.Context, rangesParam engine.RangesPara logutil.Info( "txn.table.ranges.log", zap.String("name", tbl.tableDef.Name), - zap.String("exprs", plan2.FormatExprs( - rangesParam.BlockFilters, plan2.FormatOption{ + zap.String("exprs", planner.FormatExprs( + rangesParam.BlockFilters, planner.FormatOption{ ExpandVec: false, MaxDepth: 5, }, @@ -1120,9 +1120,9 @@ func (tbl *txnTable) rangesOnePart( logutil.Info( "SLOW-RANGES:", zap.String("table", tbl.tableDef.Name), - zap.String("exprs", plan2.FormatExprs( + zap.String("exprs", planner.FormatExprs( rangesParam.BlockFilters, - plan2.FormatOption{ + planner.FormatOption{ ExpandVec: true, ExpandVecMaxLen: 2, MaxDepth: 5, @@ -1131,7 +1131,7 @@ func (tbl *txnTable) rangesOnePart( ) } - hasFoldExpr := 
plan2.HasFoldExprForList(rangesParam.BlockFilters) + hasFoldExpr := planner.HasFoldExprForList(rangesParam.BlockFilters) if hasFoldExpr { rangesParam.BlockFilters = nil } @@ -1156,14 +1156,14 @@ func (tbl *txnTable) rangesOnePart( // check if expr is monotonic, if not, we can skip evaluating expr for each block for _, expr := range rangesParam.BlockFilters { - auxIdCnt += plan2.AssignAuxIdForExpr(expr, auxIdCnt) + auxIdCnt += planner.AssignAuxIdForExpr(expr, auxIdCnt) } columnMap := make(map[int]int) if auxIdCnt > 0 { zms = make([]objectio.ZoneMap, auxIdCnt) vecs = make([]*vector.Vector, auxIdCnt) - plan2.GetColumnMapByExprs(rangesParam.BlockFilters, tableDef, columnMap) + planner.GetColumnMapByExprs(rangesParam.BlockFilters, tableDef, columnMap) } errCtx := errutil.ContextWithNoReport(ctx, true) @@ -1172,7 +1172,7 @@ func (tbl *txnTable) rangesOnePart( tbl.db.op.SnapshotTS(), func(obj objectio.ObjectEntry, isCommitted bool) (err2 error) { //if need to shuffle objects - if plan2.ShouldSkipObjByShuffle(rangesParam.Rsp, &obj.ObjectStats) { + if planner.ShouldSkipObjByShuffle(rangesParam.Rsp, &obj.ObjectStats) { return } var meta objectio.ObjectDataMeta @@ -1480,13 +1480,13 @@ func (tbl *txnTable) GetTableDef(ctx context.Context) *plan.TableDef { } if primarykey != nil && primarykey.PkeyColName == catalog.CPrimaryKeyColName { - primarykey.CompPkeyCol = plan2.GetColDefFromTable(cols, catalog.CPrimaryKeyColName) + primarykey.CompPkeyCol = planner.GetColDefFromTable(cols, catalog.CPrimaryKeyColName) } if clusterByDef != nil && util.JudgeIsCompositeClusterByColumn(clusterByDef.Name) { - clusterByDef.CompCbkeyCol = plan2.GetColDefFromTable(cols, clusterByDef.Name) + clusterByDef.CompCbkeyCol = planner.GetColDefFromTable(cols, clusterByDef.Name) } if !hasRowId { - rowIdCol := plan2.MakeRowIdColDef() + rowIdCol := planner.MakeRowIdColDef() cols = append(cols, rowIdCol) } @@ -1519,7 +1519,7 @@ func (tbl *txnTable) GetTableDef(ctx context.Context) *plan.TableDef { func (tbl 
*txnTable) CopyTableDef(ctx context.Context) *plan.TableDef { tbl.GetTableDef(ctx) - return plan2.DeepCopyTableDef(tbl.tableDef, true) + return plan.DeepCopyTableDef(tbl.tableDef, true) } func (tbl *txnTable) UpdateConstraint(ctx context.Context, c *engine.ConstraintDef) error { @@ -2514,8 +2514,8 @@ func (tbl *txnTable) PKPersistedBetween( keys.InplaceSort() bytes, _ := keys.MarshalBinary() - colExpr := readutil.NewColumnExpr(0, plan2.MakePlan2Type(keys.GetType()), tbl.tableDef.Pkey.PkeyColName) - inExpr := plan2.MakeInExpr( + colExpr := readutil.NewColumnExpr(0, planner.MakePlan2Type(keys.GetType()), tbl.tableDef.Pkey.PkeyColName) + inExpr := planner.MakeInExpr( tbl.proc.Load().Ctx, colExpr, int32(keys.Length()), @@ -2550,7 +2550,7 @@ func (tbl *txnTable) PKPersistedBetween( //read block ,check if keys exist in the block. pkDef := tbl.tableDef.Cols[tbl.primaryIdx] pkSeq := pkDef.Seqnum - pkType := plan2.ExprType2Type(&pkDef.Typ) + pkType := planner.ExprType2Type(&pkDef.Typ) if len(candidateBlks) > 0 { v2.TxnPKChangeCheckIOCounter.Inc() } diff --git a/pkg/vm/engine/disttae/txn_table_combined.go b/pkg/vm/engine/disttae/txn_table_combined.go index c9fcaf37627dd..9caf0775b6b2e 100644 --- a/pkg/vm/engine/disttae/txn_table_combined.go +++ b/pkg/vm/engine/disttae/txn_table_combined.go @@ -24,7 +24,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/pb/api" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/pb/statsinfo" - splan "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/vm/engine" ) @@ -183,7 +183,7 @@ func (t *combinedTxnTable) Stats( return nil, err } - value := splan.NewStatsInfo() + value := planner.NewStatsInfo() for _, rel := range tables { v, err := rel.Stats(ctx, sync) if err != nil { diff --git a/pkg/vm/engine/disttae/txn_table_delegate.go 
b/pkg/vm/engine/disttae/txn_table_delegate.go index 4614e72677780..ecc38db4874ba 100644 --- a/pkg/vm/engine/disttae/txn_table_delegate.go +++ b/pkg/vm/engine/disttae/txn_table_delegate.go @@ -33,7 +33,7 @@ import ( pb "github.com/matrixorigin/matrixone/pkg/pb/statsinfo" "github.com/matrixorigin/matrixone/pkg/shardservice" "github.com/matrixorigin/matrixone/pkg/sql/features" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/vm/engine" "github.com/matrixorigin/matrixone/pkg/vm/engine/disttae/cache" @@ -576,7 +576,7 @@ func (tbl *txnTableDelegate) BuildShardingReaders( var rds []engine.Reader proc := p.(*process.Process) - if plan2.IsFalseExpr(expr) { + if planner.IsFalseExpr(expr) { return []engine.Reader{new(readutil.EmptyReader)}, nil } diff --git a/pkg/vm/engine/disttae/util_test.go b/pkg/vm/engine/disttae/util_test.go index 4dcad6a3e491f..436d56d6c9571 100644 --- a/pkg/vm/engine/disttae/util_test.go +++ b/pkg/vm/engine/disttae/util_test.go @@ -30,7 +30,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/objectio" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/vm/engine/readutil" "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/index" @@ -55,7 +55,7 @@ func TestCheckExprIsZonemappable(t *testing.T) { // a > 1 -> true {true, readutil.MakeFunctionExprForTest(">", []*plan.Expr{ readutil.MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(10), + planner.MakePlan2Int64ConstExprWithType(10), })}, // a >= b -> true {true, readutil.MakeFunctionExprForTest(">=", []*plan.Expr{ @@ -70,7 +70,7 @@ func TestCheckExprIsZonemappable(t *testing.T) { 
t.Run("test checkExprIsZonemappable", func(t *testing.T) { for i, testCase := range testCases { - zonemappable := plan2.ExprIsZonemappable(context.TODO(), testCase.expr) + zonemappable := planner.ExprIsZonemappable(context.TODO(), testCase.expr) if zonemappable != testCase.result { t.Fatalf("checkExprIsZonemappable testExprs[%d] is different with expected", i) } @@ -109,54 +109,54 @@ func TestEvalZonemapFilter(t *testing.T) { exprs: []*plan.Expr{ readutil.MakeFunctionExprForTest(">", []*plan.Expr{ readutil.MakeColExprForTest(0, types.T_float64), - plan2.MakePlan2Float64ConstExprWithType(10), + planner.MakePlan2Float64ConstExprWithType(10), }), readutil.MakeFunctionExprForTest(">", []*plan.Expr{ readutil.MakeColExprForTest(0, types.T_float64), - plan2.MakePlan2Float64ConstExprWithType(30), + planner.MakePlan2Float64ConstExprWithType(30), }), readutil.MakeFunctionExprForTest("<=", []*plan.Expr{ readutil.MakeColExprForTest(0, types.T_float64), - plan2.MakePlan2Float64ConstExprWithType(-10), + planner.MakePlan2Float64ConstExprWithType(-10), }), readutil.MakeFunctionExprForTest("<", []*plan.Expr{ readutil.MakeColExprForTest(0, types.T_float64), - plan2.MakePlan2Float64ConstExprWithType(-10), + planner.MakePlan2Float64ConstExprWithType(-10), }), readutil.MakeFunctionExprForTest(">", []*plan.Expr{ readutil.MakeFunctionExprForTest("+", []*plan.Expr{ readutil.MakeColExprForTest(0, types.T_float64), readutil.MakeColExprForTest(1, types.T_float64), }), - plan2.MakePlan2Float64ConstExprWithType(60), + planner.MakePlan2Float64ConstExprWithType(60), }), readutil.MakeFunctionExprForTest("<", []*plan.Expr{ readutil.MakeFunctionExprForTest("+", []*plan.Expr{ readutil.MakeColExprForTest(0, types.T_float64), readutil.MakeColExprForTest(1, types.T_float64), }), - plan2.MakePlan2Float64ConstExprWithType(-5), + planner.MakePlan2Float64ConstExprWithType(-5), }), readutil.MakeFunctionExprForTest("<", []*plan.Expr{ readutil.MakeFunctionExprForTest("-", []*plan.Expr{ 
readutil.MakeColExprForTest(0, types.T_float64), readutil.MakeColExprForTest(1, types.T_float64), }), - plan2.MakePlan2Float64ConstExprWithType(-34), + planner.MakePlan2Float64ConstExprWithType(-34), }), readutil.MakeFunctionExprForTest("<", []*plan.Expr{ readutil.MakeFunctionExprForTest("-", []*plan.Expr{ readutil.MakeColExprForTest(0, types.T_float64), readutil.MakeColExprForTest(1, types.T_float64), }), - plan2.MakePlan2Float64ConstExprWithType(-35), + planner.MakePlan2Float64ConstExprWithType(-35), }), readutil.MakeFunctionExprForTest("<=", []*plan.Expr{ readutil.MakeFunctionExprForTest("-", []*plan.Expr{ readutil.MakeColExprForTest(0, types.T_float64), readutil.MakeColExprForTest(1, types.T_float64), }), - plan2.MakePlan2Float64ConstExprWithType(-35), + planner.MakePlan2Float64ConstExprWithType(-35), }), readutil.MakeFunctionExprForTest(">", []*plan.Expr{ readutil.MakeColExprForTest(0, types.T_float64), @@ -166,47 +166,47 @@ func TestEvalZonemapFilter(t *testing.T) { readutil.MakeColExprForTest(0, types.T_float64), readutil.MakeFunctionExprForTest("+", []*plan.Expr{ readutil.MakeColExprForTest(1, types.T_float64), - plan2.MakePlan2Float64ConstExprWithType(15), + planner.MakePlan2Float64ConstExprWithType(15), }), }), readutil.MakeFunctionExprForTest(">=", []*plan.Expr{ readutil.MakeColExprForTest(0, types.T_float64), readutil.MakeFunctionExprForTest("+", []*plan.Expr{ readutil.MakeColExprForTest(1, types.T_float64), - plan2.MakePlan2Float64ConstExprWithType(15), + planner.MakePlan2Float64ConstExprWithType(15), }), }), readutil.MakeFunctionExprForTest("or", []*plan.Expr{ readutil.MakeFunctionExprForTest(">", []*plan.Expr{ readutil.MakeColExprForTest(0, types.T_float64), - plan2.MakePlan2Float64ConstExprWithType(100), + planner.MakePlan2Float64ConstExprWithType(100), }), readutil.MakeFunctionExprForTest(">", []*plan.Expr{ readutil.MakeColExprForTest(1, types.T_float64), - plan2.MakePlan2Float64ConstExprWithType(10), + 
planner.MakePlan2Float64ConstExprWithType(10), }), }), readutil.MakeFunctionExprForTest("and", []*plan.Expr{ readutil.MakeFunctionExprForTest(">", []*plan.Expr{ readutil.MakeColExprForTest(0, types.T_float64), - plan2.MakePlan2Float64ConstExprWithType(100), + planner.MakePlan2Float64ConstExprWithType(100), }), readutil.MakeFunctionExprForTest("<", []*plan.Expr{ readutil.MakeColExprForTest(1, types.T_float64), - plan2.MakePlan2Float64ConstExprWithType(0), + planner.MakePlan2Float64ConstExprWithType(0), }), }), readutil.MakeFunctionExprForTest(">", []*plan.Expr{ readutil.MakeColExprForTest(3, types.T_varchar), - plan2.MakePlan2StringConstExprWithType("xyz"), + planner.MakePlan2StringConstExprWithType("xyz"), }), readutil.MakeFunctionExprForTest("<=", []*plan.Expr{ readutil.MakeColExprForTest(3, types.T_varchar), - plan2.MakePlan2StringConstExprWithType("efg"), + planner.MakePlan2StringConstExprWithType("efg"), }), readutil.MakeFunctionExprForTest("<", []*plan.Expr{ readutil.MakeColExprForTest(3, types.T_varchar), - plan2.MakePlan2StringConstExprWithType("efg"), + planner.MakePlan2StringConstExprWithType("efg"), }), readutil.MakeFunctionExprForTest(">", []*plan.Expr{ readutil.MakeColExprForTest(2, types.T_varchar), @@ -237,7 +237,7 @@ func TestEvalZonemapFilter(t *testing.T) { for _, tc := range cases { for i, expr := range tc.exprs { - cnt := plan2.AssignAuxIdForExpr(expr, 0) + cnt := planner.AssignAuxIdForExpr(expr, 0) zms := make([]objectio.ZoneMap, cnt) vecs := make([]*vector.Vector, cnt) zm := colexec.EvaluateFilterByZoneMap(context.Background(), proc, expr, tc.meta, columnMap, zms, vecs) diff --git a/pkg/vm/engine/memoryengine/binded.go b/pkg/vm/engine/memoryengine/binded.go index 93b1debd929de..d0d30f22ad205 100644 --- a/pkg/vm/engine/memoryengine/binded.go +++ b/pkg/vm/engine/memoryengine/binded.go @@ -17,9 +17,9 @@ package memoryengine import ( "context" + "github.com/matrixorigin/matrixone/pkg/pb/plan" pb "github.com/matrixorigin/matrixone/pkg/pb/statsinfo" 
"github.com/matrixorigin/matrixone/pkg/pb/timestamp" - "github.com/matrixorigin/matrixone/pkg/sql/plan" "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/vm/engine" ) diff --git a/pkg/vm/engine/memoryengine/compiler_context.go b/pkg/vm/engine/memoryengine/compiler_context.go index 10ab71ca396ac..4e48539fd1029 100644 --- a/pkg/vm/engine/memoryengine/compiler_context.go +++ b/pkg/vm/engine/memoryengine/compiler_context.go @@ -22,18 +22,18 @@ import ( "github.com/matrixorigin/matrixone/pkg/catalog" "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/defines" - planpb "github.com/matrixorigin/matrixone/pkg/pb/plan" + "github.com/matrixorigin/matrixone/pkg/pb/plan" pb "github.com/matrixorigin/matrixone/pkg/pb/statsinfo" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/sql/parsers/tree" - "github.com/matrixorigin/matrixone/pkg/sql/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/vm/engine" "github.com/matrixorigin/matrixone/pkg/vm/process" ) -var _ plan.CompilerContext = &CompilerContext{} +var _ planner.CompilerContext = &CompilerContext{} type CompilerContext struct { ctx context.Context @@ -63,7 +63,7 @@ func (c *CompilerContext) SetSnapshot(snapshot *plan.Snapshot) { panic("implement me") } -func (c *CompilerContext) InitExecuteStmtParam(execPlan *planpb.Execute) (*planpb.Plan, tree.Statement, error) { +func (c *CompilerContext) InitExecuteStmtParam(execPlan *plan.Execute) (*plan.Plan, tree.Statement, error) { //TODO implement me panic("implement me") } @@ -120,7 +120,7 @@ func (e *Engine) NewCompilerContext( } } -var _ plan.CompilerContext = new(CompilerContext) +var _ planner.CompilerContext = new(CompilerContext) func (c 
*CompilerContext) ResolveUdf(name string, ast []*plan.Expr) (*function.Udf, error) { return nil, nil @@ -134,7 +134,7 @@ func (*CompilerContext) Stats(obj *plan.ObjectRef, snapshot *plan.Snapshot) (*pb return nil, nil } -func (*CompilerContext) GetStatsCache() *plan.StatsCache { +func (*CompilerContext) GetStatsCache() *planner.StatsCache { return nil } @@ -155,7 +155,7 @@ func (c *CompilerContext) DatabaseExists(name string, snapshot *plan.Snapshot) b ctx := c.GetContext() txnOpt := c.txnOp - if plan.IsSnapshotValid(snapshot) && snapshot.TS.Less(c.txnOp.Txn().SnapshotTS) { + if planner.IsSnapshotValid(snapshot) && snapshot.TS.Less(c.txnOp.Txn().SnapshotTS) { txnOpt = c.txnOp.CloneSnapshotOp(*snapshot.TS) if snapshot.Tenant != nil { @@ -175,7 +175,7 @@ func (c *CompilerContext) GetDatabaseId(dbName string, snapshot *plan.Snapshot) ctx := c.GetContext() txnOpt := c.txnOp - if plan.IsSnapshotValid(snapshot) && snapshot.TS.Less(c.txnOp.Txn().SnapshotTS) { + if planner.IsSnapshotValid(snapshot) && snapshot.TS.Less(c.txnOp.Txn().SnapshotTS) { txnOpt = c.txnOp.CloneSnapshotOp(*snapshot.TS) if snapshot.Tenant != nil { @@ -231,7 +231,7 @@ func (c *CompilerContext) ResolveById(tableId uint64, snapshot *plan.Snapshot) ( } func (c *CompilerContext) ResolveIndexTableByRef(ref *plan.ObjectRef, tblName string, snapshot *plan.Snapshot) (*plan.ObjectRef, *plan.TableDef, error) { - return c.Resolve(plan.DbNameOfObjRef(ref), tblName, snapshot) + return c.Resolve(planner.DbNameOfObjRef(ref), tblName, snapshot) } func (c *CompilerContext) Resolve(schemaName string, tableName string, snapshot *plan.Snapshot) (objRef *plan.ObjectRef, tableDef *plan.TableDef, err error) { @@ -288,7 +288,7 @@ func (c *CompilerContext) getTableAttrs(dbName string, tableName string, snapsho ctx := c.GetContext() txnOpt := c.txnOp - if plan.IsSnapshotValid(snapshot) && snapshot.TS.Less(c.txnOp.Txn().SnapshotTS) { + if planner.IsSnapshotValid(snapshot) && snapshot.TS.Less(c.txnOp.Txn().SnapshotTS) { txnOpt = 
c.txnOp.CloneSnapshotOp(*snapshot.TS) if snapshot.Tenant != nil { diff --git a/pkg/vm/engine/memoryengine/engine.go b/pkg/vm/engine/memoryengine/engine.go index 2e76cf54f5053..2a3054ec8145f 100644 --- a/pkg/vm/engine/memoryengine/engine.go +++ b/pkg/vm/engine/memoryengine/engine.go @@ -22,9 +22,9 @@ import ( "github.com/matrixorigin/matrixone/pkg/clusterservice" "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/pb/metadata" + "github.com/matrixorigin/matrixone/pkg/pb/plan" pb "github.com/matrixorigin/matrixone/pkg/pb/statsinfo" "github.com/matrixorigin/matrixone/pkg/pb/timestamp" - "github.com/matrixorigin/matrixone/pkg/sql/plan" "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/vm/engine" ) diff --git a/pkg/vm/engine/memoryengine/table.go b/pkg/vm/engine/memoryengine/table.go index 977efbcca41a2..18844d20c10c6 100644 --- a/pkg/vm/engine/memoryengine/table.go +++ b/pkg/vm/engine/memoryengine/table.go @@ -26,7 +26,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/pb/api" "github.com/matrixorigin/matrixone/pkg/pb/plan" pb "github.com/matrixorigin/matrixone/pkg/pb/statsinfo" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/sql/util" "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/vm/engine" @@ -254,25 +254,25 @@ func (t *Table) GetTableDef(ctx context.Context) *plan.TableDef { return nil } - var clusterByDef *plan2.ClusterByDef - var cols []*plan2.ColDef + var clusterByDef *plan.ClusterByDef + var cols []*plan.ColDef var schemaVersion uint32 - var defs []*plan2.TableDefType - var properties []*plan2.Property + var defs []*plan.TableDef_DefType + var properties []*plan.Property var TableType, Createsql string - var viewSql *plan2.ViewDef - var foreignKeys []*plan2.ForeignKeyDef - var primarykey *plan2.PrimaryKeyDef - var indexes 
[]*plan2.IndexDef + var viewSql *plan.ViewDef + var foreignKeys []*plan.ForeignKeyDef + var primarykey *plan.PrimaryKeyDef + var indexes []*plan.IndexDef var refChildTbls []uint64 for _, def := range engineDefs { if attr, ok := def.(*engine.AttributeDef); ok { - col := &plan2.ColDef{ + col := &plan.ColDef{ ColId: attr.Attr.ID, Name: strings.ToLower(attr.Attr.Name), OriginName: attr.Attr.Name, - Typ: plan2.Type{ + Typ: plan.Type{ Id: int32(attr.Attr.Type.Oid), Width: attr.Attr.Type.Width, Scale: attr.Attr.Type.Scale, @@ -304,13 +304,13 @@ func (t *Table) GetTableDef(ctx context.Context) *plan.TableDef { Createsql = p.Value default: } - properties = append(properties, &plan2.Property{ + properties = append(properties, &plan.Property{ Key: p.Key, Value: p.Value, }) } } else if viewDef, ok := def.(*engine.ViewDef); ok { - viewSql = &plan2.ViewDef{ + viewSql = &plan.ViewDef{ View: viewDef.View, } } else if c, ok := def.(*engine.ConstraintDef); ok { @@ -329,7 +329,7 @@ func (t *Table) GetTableDef(ctx context.Context) *plan.TableDef { } } } else if commnetDef, ok := def.(*engine.CommentDef); ok { - properties = append(properties, &plan2.Property{ + properties = append(properties, &plan.Property{ Key: catalog.SystemRelAttr_Comment, Value: commnetDef.Comment, }) @@ -338,9 +338,9 @@ func (t *Table) GetTableDef(ctx context.Context) *plan.TableDef { } } if len(properties) > 0 { - defs = append(defs, &plan2.TableDefType{ - Def: &plan2.TableDef_DefType_Properties{ - Properties: &plan2.PropertiesDef{ + defs = append(defs, &plan.TableDef_DefType{ + Def: &plan.TableDef_DefType_Properties{ + Properties: &plan.PropertiesDef{ Properties: properties, }, }, @@ -348,15 +348,15 @@ func (t *Table) GetTableDef(ctx context.Context) *plan.TableDef { } if primarykey != nil && primarykey.PkeyColName == catalog.CPrimaryKeyColName { - primarykey.CompPkeyCol = plan2.GetColDefFromTable(cols, catalog.CPrimaryKeyColName) + primarykey.CompPkeyCol = planner.GetColDefFromTable(cols, 
catalog.CPrimaryKeyColName) } if clusterByDef != nil && util.JudgeIsCompositeClusterByColumn(clusterByDef.Name) { - clusterByDef.CompCbkeyCol = plan2.GetColDefFromTable(cols, clusterByDef.Name) + clusterByDef.CompCbkeyCol = planner.GetColDefFromTable(cols, clusterByDef.Name) } - rowIdCol := plan2.MakeRowIdColDef() + rowIdCol := planner.MakeRowIdColDef() cols = append(cols, rowIdCol) - tableDef := &plan2.TableDef{ + tableDef := &plan.TableDef{ TblId: t.GetTableID(ctx), Name: t.tableName, Cols: cols, diff --git a/pkg/vm/engine/readutil/exec_util.go b/pkg/vm/engine/readutil/exec_util.go index f1b5a0619868c..22da66aba0a19 100644 --- a/pkg/vm/engine/readutil/exec_util.go +++ b/pkg/vm/engine/readutil/exec_util.go @@ -18,9 +18,6 @@ import ( "context" "time" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" - "github.com/matrixorigin/matrixone/pkg/vm/engine" - "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/common/mpool" "github.com/matrixorigin/matrixone/pkg/container/batch" @@ -30,8 +27,10 @@ import ( "github.com/matrixorigin/matrixone/pkg/objectio" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/pb/timestamp" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/util" v2 "github.com/matrixorigin/matrixone/pkg/util/metric/v2" + "github.com/matrixorigin/matrixone/pkg/vm/engine" "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/containers" ) @@ -140,7 +139,7 @@ func FilterObjects( } onObject := func(objStats *objectio.ObjectStats) (err error) { //if need to shuffle objects - if plan2.ShouldSkipObjByShuffle(rangesParam.Rsp, objStats) { + if planner.ShouldSkipObjByShuffle(rangesParam.Rsp, objStats) { return } var ok bool diff --git a/pkg/vm/engine/readutil/expr_util.go b/pkg/vm/engine/readutil/expr_util.go index 39163fe274d9e..ea6e98d14fa4b 100644 --- a/pkg/vm/engine/readutil/expr_util.go +++ b/pkg/vm/engine/readutil/expr_util.go @@ -18,8 +18,6 
@@ import ( "context" "strings" - "go.uber.org/zap" - "github.com/matrixorigin/matrixone/pkg/common/mpool" "github.com/matrixorigin/matrixone/pkg/container/batch" "github.com/matrixorigin/matrixone/pkg/container/types" @@ -27,11 +25,12 @@ import ( "github.com/matrixorigin/matrixone/pkg/logutil" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" - "github.com/matrixorigin/matrixone/pkg/sql/plan/rule" + "github.com/matrixorigin/matrixone/pkg/sql/function" + "github.com/matrixorigin/matrixone/pkg/sql/planner" + "github.com/matrixorigin/matrixone/pkg/sql/planner/rule" "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/common" "github.com/matrixorigin/matrixone/pkg/vm/process" + "go.uber.org/zap" ) func NewColumnExpr(pos int, typ plan.Type, name string) *plan.Expr { @@ -52,8 +51,8 @@ func ConstructInExpr( colVec *vector.Vector, ) *plan.Expr { data, _ := colVec.MarshalBinary() - colExpr := NewColumnExpr(0, plan2.MakePlan2Type(colVec.GetType()), colName) - return plan2.MakeInExpr( + colExpr := NewColumnExpr(0, planner.MakePlan2Type(colVec.GetType()), colName) + return planner.MakeInExpr( ctx, colExpr, int32(colVec.Length()), @@ -78,7 +77,7 @@ func getColDefByName(expr *plan.Expr, name string, colPos int32, tableDef *plan. 
zap.String("col-name", name), zap.Int32("col-actual-pos", colPos), zap.Int32("col-expected-pos", pos), - zap.String("col-expr", plan2.FormatExpr(expr, plan2.FormatOption{})), + zap.String("col-expr", planner.FormatExpr(expr, planner.FormatOption{})), ) } }) @@ -121,7 +120,7 @@ func evalValue( "Bad-ColExpr", zap.String("col-name", colName), zap.String("pk-name", pkName), - zap.String("col-expr", plan2.FormatExpr(expr, plan2.FormatOption{})), + zap.String("col-expr", planner.FormatExpr(expr, planner.FormatOption{})), ) } }) @@ -146,7 +145,7 @@ func evalValue( zap.String("col-name", colName), zap.Int32("col-actual-pos", col.Col.ColPos), zap.Int32("col-expected-pos", colPos), - zap.String("col-expr", plan2.FormatExpr(expr, plan2.FormatOption{})), + zap.String("col-expr", planner.FormatExpr(expr, planner.FormatOption{})), ) } }) @@ -222,7 +221,7 @@ func getConstBytesFromExpr(exprs []*plan.Expr) ([][]byte, bool) { vals[idx] = nil vals[idx] = append(vals[idx], fExpr.Fold.Data...) } else { - logutil.Warnf("const folded val expr is not a fold expr: %s\n", plan2.FormatExpr(exprs[idx], plan2.FormatOption{})) + logutil.Warnf("const folded val expr is not a fold expr: %s\n", planner.FormatExpr(exprs[idx], planner.FormatOption{})) return nil, false } } @@ -271,7 +270,7 @@ func mustColVecValueFromBinaryFuncExpr(expr *plan.Expr_F) (*plan.Expr_Col, []byt return colExpr, fExpr.Fold.Data, ok } - logutil.Warnf("const folded val expr is not a vec expr: %s\n", plan2.FormatExpr(valExpr, plan2.FormatOption{})) + logutil.Warnf("const folded val expr is not a vec expr: %s\n", planner.FormatExpr(valExpr, planner.FormatOption{})) return nil, nil, false } @@ -720,7 +719,7 @@ func MakeColExprForTest(idx int32, typ types.T, colName ...string) *plan.Expr { } containerType := typ.ToType() - exprType := plan2.MakePlan2Type(&containerType) + exprType := planner.MakePlan2Type(&containerType) return &plan.Expr{ Typ: exprType, @@ -737,7 +736,7 @@ func MakeColExprForTest(idx int32, typ types.T, colName 
...string) *plan.Expr { func MakeFunctionExprForTest(name string, args []*plan.Expr) *plan.Expr { argTypes := make([]types.Type, len(args)) for i, arg := range args { - argTypes[i] = plan2.MakeTypeByPlan2Expr(arg) + argTypes[i] = planner.MakeTypeByPlan2Expr(arg) } finfo, err := function.GetFunctionByName(context.TODO(), name, argTypes) @@ -748,7 +747,7 @@ func MakeFunctionExprForTest(name string, args []*plan.Expr) *plan.Expr { retTyp := finfo.GetReturnType() return &plan.Expr{ - Typ: plan2.MakePlan2Type(&retTyp), + Typ: planner.MakePlan2Type(&retTyp), Expr: &plan.Expr_F{ F: &plan.Function{ Func: &plan.ObjectRef{ @@ -784,7 +783,7 @@ func MakeInExprForTest[T any]( Args: []*plan.Expr{ arg0, { - Typ: plan2.MakePlan2Type(vec.GetType()), + Typ: planner.MakePlan2Type(vec.GetType()), Expr: &plan.Expr_Vec{ Vec: &plan.LiteralVec{ Len: int32(len(vals)), diff --git a/pkg/vm/engine/readutil/expr_util_test.go b/pkg/vm/engine/readutil/expr_util_test.go index 12e086cc2cf2f..7822e0cc0fbab 100644 --- a/pkg/vm/engine/readutil/expr_util_test.go +++ b/pkg/vm/engine/readutil/expr_util_test.go @@ -22,7 +22,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/pb/plan" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/stretchr/testify/require" ) @@ -39,24 +39,24 @@ func TestGetNonIntPkValueByExpr(t *testing.T) { // a > "a" false only 'and', '=' function is supported {false, 0, MakeFunctionExprForTest(">", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2StringConstExprWithType("a"), + planner.MakePlan2StringConstExprWithType("a"), }), types.T_int64}, // a = 100 true {true, int64(100), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(100), + 
planner.MakePlan2Int64ConstExprWithType(100), }), types.T_int64}, // b > 10 and a = "abc" true {true, []byte("abc"), MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest(">", []*plan.Expr{ MakeColExprForTest(1, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(10), + planner.MakePlan2Int64ConstExprWithType(10), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2StringConstExprWithType("abc"), + planner.MakePlan2StringConstExprWithType("abc"), }), }), types.T_char}, } @@ -110,27 +110,27 @@ func TestGetPKExpr(t *testing.T) { // a=10 MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(10), + planner.MakePlan2Int64ConstExprWithType(10), }), // a=20 and a=10 MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(20), + planner.MakePlan2Int64ConstExprWithType(20), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(10), + planner.MakePlan2Int64ConstExprWithType(10), }), }), // 30=a and 20=a MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest("=", []*plan.Expr{ - plan2.MakePlan2Int64ConstExprWithType(30), + planner.MakePlan2Int64ConstExprWithType(30), MakeColExprForTest(0, types.T_int64), }), MakeFunctionExprForTest("=", []*plan.Expr{ - plan2.MakePlan2Int64ConstExprWithType(20), + planner.MakePlan2Int64ConstExprWithType(20), MakeColExprForTest(0, types.T_int64), }), }), @@ -144,47 +144,47 @@ func TestGetPKExpr(t *testing.T) { MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(1, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(40), + planner.MakePlan2Int64ConstExprWithType(40), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - 
plan2.MakePlan2Int64ConstExprWithType(50), + planner.MakePlan2Int64ConstExprWithType(50), }), }), MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(60), + planner.MakePlan2Int64ConstExprWithType(60), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(1, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(70), + planner.MakePlan2Int64ConstExprWithType(70), }), }), MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(1, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(80), + planner.MakePlan2Int64ConstExprWithType(80), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(2, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(90), + planner.MakePlan2Int64ConstExprWithType(90), }), }), MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(60), + planner.MakePlan2Int64ConstExprWithType(60), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(70), + planner.MakePlan2Int64ConstExprWithType(70), }), }), MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(60), + planner.MakePlan2Int64ConstExprWithType(60), }), MakeInExprForTest[int64]( MakeColExprForTest(0, types.T_int64), @@ -197,65 +197,65 @@ func TestGetPKExpr(t *testing.T) { MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(10), + planner.MakePlan2Int64ConstExprWithType(10), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(1, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(20), + 
planner.MakePlan2Int64ConstExprWithType(20), }), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(30), + planner.MakePlan2Int64ConstExprWithType(30), }), }), MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(10), + planner.MakePlan2Int64ConstExprWithType(10), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(1, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(20), + planner.MakePlan2Int64ConstExprWithType(20), }), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(30), + planner.MakePlan2Int64ConstExprWithType(30), }), }), MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(1, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(10), + planner.MakePlan2Int64ConstExprWithType(10), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(20), + planner.MakePlan2Int64ConstExprWithType(20), }), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(30), + planner.MakePlan2Int64ConstExprWithType(30), }), }), }, valExprs: []*plan.Expr{ - plan2.MakePlan2Int64ConstExprWithType(10), - plan2.MakePlan2Int64ConstExprWithType(20), - plan2.MakePlan2Int64ConstExprWithType(30), - plan2.MakePlan2Int64VecExprWithType(m, int64(1), int64(2)), - plan2.MakePlan2Int64ConstExprWithType(50), + planner.MakePlan2Int64ConstExprWithType(10), + planner.MakePlan2Int64ConstExprWithType(20), + planner.MakePlan2Int64ConstExprWithType(30), + planner.MakePlan2Int64VecExprWithType(m, int64(1), int64(2)), + 
planner.MakePlan2Int64ConstExprWithType(50), nil, nil, { Expr: &plan.Expr_List{ List: &plan.ExprList{ List: []*plan.Expr{ - plan2.MakePlan2Int64ConstExprWithType(60), - plan2.MakePlan2Int64ConstExprWithType(70), + planner.MakePlan2Int64ConstExprWithType(60), + planner.MakePlan2Int64ConstExprWithType(70), }, }, }, @@ -268,8 +268,8 @@ func TestGetPKExpr(t *testing.T) { Expr: &plan.Expr_List{ List: &plan.ExprList{ List: []*plan.Expr{ - plan2.MakePlan2Int64ConstExprWithType(60), - plan2.MakePlan2Int64VecExprWithType(m, int64(70), int64(80)), + planner.MakePlan2Int64ConstExprWithType(60), + planner.MakePlan2Int64VecExprWithType(m, int64(70), int64(80)), }, }, }, @@ -279,13 +279,13 @@ func TestGetPKExpr(t *testing.T) { }, }, nil, - plan2.MakePlan2Int64ConstExprWithType(30), + planner.MakePlan2Int64ConstExprWithType(30), { Expr: &plan.Expr_List{ List: &plan.ExprList{ List: []*plan.Expr{ - plan2.MakePlan2Int64ConstExprWithType(20), - plan2.MakePlan2Int64ConstExprWithType(30), + planner.MakePlan2Int64ConstExprWithType(20), + planner.MakePlan2Int64ConstExprWithType(30), }, }, }, @@ -300,7 +300,7 @@ func TestGetPKExpr(t *testing.T) { for i, expr := range tc.exprs { rExpr := getPkExpr(expr, pkName, proc) // if rExpr != nil { - // t.Logf("%s||||%s||||%s", plan2.FormatExpr(expr), plan2.FormatExpr(rExpr), tc.desc[i]) + // t.Logf("%s||||%s||||%s", planner.FormatExpr(expr), planner.FormatExpr(rExpr), tc.desc[i]) // } require.Equalf(t, tc.valExprs[i], rExpr, tc.desc[i]) } @@ -339,7 +339,7 @@ func TestGetPkExprValue(t *testing.T) { return expect[0] == actualVal } - nullExpr := plan2.MakePlan2Int64ConstExprWithType(0) + nullExpr := planner.MakePlan2Int64ConstExprWithType(0) nullExpr.Expr.(*plan.Expr_Lit).Lit.Isnull = true tc := testCase{ @@ -361,52 +361,52 @@ func TestGetPkExprValue(t *testing.T) { MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(2), + 
planner.MakePlan2Int64ConstExprWithType(2), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(1), + planner.MakePlan2Int64ConstExprWithType(1), }), }), MakeFunctionExprForTest("in", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64VecExprWithType(m, int64(1), int64(2)), + planner.MakePlan2Int64VecExprWithType(m, int64(1), int64(2)), }), MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(2), + planner.MakePlan2Int64ConstExprWithType(2), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(1), + planner.MakePlan2Int64ConstExprWithType(1), }), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(3), + planner.MakePlan2Int64ConstExprWithType(3), }), }), MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest("in", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64VecExprWithType(m, int64(1), int64(10)), + planner.MakePlan2Int64VecExprWithType(m, int64(1), int64(10)), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(5), + planner.MakePlan2Int64ConstExprWithType(5), }), }), MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(6), + planner.MakePlan2Int64ConstExprWithType(6), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(7), + planner.MakePlan2Int64ConstExprWithType(7), }), }), }), @@ -418,7 +418,7 @@ func TestGetPkExprValue(t 
*testing.T) { MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(1), + planner.MakePlan2Int64ConstExprWithType(1), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), @@ -427,7 +427,7 @@ func TestGetPkExprValue(t *testing.T) { }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(2), + planner.MakePlan2Int64ConstExprWithType(2), }), }), }, @@ -450,7 +450,7 @@ func TestGetPkExprValue(t *testing.T) { } expr := MakeFunctionExprForTest("in", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64VecExprWithType(m, int64(1), int64(10)), + planner.MakePlan2Int64VecExprWithType(m, int64(1), int64(10)), }) canEval, _, _, _ := getPkValueByExpr(expr, "a", types.T_int64, true, proc) require.False(t, canEval) @@ -459,7 +459,7 @@ func TestGetPkExprValue(t *testing.T) { expr = MakeFunctionExprForTest("in", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64VecExprWithType(m, int64(1)), + planner.MakePlan2Int64VecExprWithType(m, int64(1)), }) canEval, _, _, val := getPkValueByExpr(expr, "a", types.T_int64, true, proc) require.True(t, canEval) @@ -495,30 +495,30 @@ func TestEvalExprListToVec(t *testing.T) { { List: &plan.ExprList{ List: []*plan.Expr{ - plan2.MakePlan2Int64ConstExprWithType(2), - plan2.MakePlan2Int64ConstExprWithType(1), + planner.MakePlan2Int64ConstExprWithType(2), + planner.MakePlan2Int64ConstExprWithType(1), }, }, }, { List: &plan.ExprList{ List: []*plan.Expr{ - plan2.MakePlan2Int64ConstExprWithType(1), - plan2.MakePlan2Int64ConstExprWithType(2), + planner.MakePlan2Int64ConstExprWithType(1), + planner.MakePlan2Int64ConstExprWithType(2), }, }, }, { List: &plan.ExprList{ List: []*plan.Expr{ - plan2.MakePlan2Int64ConstExprWithType(2), - plan2.MakePlan2Int64VecExprWithType(m, int64(1), int64(10)), + 
planner.MakePlan2Int64ConstExprWithType(2), + planner.MakePlan2Int64VecExprWithType(m, int64(1), int64(10)), { Expr: &plan.Expr_List{ List: &plan.ExprList{ List: []*plan.Expr{ - plan2.MakePlan2Int64VecExprWithType(m, int64(4), int64(8)), - plan2.MakePlan2Int64ConstExprWithType(5), + planner.MakePlan2Int64VecExprWithType(m, int64(4), int64(8)), + planner.MakePlan2Int64ConstExprWithType(5), }, }, }, @@ -529,14 +529,14 @@ func TestEvalExprListToVec(t *testing.T) { { List: &plan.ExprList{ List: []*plan.Expr{ - plan2.MakePlan2Int64ConstExprWithType(2), - plan2.MakePlan2Int64VecExprWithType(m, int64(1), int64(10)), + planner.MakePlan2Int64ConstExprWithType(2), + planner.MakePlan2Int64VecExprWithType(m, int64(1), int64(10)), { Expr: &plan.Expr_List{ List: &plan.ExprList{ List: []*plan.Expr{ - plan2.MakePlan2Int64VecExprWithType(m, int64(4), int64(8)), - plan2.MakePlan2Int64ConstExprWithType(5), + planner.MakePlan2Int64VecExprWithType(m, int64(4), int64(8)), + planner.MakePlan2Int64ConstExprWithType(5), }, }, }, @@ -570,7 +570,7 @@ func TestEvalExprListToVec(t *testing.T) { for i, expr := range tc.exprs { // for _, e2 := range expr.List.List { - // t.Log(plan2.FormatExpr(e2)) + // t.Log(planner.FormatExpr(e2)) // } canEval, vec, put := evalExprListToVec(tc.oids[i], expr, proc) require.Equalf(t, tc.canEvals[i], canEval, tc.desc[i]) diff --git a/pkg/vm/engine/readutil/filter_test.go b/pkg/vm/engine/readutil/filter_test.go index 6931e5ab55f48..272241d44f14b 100644 --- a/pkg/vm/engine/readutil/filter_test.go +++ b/pkg/vm/engine/readutil/filter_test.go @@ -24,17 +24,16 @@ import ( "testing" "time" - "github.com/matrixorigin/matrixone/pkg/common/moerr" - "github.com/matrixorigin/matrixone/pkg/objectio" - "github.com/matrixorigin/matrixone/pkg/common/bloomfilter" + "github.com/matrixorigin/matrixone/pkg/common/moerr" "github.com/matrixorigin/matrixone/pkg/common/mpool" "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" 
+ "github.com/matrixorigin/matrixone/pkg/objectio" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" + "github.com/matrixorigin/matrixone/pkg/sql/planner" "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/common" "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/containers" @@ -198,27 +197,27 @@ func Test_ConstructBasePKFilter(t *testing.T) { // a=10 MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(10), + planner.MakePlan2Int64ConstExprWithType(10), }), // a=20 and a=10 MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(20), + planner.MakePlan2Int64ConstExprWithType(20), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(10), + planner.MakePlan2Int64ConstExprWithType(10), }), }), // 30=a and 20=a MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest("=", []*plan.Expr{ - plan2.MakePlan2Int64ConstExprWithType(30), + planner.MakePlan2Int64ConstExprWithType(30), MakeColExprForTest(0, types.T_int64), }), MakeFunctionExprForTest("=", []*plan.Expr{ - plan2.MakePlan2Int64ConstExprWithType(20), + planner.MakePlan2Int64ConstExprWithType(20), MakeColExprForTest(0, types.T_int64), }), }), @@ -232,47 +231,47 @@ func Test_ConstructBasePKFilter(t *testing.T) { MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(1, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(40), + planner.MakePlan2Int64ConstExprWithType(40), }), MakeFunctionExprForTest("=", []*plan.Expr{ 
MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(50), + planner.MakePlan2Int64ConstExprWithType(50), }), }), MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(60), + planner.MakePlan2Int64ConstExprWithType(60), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(1, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(70), + planner.MakePlan2Int64ConstExprWithType(70), }), }), MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(1, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(80), + planner.MakePlan2Int64ConstExprWithType(80), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(2, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(90), + planner.MakePlan2Int64ConstExprWithType(90), }), }), MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(60), + planner.MakePlan2Int64ConstExprWithType(60), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(70), + planner.MakePlan2Int64ConstExprWithType(70), }), }), MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(60), + planner.MakePlan2Int64ConstExprWithType(60), }), MakeInExprForTest[int64]( MakeColExprForTest(0, types.T_int64), @@ -285,48 +284,48 @@ func Test_ConstructBasePKFilter(t *testing.T) { MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(10), + planner.MakePlan2Int64ConstExprWithType(10), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(1, 
types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(20), + planner.MakePlan2Int64ConstExprWithType(20), }), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(30), + planner.MakePlan2Int64ConstExprWithType(30), }), }), MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(10), + planner.MakePlan2Int64ConstExprWithType(10), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(1, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(20), + planner.MakePlan2Int64ConstExprWithType(20), }), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(30), + planner.MakePlan2Int64ConstExprWithType(30), }), }), MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(1, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(10), + planner.MakePlan2Int64ConstExprWithType(10), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(20), + planner.MakePlan2Int64ConstExprWithType(20), }), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(30), + planner.MakePlan2Int64ConstExprWithType(30), }), }), @@ -334,41 +333,41 @@ func Test_ConstructBasePKFilter(t *testing.T) { MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest(">=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(1), + planner.MakePlan2Int64ConstExprWithType(1), }), MakeFunctionExprForTest("<=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(3), + 
planner.MakePlan2Int64ConstExprWithType(3), }), }), MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest(">", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(1), + planner.MakePlan2Int64ConstExprWithType(1), }), MakeFunctionExprForTest("<=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(3), + planner.MakePlan2Int64ConstExprWithType(3), }), }), MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest(">=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(1), + planner.MakePlan2Int64ConstExprWithType(1), }), MakeFunctionExprForTest("<", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(3), + planner.MakePlan2Int64ConstExprWithType(3), }), }), MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest(">", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(1), + planner.MakePlan2Int64ConstExprWithType(1), }), MakeFunctionExprForTest("<", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(3), + planner.MakePlan2Int64ConstExprWithType(3), }), }), @@ -376,41 +375,41 @@ func Test_ConstructBasePKFilter(t *testing.T) { MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest(">=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(1), + planner.MakePlan2Int64ConstExprWithType(1), }), MakeFunctionExprForTest("<=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(3), + planner.MakePlan2Int64ConstExprWithType(3), }), }), MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest(">", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(1), + planner.MakePlan2Int64ConstExprWithType(1), }), MakeFunctionExprForTest("<=", []*plan.Expr{ 
MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(3), + planner.MakePlan2Int64ConstExprWithType(3), }), }), MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest(">=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(1), + planner.MakePlan2Int64ConstExprWithType(1), }), MakeFunctionExprForTest("<", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(3), + planner.MakePlan2Int64ConstExprWithType(3), }), }), MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest(">", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(1), + planner.MakePlan2Int64ConstExprWithType(1), }), MakeFunctionExprForTest("<", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(3), + planner.MakePlan2Int64ConstExprWithType(3), }), }), @@ -418,41 +417,41 @@ func Test_ConstructBasePKFilter(t *testing.T) { MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest(">", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(1), + planner.MakePlan2Int64ConstExprWithType(1), }), MakeFunctionExprForTest(">", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(3), + planner.MakePlan2Int64ConstExprWithType(3), }), }), MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest(">=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(1), + planner.MakePlan2Int64ConstExprWithType(1), }), MakeFunctionExprForTest(">", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(3), + planner.MakePlan2Int64ConstExprWithType(3), }), }), MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest(">=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(1), + 
planner.MakePlan2Int64ConstExprWithType(1), }), MakeFunctionExprForTest(">=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(3), + planner.MakePlan2Int64ConstExprWithType(3), }), }), MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest(">", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(1), + planner.MakePlan2Int64ConstExprWithType(1), }), MakeFunctionExprForTest(">=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(3), + planner.MakePlan2Int64ConstExprWithType(3), }), }), @@ -460,41 +459,41 @@ func Test_ConstructBasePKFilter(t *testing.T) { MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest(">", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(1), + planner.MakePlan2Int64ConstExprWithType(1), }), MakeFunctionExprForTest(">", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(3), + planner.MakePlan2Int64ConstExprWithType(3), }), }), MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest(">=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(1), + planner.MakePlan2Int64ConstExprWithType(1), }), MakeFunctionExprForTest(">", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(3), + planner.MakePlan2Int64ConstExprWithType(3), }), }), MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest(">=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(1), + planner.MakePlan2Int64ConstExprWithType(1), }), MakeFunctionExprForTest(">=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(3), + planner.MakePlan2Int64ConstExprWithType(3), }), }), MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest(">", []*plan.Expr{ 
MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(1), + planner.MakePlan2Int64ConstExprWithType(1), }), MakeFunctionExprForTest(">=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(3), + planner.MakePlan2Int64ConstExprWithType(3), }), }), @@ -502,41 +501,41 @@ func Test_ConstructBasePKFilter(t *testing.T) { MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest("<", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(1), + planner.MakePlan2Int64ConstExprWithType(1), }), MakeFunctionExprForTest("<", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(3), + planner.MakePlan2Int64ConstExprWithType(3), }), }), MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest("<=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(1), + planner.MakePlan2Int64ConstExprWithType(1), }), MakeFunctionExprForTest("<", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(3), + planner.MakePlan2Int64ConstExprWithType(3), }), }), MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest("<=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(1), + planner.MakePlan2Int64ConstExprWithType(1), }), MakeFunctionExprForTest("<=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(3), + planner.MakePlan2Int64ConstExprWithType(3), }), }), MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest("<", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(1), + planner.MakePlan2Int64ConstExprWithType(1), }), MakeFunctionExprForTest("<=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(3), + planner.MakePlan2Int64ConstExprWithType(3), }), }), @@ -544,41 
+543,41 @@ func Test_ConstructBasePKFilter(t *testing.T) { MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest("<", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(1), + planner.MakePlan2Int64ConstExprWithType(1), }), MakeFunctionExprForTest("<", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(3), + planner.MakePlan2Int64ConstExprWithType(3), }), }), MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest("<=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(1), + planner.MakePlan2Int64ConstExprWithType(1), }), MakeFunctionExprForTest("<", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(3), + planner.MakePlan2Int64ConstExprWithType(3), }), }), MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest("<=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(1), + planner.MakePlan2Int64ConstExprWithType(1), }), MakeFunctionExprForTest("<=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(3), + planner.MakePlan2Int64ConstExprWithType(3), }), }), MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest("<", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(1), + planner.MakePlan2Int64ConstExprWithType(1), }), MakeFunctionExprForTest("<=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(3), + planner.MakePlan2Int64ConstExprWithType(3), }), }), @@ -586,61 +585,61 @@ func Test_ConstructBasePKFilter(t *testing.T) { MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest("<", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(10), + planner.MakePlan2Int64ConstExprWithType(10), }), MakeFunctionExprForTest("=", []*plan.Expr{ 
MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(5), + planner.MakePlan2Int64ConstExprWithType(5), }), }), MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest("<", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(10), + planner.MakePlan2Int64ConstExprWithType(10), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(15), + planner.MakePlan2Int64ConstExprWithType(15), }), }), MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest(">", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(10), + planner.MakePlan2Int64ConstExprWithType(10), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(5), + planner.MakePlan2Int64ConstExprWithType(5), }), }), MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest(">", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(10), + planner.MakePlan2Int64ConstExprWithType(10), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(15), + planner.MakePlan2Int64ConstExprWithType(15), }), }), MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest(">=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(10), + planner.MakePlan2Int64ConstExprWithType(10), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(10), + planner.MakePlan2Int64ConstExprWithType(10), }), }), MakeFunctionExprForTest("and", []*plan.Expr{ MakeFunctionExprForTest("<=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(10), + planner.MakePlan2Int64ConstExprWithType(10), }), MakeFunctionExprForTest("=", 
[]*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(10), + planner.MakePlan2Int64ConstExprWithType(10), }), }), @@ -648,61 +647,61 @@ func Test_ConstructBasePKFilter(t *testing.T) { MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest("<", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(10), + planner.MakePlan2Int64ConstExprWithType(10), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(5), + planner.MakePlan2Int64ConstExprWithType(5), }), }), MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest("<", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(10), + planner.MakePlan2Int64ConstExprWithType(10), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(15), + planner.MakePlan2Int64ConstExprWithType(15), }), }), MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest(">", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(10), + planner.MakePlan2Int64ConstExprWithType(10), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(5), + planner.MakePlan2Int64ConstExprWithType(5), }), }), MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest(">", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(10), + planner.MakePlan2Int64ConstExprWithType(10), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(15), + planner.MakePlan2Int64ConstExprWithType(15), }), }), MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest(">=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(10), + 
planner.MakePlan2Int64ConstExprWithType(10), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(10), + planner.MakePlan2Int64ConstExprWithType(10), }), }), MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest("<=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(10), + planner.MakePlan2Int64ConstExprWithType(10), }), MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(10), + planner.MakePlan2Int64ConstExprWithType(10), }), }), } @@ -739,10 +738,10 @@ func Test_ConstructBasePKFilter(t *testing.T) { var exes []colexec.ExpressionExecutor for _, expr := range exprs { - plan2.ReplaceFoldExpr(proc, expr, &exes) + planner.ReplaceFoldExpr(proc, expr, &exes) } for i, expr := range exprs { - plan2.EvalFoldExpr(proc, expr, &exes) + planner.EvalFoldExpr(proc, expr, &exes) BasePKFilter, err := ConstructBasePKFilter(expr, tableDef, proc.Mp()) require.NoError(t, err) @@ -804,14 +803,14 @@ func TestConstructBasePKFilterWithOr(t *testing.T) { makeEq := func(v int64) *plan.Expr { return MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(v), + planner.MakePlan2Int64ConstExprWithType(v), }) } makeLessThan := func(v int64) *plan.Expr { return MakeFunctionExprForTest("<", []*plan.Expr{ MakeColExprForTest(0, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(v), + planner.MakePlan2Int64ConstExprWithType(v), }) } @@ -878,7 +877,7 @@ func TestConstructBasePKFilterWithOr(t *testing.T) { expr: MakeFunctionExprForTest("or", []*plan.Expr{ MakeFunctionExprForTest("=", []*plan.Expr{ MakeColExprForTest(1, types.T_int64), - plan2.MakePlan2Int64ConstExprWithType(2), + planner.MakePlan2Int64ConstExprWithType(2), }), makeEq(3), }), @@ -927,10 +926,10 @@ func TestConstructBasePKFilterWithOr(t *testing.T) { var exes 
[]colexec.ExpressionExecutor for i := range testCases { - plan2.ReplaceFoldExpr(proc, testCases[i].expr, &exes) + planner.ReplaceFoldExpr(proc, testCases[i].expr, &exes) } for i := range testCases { - plan2.EvalFoldExpr(proc, testCases[i].expr, &exes) + planner.EvalFoldExpr(proc, testCases[i].expr, &exes) basePKFilter, err := ConstructBasePKFilter(testCases[i].expr, tableDef, proc.Mp()) require.NoError(t, err, testCases[i].name) diff --git a/pkg/vm/engine/readutil/pk_filter.go b/pkg/vm/engine/readutil/pk_filter.go index 7438a83136068..5cc6042ec3422 100644 --- a/pkg/vm/engine/readutil/pk_filter.go +++ b/pkg/vm/engine/readutil/pk_filter.go @@ -24,7 +24,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/logutil" "github.com/matrixorigin/matrixone/pkg/objectio" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/containers" "go.uber.org/zap" ) diff --git a/pkg/vm/engine/readutil/pk_filter_base.go b/pkg/vm/engine/readutil/pk_filter_base.go index af4e758358a3a..fd7e707fb8837 100644 --- a/pkg/vm/engine/readutil/pk_filter_base.go +++ b/pkg/vm/engine/readutil/pk_filter_base.go @@ -22,7 +22,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/pb/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" ) const ( @@ -329,7 +329,7 @@ func ConstructBasePKFilter( //panic(name) } default: - //panic(plan2.FormatExpr(expr)) + //panic(planner.FormatExpr(expr)) } return diff --git a/pkg/vm/engine/readutil/pk_filter_mem.go b/pkg/vm/engine/readutil/pk_filter_mem.go index 6f610d0c2b8c7..1042402766d53 100644 --- a/pkg/vm/engine/readutil/pk_filter_mem.go +++ b/pkg/vm/engine/readutil/pk_filter_mem.go @@ -26,7 +26,7 @@ import ( 
"github.com/matrixorigin/matrixone/pkg/objectio" "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/pb/timestamp" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" "github.com/matrixorigin/matrixone/pkg/vm/engine" "go.uber.org/zap" ) diff --git a/pkg/vm/engine/readutil/pk_filter_mem_test.go b/pkg/vm/engine/readutil/pk_filter_mem_test.go index 4061189bbb153..d91cf627cf56c 100644 --- a/pkg/vm/engine/readutil/pk_filter_mem_test.go +++ b/pkg/vm/engine/readutil/pk_filter_mem_test.go @@ -17,19 +17,18 @@ package readutil import ( "testing" - "github.com/matrixorigin/matrixone/pkg/common/mpool" - "github.com/matrixorigin/matrixone/pkg/container/vector" - "github.com/matrixorigin/matrixone/pkg/objectio" - "github.com/matrixorigin/matrixone/pkg/vm/engine" - "github.com/stretchr/testify/require" - "github.com/matrixorigin/matrixone/pkg/catalog" "github.com/matrixorigin/matrixone/pkg/common/bloomfilter" + "github.com/matrixorigin/matrixone/pkg/common/mpool" "github.com/matrixorigin/matrixone/pkg/container/types" + "github.com/matrixorigin/matrixone/pkg/container/vector" "github.com/matrixorigin/matrixone/pkg/fileservice" + "github.com/matrixorigin/matrixone/pkg/objectio" "github.com/matrixorigin/matrixone/pkg/pb/plan" - "github.com/matrixorigin/matrixone/pkg/sql/plan/function" + "github.com/matrixorigin/matrixone/pkg/sql/function" + "github.com/matrixorigin/matrixone/pkg/vm/engine" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestNewMemPKFilter_WithBF(t *testing.T) { diff --git a/pkg/vm/engine/readutil/reader.go b/pkg/vm/engine/readutil/reader.go index 9cc154a3342a9..ccabd2d75b643 100644 --- a/pkg/vm/engine/readutil/reader.go +++ b/pkg/vm/engine/readutil/reader.go @@ -35,7 +35,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/pb/plan" "github.com/matrixorigin/matrixone/pkg/pb/timestamp" 
"github.com/matrixorigin/matrixone/pkg/perfcounter" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" v2 "github.com/matrixorigin/matrixone/pkg/util/metric/v2" "github.com/matrixorigin/matrixone/pkg/vectorindex/metric" "github.com/matrixorigin/matrixone/pkg/vm/engine" @@ -72,7 +72,7 @@ func (mixin *withFilterMixin) tryUpdateTombstoneColumns(cols []string) { mixin.columns.seqnums = []uint16{0, 1} mixin.columns.colTypes = []types.Type{ types.T_Rowid.ToType(), - plan2.ExprType2Type(&pkCol.Typ)} + planner.ExprType2Type(&pkCol.Typ)} mixin.columns.colTypes[1].Scale = pkCol.Typ.Scale mixin.columns.colTypes[1].Width = pkCol.Typ.Width @@ -128,7 +128,7 @@ func (mixin *withFilterMixin) tryUpdateColumns(cols []string) { mixin.columns.colTypes[i] = objectio.RowidType mixin.columns.phyAddrPos = i } else { - if plan2.GetSortOrderByName(mixin.tableDef, column) == 0 { + if planner.GetSortOrderByName(mixin.tableDef, column) == 0 { mixin.columns.indexOfFirstSortedColumn = i } colIdx := mixin.tableDef.Name2ColIndex[column] @@ -139,7 +139,7 @@ func (mixin *withFilterMixin) tryUpdateColumns(cols []string) { // primary key is in the cols pkPos = i } - mixin.columns.colTypes[i] = plan2.ExprType2Type(&colDef.Typ) + mixin.columns.colTypes[i] = planner.ExprType2Type(&colDef.Typ) mixin.columns.colTypes[i].Scale = colDef.Typ.Scale mixin.columns.colTypes[i].Width = colDef.Typ.Width } diff --git a/pkg/vm/engine/readutil/relation_data.go b/pkg/vm/engine/readutil/relation_data.go index 835d98ee35952..9a5a84257d448 100644 --- a/pkg/vm/engine/readutil/relation_data.go +++ b/pkg/vm/engine/readutil/relation_data.go @@ -22,7 +22,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/container/types" "github.com/matrixorigin/matrixone/pkg/objectio" "github.com/matrixorigin/matrixone/pkg/pb/plan" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" 
"github.com/matrixorigin/matrixone/pkg/vm/engine" ) @@ -295,7 +295,7 @@ func (or *ObjListRelData) Split(cpunum int) []engine.RelData { result[0].AppendBlockInfo(&objectio.EmptyBlockInfo) } for i := range or.Objlist { - shuffleIDX := int(plan2.CalcRangeShuffleIDXForObj(rsp, &or.Objlist[i], int(rsp.CNCNT)*cpunum)) % cpunum + shuffleIDX := int(planner.CalcRangeShuffleIDXForObj(rsp, &or.Objlist[i], int(rsp.CNCNT)*cpunum)) % cpunum blks := objectio.ObjectStatsToBlockInfoSlice(&or.Objlist[i], false) result[shuffleIDX].AppendBlockInfoSlice(blks) } diff --git a/pkg/vm/engine/test/reader_test.go b/pkg/vm/engine/test/reader_test.go index 91bb392e66e12..d1dca672dac6e 100644 --- a/pkg/vm/engine/test/reader_test.go +++ b/pkg/vm/engine/test/reader_test.go @@ -21,8 +21,6 @@ import ( "testing" "time" - "github.com/stretchr/testify/require" - "github.com/matrixorigin/matrixone/pkg/catalog" "github.com/matrixorigin/matrixone/pkg/common/morpc" "github.com/matrixorigin/matrixone/pkg/common/mpool" @@ -37,7 +35,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/pb/shard" "github.com/matrixorigin/matrixone/pkg/pb/timestamp" "github.com/matrixorigin/matrixone/pkg/sql/colexec" - plan2 "github.com/matrixorigin/matrixone/pkg/sql/plan" + "github.com/matrixorigin/matrixone/pkg/sql/planner" testutil3 "github.com/matrixorigin/matrixone/pkg/testutil" "github.com/matrixorigin/matrixone/pkg/txn/client" "github.com/matrixorigin/matrixone/pkg/util/fault" @@ -51,6 +49,7 @@ import ( "github.com/matrixorigin/matrixone/pkg/vm/engine/tae/iface/handle" "github.com/matrixorigin/matrixone/pkg/vm/engine/test/testutil" "github.com/matrixorigin/matrixone/pkg/vm/process" + "github.com/stretchr/testify/require" ) func Test_ReaderCanReadRangesBlocksWithoutDeletes(t *testing.T) { @@ -133,14 +132,14 @@ func Test_ReaderCanReadRangesBlocksWithoutDeletes(t *testing.T) { expr := []*plan.Expr{ readutil.MakeFunctionExprForTest("=", []*plan.Expr{ readutil.MakeColExprForTest(int32(primaryKeyIdx), 
schema.ColDefs[primaryKeyIdx].Type.Oid, schema.ColDefs[primaryKeyIdx].Name), - plan2.MakePlan2Int64ConstExprWithType(bats[0].Vecs[primaryKeyIdx].Get(0).(int64)), + planner.MakePlan2Int64ConstExprWithType(bats[0].Vecs[primaryKeyIdx].Get(0).(int64)), }), } for _, e := range expr { - plan2.ReplaceFoldExpr(proc, e, &exes) + planner.ReplaceFoldExpr(proc, e, &exes) } for _, e := range expr { - plan2.EvalFoldExpr(proc, e, &exes) + planner.EvalFoldExpr(proc, e, &exes) } txn, _, reader, err := testutil.GetTableTxnReader( @@ -236,7 +235,7 @@ func TestReaderCanReadUncommittedInMemInsertAndDeletes(t *testing.T) { expr := []*plan.Expr{ readutil.MakeFunctionExprForTest("=", []*plan.Expr{ readutil.MakeColExprForTest(int32(primaryKeyIdx), schema.ColDefs[primaryKeyIdx].Type.Oid, schema.ColDefs[primaryKeyIdx].Name), - plan2.MakePlan2Int64ConstExprWithType(bat1.Vecs[primaryKeyIdx].Get(9).(int64)), + planner.MakePlan2Int64ConstExprWithType(bat1.Vecs[primaryKeyIdx].Get(9).(int64)), }), }