diff --git a/pkg/frontend/init_db.go b/pkg/frontend/init_db.go
deleted file mode 100644
index 43205d82d4e3185a453a08156a089d7b18a1b6a7..0000000000000000000000000000000000000000
--- a/pkg/frontend/init_db.go
+++ /dev/null
@@ -1,961 +0,0 @@
-// Copyright 2021 Matrix Origin
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package frontend
-
-import (
-	"context"
-	"errors"
-	"fmt"
-	"github.com/matrixorigin/matrixone/pkg/pb/plan"
-
-	"github.com/matrixorigin/matrixone/pkg/container/batch"
-	"github.com/matrixorigin/matrixone/pkg/container/types"
-	"github.com/matrixorigin/matrixone/pkg/container/vector"
-	"github.com/matrixorigin/matrixone/pkg/logutil"
-	"github.com/matrixorigin/matrixone/pkg/vm/engine"
-	"github.com/matrixorigin/matrixone/pkg/vm/engine/tae/moengine"
-	"github.com/matrixorigin/matrixone/pkg/vm/mheap"
-	"github.com/matrixorigin/matrixone/pkg/vm/mmu/guest"
-	"github.com/matrixorigin/matrixone/pkg/vm/mmu/host"
-)
-
-var (
-	errorIsNotTaeEngine          = errors.New("the engine is not tae")
-	errorMissingCatalogTables    = errors.New("missing catalog tables")
-	errorMissingCatalogDatabases = errors.New("missing catalog databases")
-	//used in future
-	//errorNoSuchAttribute          = errors.New("no such attribute in the schema")
-	//errorAttributeTypeIsDifferent = errors.New("attribute type is different with that in the schema")
-	//errorAttributeIsNotPrimary    = errors.New("attribute is not primary key")
-)
-
-// CatalogSchemaAttribute defines the attribute of the schema
-type CatalogSchemaAttribute struct {
-	AttributeName string
-	AttributeType types.Type
-	IsPrimaryKey  bool
-	Comment       string
-}
-
-func (sca *CatalogSchemaAttribute) GetName() string {
-	return sca.AttributeName
-}
-
-func (sca *CatalogSchemaAttribute) GetType() types.Type {
-	return sca.AttributeType
-}
-
-func (sca *CatalogSchemaAttribute) GetIsPrimaryKey() bool {
-	return sca.IsPrimaryKey
-}
-
-func (sca *CatalogSchemaAttribute) GetComment() string {
-	return sca.Comment
-}
-
-// CatalogSchema defines the schema for the catalog
-type CatalogSchema struct {
-	Name       string
-	Attributes []*CatalogSchemaAttribute
-}
-
-func (mcs *CatalogSchema) GetName() string {
-	return mcs.Name
-}
-
-func (mcs *CatalogSchema) Length() int {
-	return len(mcs.Attributes)
-}
-
-func (mcs *CatalogSchema) GetAttributes() []*CatalogSchemaAttribute {
-	return mcs.Attributes
-}
-
-func (mcs *CatalogSchema) GetAttribute(i int) *CatalogSchemaAttribute {
-	return mcs.Attributes[i]
-}
-
-// DefineSchemaForMoDatabase decides the schema of the mo_database
-func DefineSchemaForMoDatabase() *CatalogSchema {
-	/*
-		mo_database schema
-
-		| Attribute         | Type         | Primary Key | Note   |
-		| ---------------- | ------------- | ---- | ------------- |
-		| datname          | varchar(256)  | PK   | database name |
-		| dat_catalog_name | varchar(256)  |      | catalog name  |
-		| dat_createsql    | varchar(4096) |      | create sql    |
-	*/
-	datNameAttr := &CatalogSchemaAttribute{
-		AttributeName: "datname",
-		AttributeType: types.T_varchar.ToType(),
-		IsPrimaryKey:  true,
-		Comment:       "database name",
-	}
-	datNameAttr.AttributeType.Width = 256
-
-	datCatalogNameAttr := &CatalogSchemaAttribute{
-		AttributeName: "dat_catalog_name",
-		AttributeType: types.T_varchar.ToType(),
-		IsPrimaryKey:  false,
-		Comment:       "catalog name",
-	}
-	datCatalogNameAttr.AttributeType.Width = 256
-
-	datCreatesqlAttr := &CatalogSchemaAttribute{
-		AttributeName: "dat_createsql",
-		AttributeType: types.T_varchar.ToType(),
-		IsPrimaryKey:  false,
-		Comment:       "create sql",
-	}
-	datCreatesqlAttr.AttributeType.Width = 4096
-
-	attrs := []*CatalogSchemaAttribute{
-		datNameAttr,
-		datCatalogNameAttr,
-		datCreatesqlAttr,
-	}
-	return &CatalogSchema{Name: "mo_database", Attributes: attrs}
-}
-
-func PrepareInitialDataForMoDatabase() [][]string {
-	/*
-		hard code database:
-		mo_catalog
-	*/
-	data := [][]string{
-		{"mo_catalog", "def", "hardcode"},
-	}
-	return data
-}
-
-func FillInitialDataForMoDatabase() *batch.Batch {
-	schema := DefineSchemaForMoDatabase()
-	data := PrepareInitialDataForMoDatabase()
-	return PrepareInitialDataForSchema(schema, data)
-}
-
-func PrepareInitialDataForSchema(schema *CatalogSchema, data [][]string) *batch.Batch {
-	engineAttributeDefs := ConvertCatalogSchemaToEngineFormat(schema)
-	batch := AllocateBatchBasedOnEngineAttributeDefinition(engineAttributeDefs, len(data))
-	//fill batch with prepared data
-	FillBatchWithData(data, batch)
-	return batch
-}
-
-// DefineSchemaForMoTables decides the schema of the mo_tables
-func DefineSchemaForMoTables() *CatalogSchema {
-	/*
-		mo_tables schema
-
-		| Attribute      | Type           | Primary Key  | Note                                                                 |
-		| -------------- | ------------- | ----- | ---------------------------------------------------------------------------- |
-		| relname        | varchar(256)  | PK    | Name of the table, index, view, etc.                                         |
-		| reldatabase    | varchar(256)  | PK,FK | The database that contains this relation. reference mo_database.datname      |
-		| relpersistence | char(1)       |       | p = permanent table, t = temporary table                                     |
-		| relkind        | char(1)       |       | r = ordinary table, i = index, S = sequence, v = view, m = materialized view |
-		| rel_comment    | varchar(1024) |       | comment                                                                      |
-		| rel_createsql  | varchar(4096) |       | create sql                                                                   |
-	*/
-	relNameAttr := &CatalogSchemaAttribute{
-		AttributeName: "relname",
-		AttributeType: types.T_varchar.ToType(),
-		IsPrimaryKey:  true,
-		Comment:       "Name of the table, index, view, etc.",
-	}
-	relNameAttr.AttributeType.Width = 256
-
-	relDatabaseAttr := &CatalogSchemaAttribute{
-		AttributeName: "reldatabase",
-		AttributeType: types.T_varchar.ToType(),
-		IsPrimaryKey:  true,
-		Comment:       "The database that contains this relation. reference mo_database.datname",
-	}
-	relDatabaseAttr.AttributeType.Width = 256
-
-	relPersistenceAttr := &CatalogSchemaAttribute{
-		AttributeName: "relpersistence",
-		AttributeType: types.T_char.ToType(),
-		IsPrimaryKey:  false,
-		Comment:       "p = permanent table, t = temporary table",
-	}
-	relPersistenceAttr.AttributeType.Width = 1
-
-	relKindAttr := &CatalogSchemaAttribute{
-		AttributeName: "relkind",
-		AttributeType: types.T_char.ToType(),
-		IsPrimaryKey:  false,
-		Comment:       "r = ordinary table, i = index, S = sequence, v = view, m = materialized view",
-	}
-	relKindAttr.AttributeType.Width = 1
-
-	relCommentAttr := &CatalogSchemaAttribute{
-		AttributeName: "rel_comment",
-		AttributeType: types.T_varchar.ToType(),
-		IsPrimaryKey:  false,
-		Comment:       "comment",
-	}
-	relCommentAttr.AttributeType.Width = 1024
-
-	relCreatesqlAttr := &CatalogSchemaAttribute{
-		AttributeName: "rel_createsql",
-		AttributeType: types.T_varchar.ToType(),
-		IsPrimaryKey:  false,
-		Comment:       "create sql",
-	}
-	relCreatesqlAttr.AttributeType.Width = 4096
-
-	attrs := []*CatalogSchemaAttribute{
-		relNameAttr,
-		relDatabaseAttr,
-		relPersistenceAttr,
-		relKindAttr,
-		relCommentAttr,
-		relCreatesqlAttr,
-	}
-	return &CatalogSchema{Name: "mo_tables", Attributes: attrs}
-}
-
-func PrepareInitialDataForMoTables() [][]string {
-	/*
-		hard code tables:
-		mo_database,mo_tables,mo_columns
-
-		tables created in the initdb step:
-		mo_global_variables,mo_user
-	*/
-	data := [][]string{
-		{"mo_database", "mo_catalog", "p", "r", "tae hardcode", "databases"},
-		{"mo_tables", "mo_catalog", "p", "r", "tae hardcode", "tables"},
-		{"mo_columns", "mo_catalog", "p", "r", "tae hardcode", "columns"},
-	}
-	return data
-}
-
-func FillInitialDataForMoTables() *batch.Batch {
-	schema := DefineSchemaForMoTables()
-	data := PrepareInitialDataForMoTables()
-	return PrepareInitialDataForSchema(schema, data)
-}
-
-// DefineSchemaForMoColumns decides the schema of the mo_columns
-func DefineSchemaForMoColumns() *CatalogSchema {
-	/*
-		mo_columns schema
-
-		| Attribute             | Type          | Primary Key  | Note                                                                                                                                                                     |
-		| --------------------- | ------------- | ----- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
-		| att_database          | varchar(256)  | PK    | database                                                                                                                                                                        |
-		| att_relname           | varchar(256)  | PK,UK | The table this column belongs to.(references mo_tables.relname)                                                                                                                 |
-		| attname               | varchar(256)  | PK    | The column name                                                                                                                                                                 |
-		| atttyp                | int           |       | The data type of this column (zero for a dropped column).                                                                                                                       |
-		| attnum                | int           | UK    | The number of the column. Ordinary columns are numbered from 1 up.                                                                                                              |
-		| att_length            | int           |       | bytes count for the type.                                                                                                                                                       |
-		| attnotnull            | tinyint(1)    |       | This represents a not-null constraint.                                                                                                                                          |
-		| atthasdef             | tinyint(1)    |       | This column has a default expression or generation expression.                                                                                                                  |
-		| att_default           | varchar(1024) |       | default expression                                                                                                                                                              |
-		| attisdropped          | tinyint(1)    |       | This column has been dropped and is no longer valid. A dropped column is still physically present in the table, but is ignored by the parser and so cannot be accessed via SQL. |
-		| att_constraint_type   | char(1)       |       | p = primary key constraint, n=no constraint                                                                                                                                     |
-		| att_is_unsigned       | tinyint(1)    |       | unsigned or not                                                                                                                                                                 |
-		| att_is_auto_increment | tinyint       |       | auto increment or not                                                                                                                                                           |
-		| att_comment           | varchar(1024) |       | comment                                                                                                                                                                         |
-		| att_is_hidden         | tinyint(1)    |       | hidden or not                                                                                                                                                                   |
-	*/
-	attDatabaseAttr := &CatalogSchemaAttribute{
-		AttributeName: "att_database",
-		AttributeType: types.T_varchar.ToType(),
-		IsPrimaryKey:  true,
-		Comment:       "database",
-	}
-	attDatabaseAttr.AttributeType.Width = 256
-
-	attRelNameAttr := &CatalogSchemaAttribute{
-		AttributeName: "att_relname",
-		AttributeType: types.T_varchar.ToType(),
-		IsPrimaryKey:  true,
-		Comment:       "The table this column belongs to.(references mo_tables.relname)",
-	}
-	attRelNameAttr.AttributeType.Width = 256
-
-	attNameAttr := &CatalogSchemaAttribute{
-		AttributeName: "attname",
-		AttributeType: types.T_varchar.ToType(),
-		IsPrimaryKey:  true,
-		Comment:       "The column name ",
-	}
-	attNameAttr.AttributeType.Width = 256
-
-	attTypAttr := &CatalogSchemaAttribute{
-		AttributeName: "atttyp",
-		AttributeType: types.T_int8.ToType(),
-		IsPrimaryKey:  false,
-		Comment:       "The data type of this column (zero for a dropped column). ",
-	}
-
-	attNumAttr := &CatalogSchemaAttribute{
-		AttributeName: "attnum",
-		AttributeType: types.T_int8.ToType(),
-		IsPrimaryKey:  false,
-		Comment:       "The number of the column. Ordinary columns are numbered from 1 up.",
-	}
-
-	attLengthAttr := &CatalogSchemaAttribute{
-		AttributeName: "att_length",
-		AttributeType: types.T_int32.ToType(),
-		IsPrimaryKey:  false,
-		Comment:       "bytes count for the type.",
-	}
-
-	attNotNullAttr := &CatalogSchemaAttribute{
-		AttributeName: "attnotnull",
-		AttributeType: types.T_int8.ToType(),
-		IsPrimaryKey:  false,
-		Comment:       "This represents a not-null constraint.",
-	}
-
-	attHasDefAttr := &CatalogSchemaAttribute{
-		AttributeName: "atthasdef",
-		AttributeType: types.T_int8.ToType(),
-		IsPrimaryKey:  false,
-		Comment:       "This column has a default expression or generation expression.",
-	}
-
-	attDefaultAttr := &CatalogSchemaAttribute{
-		AttributeName: "att_default",
-		AttributeType: types.T_varchar.ToType(),
-		IsPrimaryKey:  false,
-		Comment:       "default expression",
-	}
-	attDefaultAttr.AttributeType.Width = 1024
-
-	attIsDroppedAttr := &CatalogSchemaAttribute{
-		AttributeName: "attisdropped",
-		AttributeType: types.T_int8.ToType(),
-		IsPrimaryKey:  false,
-		Comment:       "This column has been dropped and is no longer valid. A dropped column is still physically present in the table, but is ignored by the parser and so cannot be accessed via SQL.",
-	}
-
-	attConstraintTypeAttr := &CatalogSchemaAttribute{
-		AttributeName: "att_constraint_type",
-		AttributeType: types.T_char.ToType(),
-		IsPrimaryKey:  false,
-		Comment:       "p = primary key constraint, n=no constraint",
-	}
-	attConstraintTypeAttr.AttributeType.Width = 1
-
-	attIsUnsignedAttr := &CatalogSchemaAttribute{
-		AttributeName: "att_is_unsigned",
-		AttributeType: types.T_int8.ToType(),
-		IsPrimaryKey:  false,
-		Comment:       "unsigned or not",
-	}
-
-	attIsAutoIncrementAttr := &CatalogSchemaAttribute{
-		AttributeName: "att_is_auto_increment",
-		AttributeType: types.T_int8.ToType(),
-		IsPrimaryKey:  false,
-		Comment:       "auto increment or not ",
-	}
-
-	attCommentAttr := &CatalogSchemaAttribute{
-		AttributeName: "att_comment",
-		AttributeType: types.T_varchar.ToType(),
-		IsPrimaryKey:  false,
-		Comment:       "comment",
-	}
-	attCommentAttr.AttributeType.Width = 1024
-
-	attIsHiddenAttr := &CatalogSchemaAttribute{
-		AttributeName: "att_is_hidden",
-		AttributeType: types.T_int8.ToType(),
-		IsPrimaryKey:  false,
-		Comment:       "hidden or not",
-	}
-
-	attrs := []*CatalogSchemaAttribute{
-		attDatabaseAttr,
-		attRelNameAttr,
-		attNameAttr,
-		attTypAttr,
-		attNumAttr,
-		attLengthAttr,
-		attNotNullAttr,
-		attHasDefAttr,
-		attDefaultAttr,
-		attIsDroppedAttr,
-		attConstraintTypeAttr,
-		attIsUnsignedAttr,
-		attIsAutoIncrementAttr,
-		attCommentAttr,
-		attIsHiddenAttr,
-	}
-
-	return &CatalogSchema{Name: "mo_columns", Attributes: attrs}
-}
-
-func extractColumnsInfoFromAttribute(schema *CatalogSchema, i int) []string {
-	attr := schema.GetAttribute(i)
-	moColumnsSchema := DefineSchemaForMoColumns()
-	ret := make([]string, moColumnsSchema.Length())
-
-	//| Attribute             | Type          | Primary Key  | Note                                                                                                                                                                     |
-	//| --------------------- | ------------- | ----- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
-	//| att_database          | varchar(256)  | PK    | database                                                                                                                                                                        |
-	ret[0] = "mo_catalog"
-	//| att_relname           | varchar(256)  | PK,UK | The table this column belongs to.(references mo_tables.relname)                                                                                                                 |
-	ret[1] = schema.GetName()
-	//| attname               | varchar(256)  | PK    | The column name                                                                                                                                                                 |
-	ret[2] = attr.GetName()
-	//| atttyp                | int           |       | The data type of this column (zero for a dropped column).                                                                                                                       |
-	ret[3] = fmt.Sprintf("%d", attr.GetType().Oid)
-	//| attnum                | int           | UK    | The number of the column. Ordinary columns are numbered from 1 up.                                                                                                              |
-	ret[4] = fmt.Sprintf("%d", i)
-	//| att_length            | int           |       | bytes count for the type.                                                                                                                                                       |
-	if attr.GetType().Oid == types.T_varchar || attr.GetType().Oid == types.T_char {
-		ret[5] = fmt.Sprintf("%d", attr.GetType().Width)
-	} else {
-		ret[5] = fmt.Sprintf("%d", attr.GetType().Size)
-	}
-	//| attnotnull            | tinyint(1)    |       | This represents a not-null constraint.                                                                                                                                          |
-	if attr.GetIsPrimaryKey() {
-		ret[6] = "1"
-	} else {
-		ret[6] = "0"
-	}
-	//| atthasdef             | tinyint(1)    |       | This column has a default expression or generation expression.                                                                                                                  |
-	ret[7] = "0"
-	//| att_default           | varchar(1024) |       | default expression                                                                                                                                                              |
-	ret[8] = "''"
-	//| attisdropped          | tinyint(1)    |       | This column has been dropped and is no longer valid. A dropped column is still physically present in the table, but is ignored by the parser and so cannot be accessed via SQL. |
-	ret[9] = "0"
-	//| att_constraint_type   | char(1)       |       | p = primary key constraint, n=no constraint                                                                                                                                     |
-	if attr.GetIsPrimaryKey() {
-		ret[10] = "p"
-	} else {
-		ret[10] = "n"
-	}
-	//| att_is_unsigned       | tinyint(1)    |       | unsigned or not                                                                                                                                                                 |
-	switch attr.GetType().Oid {
-	case types.T_uint8, types.T_uint16, types.T_uint32, types.T_uint64:
-		ret[11] = "1"
-	default:
-		ret[11] = "0"
-	}
-	//| att_is_auto_increment | tinyint       |       | auto increment or not                                                                                                                                                           |
-	ret[12] = "0"
-	//| att_comment           | varchar(1024) |       | comment                                                                                                                                                                         |
-	ret[13] = attr.GetComment()
-	//| att_is_hidden         | tinyint(1)    |       | hidden or not                                                                                                                                                                   |
-	ret[14] = "0"
-	return ret
-}
-
-func PrepareInitialDataForMoColumns() [][]string {
-	moDatabaseSchema := DefineSchemaForMoDatabase()
-	moDatabaseColumns := make([][]string, moDatabaseSchema.Length())
-	for i := 0; i < moDatabaseSchema.Length(); i++ {
-		moDatabaseColumns[i] = extractColumnsInfoFromAttribute(moDatabaseSchema, i)
-	}
-
-	moTablesSchema := DefineSchemaForMoTables()
-	moTablesColumns := make([][]string, moTablesSchema.Length())
-	for i := 0; i < moTablesSchema.Length(); i++ {
-		moTablesColumns[i] = extractColumnsInfoFromAttribute(moTablesSchema, i)
-	}
-
-	moColumnsSchema := DefineSchemaForMoColumns()
-	moColumnsColumns := make([][]string, moColumnsSchema.Length())
-	for i := 0; i < moColumnsSchema.Length(); i++ {
-		moColumnsColumns[i] = extractColumnsInfoFromAttribute(moColumnsSchema, i)
-	}
-
-	var data [][]string
-	data = append(data, moDatabaseColumns...)
-	data = append(data, moTablesColumns...)
-	data = append(data, moColumnsColumns...)
-	return data
-}
-
-func FillInitialDataForMoColumns() *batch.Batch {
-	schema := DefineSchemaForMoColumns()
-	data := PrepareInitialDataForMoColumns()
-	return PrepareInitialDataForSchema(schema, data)
-}
-
-// DefineSchemaForMoGlobalVariables decides the schema of the mo_global_variables
-func DefineSchemaForMoGlobalVariables() *CatalogSchema {
-	/*
-		mo_global_variables schema
-		   	  | Attribute       | Type            | Primary Key | Note  |
-		      | ----------------- | ------------- | ---- | --- |
-		      | gv_variable_name  | varchar(256)  | PK   |  |
-		      | gv_variable_value | varchar(1024) |      |  |
-	*/
-	gvVariableNameAttr := &CatalogSchemaAttribute{
-		AttributeName: "gv_variable_name",
-		AttributeType: types.T_varchar.ToType(),
-		IsPrimaryKey:  true,
-		Comment:       "",
-	}
-	gvVariableNameAttr.AttributeType.Width = 256
-
-	gvVariableValueAttr := &CatalogSchemaAttribute{
-		AttributeName: "gv_variable_value",
-		AttributeType: types.T_varchar.ToType(),
-		IsPrimaryKey:  false,
-		Comment:       "",
-	}
-	gvVariableValueAttr.AttributeType.Width = 1024
-
-	attrs := []*CatalogSchemaAttribute{
-		gvVariableNameAttr,
-		gvVariableValueAttr,
-	}
-
-	return &CatalogSchema{Name: "mo_global_variables", Attributes: attrs}
-}
-
-func PrepareInitialDataForMoGlobalVariables() [][]string {
-	data := [][]string{
-		{"max_allowed_packet", "67108864"},
-		{"version_comment", "MatrixOne"},
-		{"port", "6001"},
-		{"host", "0.0.0.0"},
-		{"storePath", "./store"},
-		{"batchSizeInLoadData", "40000"},
-	}
-	return data
-}
-
-func FillInitialDataForMoGlobalVariables() *batch.Batch {
-	schema := DefineSchemaForMoGlobalVariables()
-	data := PrepareInitialDataForMoGlobalVariables()
-	return PrepareInitialDataForSchema(schema, data)
-}
-
-// DefineSchemaForMoUser decides the schema of the mo_user
-func DefineSchemaForMoUser() *CatalogSchema {
-	/*
-		mo_user schema
-		| Attribute        | Type         | Primary Key | Note        |
-		| --------- | ------------ | ---- | --------- |
-		| user_host | varchar(256) | PK   | user host |
-		| user_name | varchar(256) | PK   | user name |
-		| authentication_string | varchar(4096) |     | password |
-	*/
-	userHostAttr := &CatalogSchemaAttribute{
-		AttributeName: "user_host",
-		AttributeType: types.T_varchar.ToType(),
-		// Note: TAE currently supports only a single PK. This column should be part of the primary key.
-		// TODO: Set it as true if composite pk is ready
-		IsPrimaryKey: false,
-		Comment:      "user host",
-	}
-	userHostAttr.AttributeType.Width = 256
-
-	userNameAttr := &CatalogSchemaAttribute{
-		AttributeName: "user_name",
-		AttributeType: types.T_varchar.ToType(),
-		IsPrimaryKey:  true,
-		Comment:       "user name",
-	}
-	userNameAttr.AttributeType.Width = 256
-
-	passwordAttr := &CatalogSchemaAttribute{
-		AttributeName: "authentication_string",
-		AttributeType: types.T_varchar.ToType(),
-		IsPrimaryKey:  false,
-		Comment:       "password",
-	}
-	passwordAttr.AttributeType.Width = 256
-
-	attrs := []*CatalogSchemaAttribute{
-		userHostAttr,
-		userNameAttr,
-		passwordAttr,
-	}
-	return &CatalogSchema{Name: "mo_user", Attributes: attrs}
-}
-
-func PrepareInitialDataForMoUser() [][]string {
-	data := [][]string{
-		{"localhost", "root", "''"},
-		{"localhost", "dump", "111"},
-	}
-	return data
-}
-
-func FillInitialDataForMoUser() *batch.Batch {
-	schema := DefineSchemaForMoUser()
-	data := PrepareInitialDataForMoUser()
-	return PrepareInitialDataForSchema(schema, data)
-}
-
-// InitDB sets up the initial catalog tables in tae
-func InitDB(ctx context.Context, tae engine.Engine) error {
-	taeEngine, ok := tae.(moengine.TxnEngine)
-	if !ok {
-		return errorIsNotTaeEngine
-	}
-
-	txnCtx, err := taeEngine.StartTxn(nil)
-	if err != nil {
-		return err
-	}
-	txnOperator := moengine.TxnToTxnOperator(txnCtx)
-
-	/*
-		stage 1: create catalog tables
-	*/
-	//1.get database mo_catalog handler
-	//TODO: use mo_catalog after tae is ready
-	catalogDbName := "mo_catalog"
-	//err = tae.Create(0, catalogDbName, 0, txnCtx.GetCtx())
-	//if err != nil {
-	//	logutil.Infof("create database %v failed.error:%v", catalogDbName, err)
-	//	err2 := txnCtx.Rollback()
-	//	if err2 != nil {
-	//		logutil.Infof("txnCtx rollback failed. error:%v", err2)
-	//		return err2
-	//	}
-	//	return err
-	//}
-
-	catalogDB, err := tae.Database(ctx, catalogDbName, txnOperator)
-	if err != nil {
-		logutil.Infof("get database %v failed.error:%v", catalogDbName, err)
-		err2 := txnCtx.Rollback()
-		if err2 != nil {
-			logutil.Infof("txnCtx rollback failed. error:%v", err2)
-			return err2
-		}
-		return err
-	}
-
-	//2. create table mo_global_variables
-	gvSch := DefineSchemaForMoGlobalVariables()
-	gvDefs := convertCatalogSchemaToTableDef(gvSch)
-	rel, _ := catalogDB.Relation(ctx, gvSch.GetName())
-
-	if rel == nil {
-		err = catalogDB.Create(ctx, gvSch.GetName(), gvDefs)
-		if err != nil {
-			logutil.Infof("create table %v failed.error:%v", gvSch.GetName(), err)
-			err2 := txnCtx.Rollback()
-			if err2 != nil {
-				logutil.Infof("txnCtx rollback failed. error:%v", err2)
-				return err2
-			}
-			return err
-		}
-	}
-
-	if rel == nil {
-		//write initial data into mo_global_variables
-		gvTable, err := catalogDB.Relation(ctx, gvSch.GetName())
-		if err != nil {
-			logutil.Infof("get table %v failed.error:%v", gvSch.GetName(), err)
-			err2 := txnCtx.Rollback()
-			if err2 != nil {
-				logutil.Infof("txnCtx rollback failed. error:%v", err2)
-				return err2
-			}
-			return err
-		}
-
-		gvBatch := FillInitialDataForMoGlobalVariables()
-		err = gvTable.Write(ctx, gvBatch)
-		if err != nil {
-			logutil.Infof("write into table %v failed.error:%v", gvSch.GetName(), err)
-			err2 := txnCtx.Rollback()
-			if err2 != nil {
-				logutil.Infof("txnCtx rollback failed. error:%v", err2)
-				return err2
-			}
-			return err
-		}
-	}
-	userSch := DefineSchemaForMoUser()
-	userDefs := convertCatalogSchemaToTableDef(userSch)
-	rel, _ = catalogDB.Relation(ctx, userSch.GetName())
-	if rel == nil {
-		//3. create table mo_user
-		err = catalogDB.Create(ctx, userSch.GetName(), userDefs)
-		if err != nil {
-			logutil.Infof("create table %v failed.error:%v", userSch.GetName(), err)
-			err2 := txnCtx.Rollback()
-			if err2 != nil {
-				logutil.Infof("txnCtx rollback failed. error:%v", err2)
-				return err2
-			}
-			return err
-		}
-
-		//write initial data into mo_user
-		userTable, err := catalogDB.Relation(ctx, userSch.GetName())
-		if err != nil {
-			logutil.Infof("get table %v failed.error:%v", userSch.GetName(), err)
-			err2 := txnCtx.Rollback()
-			if err2 != nil {
-				logutil.Infof("txnCtx rollback failed. error:%v", err2)
-				return err2
-			}
-			return err
-		}
-
-		userBatch := FillInitialDataForMoUser()
-		err = userTable.Write(ctx, userBatch)
-		if err != nil {
-			logutil.Infof("write into table %v failed.error:%v", userSch.GetName(), err)
-			err2 := txnCtx.Rollback()
-			if err2 != nil {
-				logutil.Infof("txnCtx rollback failed. error:%v", err2)
-				return err2
-			}
-			return err
-		}
-	}
-
-	/*
-		stage 2: create information_schema database.
-		Views in the information_schema need to be created by 'create view'
-	*/
-	//1. create database information_schema
-	infoSchemaName := "information_schema"
-	db, _ := tae.Database(ctx, infoSchemaName, txnOperator)
-
-	if db == nil {
-		err = tae.Create(ctx, infoSchemaName, txnOperator)
-		if err != nil {
-			logutil.Infof("create database %v failed.error:%v", infoSchemaName, err)
-			err2 := txnCtx.Rollback()
-			if err2 != nil {
-				logutil.Infof("txnCtx rollback failed. error:%v", err2)
-				return err2
-			}
-			return err
-		}
-	}
-
-	//TODO: create views after the computation engine is ready
-	err = txnCtx.Commit()
-	if err != nil {
-		logutil.Infof("txnCtx commit failed.error:%v", err)
-		return err
-	}
-
-	return sanityCheck(ctx, tae)
-}
-
-// sanityCheck checks whether the catalog is ready
-func sanityCheck(ctx context.Context, tae engine.Engine) error {
-	taeEngine, ok := tae.(moengine.TxnEngine)
-	if !ok {
-		return errorIsNotTaeEngine
-	}
-
-	txnCtx, err := taeEngine.StartTxn(nil)
-	if err != nil {
-		return err
-	}
-	txnOperator := moengine.TxnToTxnOperator(txnCtx)
-
-	// databases: mo_catalog,information_schema
-	dbs, err := tae.Databases(ctx, txnOperator)
-	if err != nil {
-		return err
-	}
-	wantDbs := []string{"mo_catalog", "information_schema"}
-	if !isWanted(wantDbs, dbs) {
-		logutil.Infof("wantDbs %v,dbs %v", wantDbs, dbs)
-		return errorMissingCatalogDatabases
-	}
-
-	// database mo_catalog has tables:mo_database,mo_tables,mo_columns,mo_global_variables, mo_user
-	wantTablesOfMoCatalog := []string{"mo_database", "mo_tables", "mo_columns", "mo_global_variables", "mo_user"}
-	wantSchemasOfCatalog := []*CatalogSchema{
-		DefineSchemaForMoDatabase(),
-		DefineSchemaForMoTables(),
-		DefineSchemaForMoColumns(),
-		DefineSchemaForMoGlobalVariables(),
-		DefineSchemaForMoUser(),
-	}
-	catalogDbName := "mo_catalog"
-	err = isWantedDatabase(ctx, taeEngine, txnOperator, catalogDbName, wantTablesOfMoCatalog, wantSchemasOfCatalog)
-	if err != nil {
-		return err
-	}
-
-	err = txnCtx.Commit()
-	if err != nil {
-		logutil.Infof("txnCtx commit failed.error:%v", err)
-		return err
-	}
-
-	return nil
-}
-
-// isWanted checks whether the string slices are the same
-func isWanted(want, actual []string) bool {
-	w := make([]string, len(want))
-	copy(w, want)
-	a := make([]string, len(actual))
-	copy(a, actual)
-	for i := 0; i < len(w); i++ {
-		if w[i] != a[i] {
-			return false
-		}
-	}
-	return true
-}
-
-// isWantedDatabase checks that the database has the expected tables
-func isWantedDatabase(ctx context.Context, taeEngine moengine.TxnEngine, txnOperator TxnOperator, dbName string, tables []string, schemas []*CatalogSchema) error {
-	db, err := taeEngine.Database(ctx, dbName, txnOperator)
-	if err != nil {
-		logutil.Infof("get database %v failed.error:%v", dbName, err)
-		err2 := txnOperator.Rollback(ctx)
-		if err2 != nil {
-			logutil.Infof("txnCtx rollback failed. error:%v", err2)
-			return err2
-		}
-		return err
-	}
-	tablesOfMoCatalog, err := db.Relations(ctx)
-	if err != nil {
-		return err
-	}
-	if !isWanted(tables, tablesOfMoCatalog) {
-		logutil.Infof("wantTables %v, tables %v", tables, tablesOfMoCatalog)
-		return errorMissingCatalogTables
-	}
-
-	//TODO:fix it after tae is ready
-	//check table attributes
-	for i, tableName := range tables {
-		err = isWantedTable(ctx, db, txnOperator, tableName, schemas[i])
-		if err != nil {
-			return err
-		}
-	}
-
-	return err
-}
-
-// isWantedTable checks that the table has the expected attributes
-func isWantedTable(ctx context.Context, db engine.Database, txnOperator TxnOperator, tableName string, schema *CatalogSchema) error {
-	table, err := db.Relation(ctx, tableName)
-	if err != nil {
-		logutil.Infof("get table %v failed.error:%v", tableName, err)
-		err2 := txnOperator.Rollback(ctx)
-		if err2 != nil {
-			logutil.Infof("txnCtx rollback failed. error:%v", err2)
-			return err2
-		}
-		return err
-	}
-	//TODO:fix it after tae is ready
-	/*
-		defs := table.TableDefs(txnCtx.GetCtx())
-
-			attrs := make(map[string]*CatalogSchemaAttribute)
-			for _, attr := range schema.GetAttributes() {
-				attrs[attr.GetName()] = attr
-			}
-
-			for _, def := range defs {
-				if attr, ok := def.(*engine.AttributeDef); ok {
-					if schemaAttr, ok2 := attrs[attr.Attr.Name]; ok2 {
-						if attr.Attr.Name != schemaAttr.GetName() {
-							logutil.Infof("def name %v schema name %v", attr.Attr.Name, schemaAttr.GetName())
-							return errorNoSuchAttribute
-						}
-						//TODO: fix it after the tae is ready
-						//if !attr.Attr.Type.Eq(schemaAttr.GetType()) {
-						//	return errorAttributeTypeIsDifferent
-						//}
-						if attr.Attr.Type.Oid != schemaAttr.GetType().Oid {
-							return errorAttributeTypeIsDifferent
-						}
-
-						//if !(attr.Attr.Primary && schemaAttr.GetIsPrimaryKey() ||
-						//	!attr.Attr.Primary && !schemaAttr.GetIsPrimaryKey()) {
-						//	return errorAttributeIsNotPrimary
-						//}
-					} else {
-						logutil.Infof("def name 1 %v", attr.Attr.Name)
-						return errorNoSuchAttribute
-					}
-				} else if attr, ok2 := def.(*engine.PrimaryIndexDef); ok2 {
-					for _, name := range attr.Names {
-						if schemaAttr, ok2 := attrs[name]; ok2 {
-							if !schemaAttr.GetIsPrimaryKey() {
-								return errorAttributeIsNotPrimary
-							}
-						} else {
-							logutil.Infof("def name 2 %v", name)
-							return errorNoSuchAttribute
-						}
-					}
-				}
-			}
-	*/
-	//read data from table
-	readers, err := table.NewReader(ctx, 1, nil, nil)
-	if err != nil {
-		return err
-	}
-	fieldNames := make([]string, schema.Length())
-	for i := 0; i < schema.Length(); i++ {
-		fieldNames[i] = schema.GetAttribute(i).GetName()
-	}
-	logutil.Infof("\nTable:%s \n\nAttributes:\n%v \n", tableName, fieldNames)
-	logutil.Infof("Datas:\n")
-	result, err := readers[0].Read(fieldNames, nil, mheap.New(guest.New(1<<20, host.New(1<<20))))
-	if err != nil {
-		return err
-	}
-	for i := 0; i < vector.Length(result.Vecs[0]); i++ {
-		line := FormatLineInBatch(result, i)
-		logutil.Infof("%v", line)
-	}
-	return nil
-}
-
-func convertCatalogSchemaToTableDef(sch *CatalogSchema) []engine.TableDef {
-	defs := make([]engine.TableDef, 0, len(sch.GetAttributes()))
-	var primaryKeyName []string
-
-	for _, attr := range sch.GetAttributes() {
-		if attr.GetIsPrimaryKey() {
-			primaryKeyName = append(primaryKeyName, attr.GetName())
-		}
-
-		defs = append(defs, &engine.AttributeDef{Attr: engine.Attribute{
-			Name:    attr.GetName(),
-			Alg:     0,
-			Type:    attr.GetType(),
-			Default: &plan.Default{},
-			Primary: attr.GetIsPrimaryKey(),
-		}})
-	}
-
-	if len(primaryKeyName) != 0 {
-		defs = append(defs, &engine.PrimaryIndexDef{
-			Names: primaryKeyName,
-		})
-	}
-	return defs
-}
diff --git a/pkg/frontend/init_db_test.go b/pkg/frontend/init_db_test.go
deleted file mode 100644
index 19b2a35eef51bce379f8b79e6767758be9493d43..0000000000000000000000000000000000000000
--- a/pkg/frontend/init_db_test.go
+++ /dev/null
@@ -1,108 +0,0 @@
-// Copyright 2021 Matrix Origin
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package frontend
-
-import (
-	"github.com/matrixorigin/matrixone/pkg/container/batch"
-	"github.com/smartystreets/goconvey/convey"
-	"testing"
-)
-
-func TestPrepareInitialData(t *testing.T) {
-	convey.Convey("mo_database", t, func() {
-		sch := DefineSchemaForMoDatabase()
-		data := PrepareInitialDataForMoDatabase()
-		bat := FillInitialDataForMoDatabase()
-		convey.So(bat, convey.ShouldNotBeNil)
-		convey.So(batch.Length(bat), convey.ShouldEqual, len(data))
-		convey.So(len(bat.Vecs), convey.ShouldEqual, len(data[0]))
-		convey.So(len(bat.Vecs), convey.ShouldEqual, sch.Length())
-		for i, attr := range sch.GetAttributes() {
-			convey.So(attr.AttributeType.Eq(bat.Vecs[i].Typ), convey.ShouldBeTrue)
-		}
-		for i, line := range data {
-			s := FormatLineInBatch(bat, i)
-			convey.So(line, convey.ShouldResemble, s)
-		}
-	})
-
-	convey.Convey("mo_tables", t, func() {
-		sch := DefineSchemaForMoTables()
-		data := PrepareInitialDataForMoTables()
-		bat := FillInitialDataForMoTables()
-		convey.So(bat, convey.ShouldNotBeNil)
-		convey.So(batch.Length(bat), convey.ShouldEqual, len(data))
-		convey.So(len(bat.Vecs), convey.ShouldEqual, len(data[0]))
-		convey.So(len(bat.Vecs), convey.ShouldEqual, sch.Length())
-		for i, attr := range sch.GetAttributes() {
-			convey.So(attr.AttributeType.Eq(bat.Vecs[i].Typ), convey.ShouldBeTrue)
-		}
-		for i, line := range data {
-			s := FormatLineInBatch(bat, i)
-			convey.So(line, convey.ShouldResemble, s)
-		}
-	})
-
-	convey.Convey("mo_columns", t, func() {
-		sch := DefineSchemaForMoColumns()
-		data := PrepareInitialDataForMoColumns()
-		bat := FillInitialDataForMoColumns()
-		convey.So(bat, convey.ShouldNotBeNil)
-		convey.So(batch.Length(bat), convey.ShouldEqual, len(data))
-		convey.So(len(bat.Vecs), convey.ShouldEqual, len(data[0]))
-		convey.So(len(bat.Vecs), convey.ShouldEqual, sch.Length())
-		for i, attr := range sch.GetAttributes() {
-			convey.So(attr.AttributeType.Eq(bat.Vecs[i].Typ), convey.ShouldBeTrue)
-		}
-		for i, line := range data {
-			s := FormatLineInBatch(bat, i)
-			convey.So(line, convey.ShouldResemble, s)
-		}
-	})
-
-	convey.Convey("mo_global_variables", t, func() {
-		sch := DefineSchemaForMoGlobalVariables()
-		data := PrepareInitialDataForMoGlobalVariables()
-		bat := FillInitialDataForMoGlobalVariables()
-		convey.So(bat, convey.ShouldNotBeNil)
-		convey.So(batch.Length(bat), convey.ShouldEqual, len(data))
-		convey.So(len(bat.Vecs), convey.ShouldEqual, len(data[0]))
-		convey.So(len(bat.Vecs), convey.ShouldEqual, sch.Length())
-		for i, attr := range sch.GetAttributes() {
-			convey.So(attr.AttributeType.Eq(bat.Vecs[i].Typ), convey.ShouldBeTrue)
-		}
-		for i, line := range data {
-			s := FormatLineInBatch(bat, i)
-			convey.So(line, convey.ShouldResemble, s)
-		}
-	})
-
-	convey.Convey("mo_user", t, func() {
-		sch := DefineSchemaForMoUser()
-		data := PrepareInitialDataForMoUser()
-		bat := FillInitialDataForMoUser()
-		convey.So(bat, convey.ShouldNotBeNil)
-		convey.So(batch.Length(bat), convey.ShouldEqual, len(data))
-		convey.So(len(bat.Vecs), convey.ShouldEqual, len(data[0]))
-		convey.So(len(bat.Vecs), convey.ShouldEqual, sch.Length())
-		for i, attr := range sch.GetAttributes() {
-			convey.So(attr.AttributeType.Eq(bat.Vecs[i].Typ), convey.ShouldBeTrue)
-		}
-		for i, line := range data {
-			s := FormatLineInBatch(bat, i)
-			convey.So(line, convey.ShouldResemble, s)
-		}
-	})
-}
diff --git a/pkg/frontend/mysql_cmd_executor_test.go b/pkg/frontend/mysql_cmd_executor_test.go
index 4902744e412520a21bdd713dabf21920ff232a53..5640a445475492d5d029d30ea6efdc3a6be82c1b 100644
--- a/pkg/frontend/mysql_cmd_executor_test.go
+++ b/pkg/frontend/mysql_cmd_executor_test.go
@@ -893,8 +893,8 @@ func Test_CMD_FIELD_LIST(t *testing.T) {
 		table := mock_frontend.NewMockRelation(ctrl)
 		db.EXPECT().Relation(ctx, "t").Return(table, nil).AnyTimes()
 		defs := []engine.TableDef{
-			&engine.AttributeDef{Attr: engine.Attribute{Name: "a", Type: toTypesType(types.T_char)}},
-			&engine.AttributeDef{Attr: engine.Attribute{Name: "b", Type: toTypesType(types.T_int32)}},
+			&engine.AttributeDef{Attr: engine.Attribute{Name: "a", Type: types.T_char.ToType()}},
+			&engine.AttributeDef{Attr: engine.Attribute{Name: "b", Type: types.T_int32.ToType()}},
 		}
 
 		table.EXPECT().TableDefs(ctx).Return(defs, nil).AnyTimes()
diff --git a/pkg/frontend/session.go b/pkg/frontend/session.go
index 116221ce22710ed46cd9f7bdfb00831452af8178..a5d4a68a6cd227b912511d113eaf1ec175d0c5c8 100644
--- a/pkg/frontend/session.go
+++ b/pkg/frontend/session.go
@@ -760,11 +760,6 @@ func (th *TxnHandler) GetStorage() engine.Engine {
 	return th.storage
 }
 
-func (th *TxnHandler) IsTaeEngine() bool {
-	_, ok := th.storage.(moengine.TxnEngine)
-	return ok
-}
-
 func (th *TxnHandler) GetTxn() TxnOperator {
 	err := th.ses.TxnStart()
 	if err != nil {
diff --git a/pkg/frontend/util.go b/pkg/frontend/util.go
index 31bfdacec82918c63be6613c762765104e638373..5ccb334a4bb3bc97728c7ac910d0bbda1fa7560b 100644
--- a/pkg/frontend/util.go
+++ b/pkg/frontend/util.go
@@ -17,7 +17,6 @@ package frontend
 import (
 	"bytes"
 	"fmt"
-	"github.com/matrixorigin/matrixone/pkg/pb/plan"
 	"go/constant"
 	"os"
 	"runtime"
@@ -29,10 +28,7 @@ import (
 
 	"github.com/matrixorigin/matrixone/pkg/sql/parsers/tree"
 
-	"github.com/matrixorigin/matrixone/pkg/container/batch"
-	"github.com/matrixorigin/matrixone/pkg/container/nulls"
 	"github.com/matrixorigin/matrixone/pkg/container/types"
-	"github.com/matrixorigin/matrixone/pkg/container/vector"
 
 	mo_config "github.com/matrixorigin/matrixone/pkg/config"
 	"github.com/matrixorigin/matrixone/pkg/logutil"
@@ -322,470 +318,6 @@ func getParameterUnit(configFile string, eng engine.Engine, txnClient TxnClient)
 	return pu, nil
 }
 
-func ConvertCatalogSchemaToEngineFormat(mcs *CatalogSchema) []*engine.AttributeDef {
-	genAttr := func(attr *CatalogSchemaAttribute) *engine.AttributeDef {
-		return &engine.AttributeDef{
-			Attr: engine.Attribute{
-				Name:    attr.AttributeName,
-				Alg:     0,
-				Type:    attr.AttributeType,
-				Default: &plan.Default{},
-			}}
-	}
-
-	attrs := make([]*engine.AttributeDef, 0, mcs.Length())
-	for _, attr := range mcs.GetAttributes() {
-		attrs = append(attrs, genAttr(attr))
-	}
-	return attrs
-}
-
-func toTypesType(t types.T) types.Type {
-	return t.ToType()
-}
-
-func AllocateBatchBasedOnEngineAttributeDefinition(attributeDefs []*engine.AttributeDef, rowCount int) *batch.Batch {
-	var attributeNames = make([]string, len(attributeDefs))
-	for i, def := range attributeDefs {
-		attributeNames[i] = def.Attr.Name
-	}
-	batchData := batch.New(true, attributeNames)
-
-	batchData.Zs = make([]int64, rowCount)
-	for i := 0; i < rowCount; i++ {
-		batchData.Zs[i] = 1
-	}
-
-	//alloc space for vector
-	for i, def := range attributeDefs {
-		vec := vector.New(def.Attr.Type)
-		vec.Or = true
-		vec.Data = nil
-		switch vec.Typ.Oid {
-		case types.T_bool:
-			vec.Data = make([]byte, rowCount*int(toTypesType(types.T_bool).Size))
-			vec.Col = types.DecodeBoolSlice(vec.Data)
-		case types.T_int8:
-			vec.Data = make([]byte, rowCount*int(toTypesType(types.T_int8).Size))
-			vec.Col = types.DecodeInt8Slice(vec.Data)
-		case types.T_int16:
-			vec.Data = make([]byte, rowCount*int(toTypesType(types.T_int16).Size))
-			vec.Col = types.DecodeInt16Slice(vec.Data)
-		case types.T_int32:
-			vec.Data = make([]byte, rowCount*int(toTypesType(types.T_int32).Size))
-			vec.Col = types.DecodeInt32Slice(vec.Data)
-		case types.T_int64:
-			vec.Data = make([]byte, rowCount*int(toTypesType(types.T_int64).Size))
-			vec.Col = types.DecodeInt64Slice(vec.Data)
-		case types.T_uint8:
-			vec.Data = make([]byte, rowCount*int(toTypesType(types.T_uint8).Size))
-			vec.Col = types.DecodeUint8Slice(vec.Data)
-		case types.T_uint16:
-			vec.Data = make([]byte, rowCount*int(toTypesType(types.T_uint16).Size))
-			vec.Col = types.DecodeUint16Slice(vec.Data)
-		case types.T_uint32:
-			vec.Data = make([]byte, rowCount*int(toTypesType(types.T_uint32).Size))
-			vec.Col = types.DecodeUint32Slice(vec.Data)
-		case types.T_uint64:
-			vec.Data = make([]byte, rowCount*int(toTypesType(types.T_uint64).Size))
-			vec.Col = types.DecodeUint64Slice(vec.Data)
-		case types.T_float32:
-			vec.Data = make([]byte, rowCount*int(toTypesType(types.T_float32).Size))
-			vec.Col = types.DecodeFloat32Slice(vec.Data)
-		case types.T_float64:
-			vec.Data = make([]byte, rowCount*int(toTypesType(types.T_float64).Size))
-			vec.Col = types.DecodeFloat64Slice(vec.Data)
-		case types.T_char, types.T_varchar, types.T_json:
-			vBytes := &types.Bytes{
-				Offsets: make([]uint32, rowCount),
-				Lengths: make([]uint32, rowCount),
-				Data:    nil,
-			}
-			vec.Col = vBytes
-			vec.Data = vBytes.Data
-		case types.T_date:
-			vec.Data = make([]byte, rowCount*int(toTypesType(types.T_date).Size))
-			vec.Col = types.DecodeDateSlice(vec.Data)
-		case types.T_datetime:
-			vec.Data = make([]byte, rowCount*int(toTypesType(types.T_datetime).Size))
-			vec.Col = types.DecodeDatetimeSlice(vec.Data)
-		default:
-			panic("unsupported vector type")
-		}
-		batchData.Vecs[i] = vec
-	}
-	batchData.Attrs = attributeNames
-	return batchData
-}
-
-func FillBatchWithData(data [][]string, batch *batch.Batch) {
-	for i, line := range data {
-		rowIdx := i
-		for j, field := range line {
-			colIdx := j
-
-			isNullOrEmpty := len(field) == 0 || field == "\\N"
-			vec := batch.Vecs[colIdx]
-
-			switch vec.Typ.Oid {
-			case types.T_bool:
-				cols := vec.Col.([]bool)
-				if isNullOrEmpty {
-					nulls.Add(vec.Nsp, uint64(rowIdx))
-				} else {
-					d, err := types.ParseBool(field)
-					if err != nil {
-						logutil.Errorf("parse field[%v] err:%v", field, err)
-					}
-					cols[rowIdx] = d
-				}
-			case types.T_int8:
-				cols := vec.Col.([]int8)
-				if isNullOrEmpty {
-					nulls.Add(vec.Nsp, uint64(rowIdx))
-				} else {
-					d, err := strconv.ParseInt(field, 10, 8)
-					if err != nil {
-						logutil.Errorf("parse field[%v] err:%v", field, err)
-						d = 0
-					}
-					cols[rowIdx] = int8(d)
-				}
-			case types.T_int16:
-				cols := vec.Col.([]int16)
-				if isNullOrEmpty {
-					nulls.Add(vec.Nsp, uint64(rowIdx))
-				} else {
-					d, err := strconv.ParseInt(field, 10, 16)
-					if err != nil {
-						logutil.Errorf("parse field[%v] err:%v", field, err)
-						d = 0
-					}
-					cols[rowIdx] = int16(d)
-				}
-			case types.T_int32:
-				cols := vec.Col.([]int32)
-				if isNullOrEmpty {
-					nulls.Add(vec.Nsp, uint64(rowIdx))
-				} else {
-					d, err := strconv.ParseInt(field, 10, 32)
-					if err != nil {
-						logutil.Errorf("parse field[%v] err:%v", field, err)
-						d = 0
-					}
-					cols[rowIdx] = int32(d)
-				}
-			case types.T_int64:
-				cols := vec.Col.([]int64)
-				if isNullOrEmpty {
-					nulls.Add(vec.Nsp, uint64(rowIdx))
-				} else {
-					d, err := strconv.ParseInt(field, 10, 64)
-					if err != nil {
-						logutil.Errorf("parse field[%v] err:%v", field, err)
-						d = 0
-					}
-					cols[rowIdx] = d
-				}
-			case types.T_uint8:
-				cols := vec.Col.([]uint8)
-				if isNullOrEmpty {
-					nulls.Add(vec.Nsp, uint64(rowIdx))
-				} else {
-					d, err := strconv.ParseUint(field, 10, 8)
-					if err != nil {
-						logutil.Errorf("parse field[%v] err:%v", field, err)
-						d = 0
-					}
-					cols[rowIdx] = uint8(d)
-				}
-			case types.T_uint16:
-				cols := vec.Col.([]uint16)
-				if isNullOrEmpty {
-					nulls.Add(vec.Nsp, uint64(rowIdx))
-				} else {
-					d, err := strconv.ParseUint(field, 10, 16)
-					if err != nil {
-						logutil.Errorf("parse field[%v] err:%v", field, err)
-						d = 0
-					}
-					cols[rowIdx] = uint16(d)
-				}
-			case types.T_uint32:
-				cols := vec.Col.([]uint32)
-				if isNullOrEmpty {
-					nulls.Add(vec.Nsp, uint64(rowIdx))
-				} else {
-					d, err := strconv.ParseUint(field, 10, 32)
-					if err != nil {
-						logutil.Errorf("parse field[%v] err:%v", field, err)
-						d = 0
-					}
-					cols[rowIdx] = uint32(d)
-				}
-			case types.T_uint64:
-				cols := vec.Col.([]uint64)
-				if isNullOrEmpty {
-					nulls.Add(vec.Nsp, uint64(rowIdx))
-				} else {
-					d, err := strconv.ParseUint(field, 10, 64)
-					if err != nil {
-						logutil.Errorf("parse field[%v] err:%v", field, err)
-						d = 0
-					}
-					cols[rowIdx] = uint64(d)
-				}
-			case types.T_float32:
-				cols := vec.Col.([]float32)
-				if isNullOrEmpty {
-					nulls.Add(vec.Nsp, uint64(rowIdx))
-				} else {
-					d, err := strconv.ParseFloat(field, 32)
-					if err != nil {
-						logutil.Errorf("parse field[%v] err:%v", field, err)
-						d = 0
-					}
-					cols[rowIdx] = float32(d)
-				}
-			case types.T_float64:
-				cols := vec.Col.([]float64)
-				if isNullOrEmpty {
-					nulls.Add(vec.Nsp, uint64(rowIdx))
-				} else {
-					fs := field
-					d, err := strconv.ParseFloat(fs, 64)
-					if err != nil {
-						logutil.Errorf("parse field[%v] err:%v", field, err)
-						d = 0
-					}
-					cols[rowIdx] = d
-				}
-			case types.T_char, types.T_varchar, types.T_json:
-				vBytes := vec.Col.(*types.Bytes)
-				if isNullOrEmpty {
-					nulls.Add(vec.Nsp, uint64(rowIdx))
-					vBytes.Offsets[rowIdx] = uint32(len(vBytes.Data))
-					vBytes.Lengths[rowIdx] = uint32(0)
-				} else {
-					vBytes.Offsets[rowIdx] = uint32(len(vBytes.Data))
-					vBytes.Data = append(vBytes.Data, field...)
-					vBytes.Lengths[rowIdx] = uint32(len(field))
-				}
-			case types.T_date:
-				cols := vec.Col.([]types.Date)
-				if isNullOrEmpty {
-					nulls.Add(vec.Nsp, uint64(rowIdx))
-				} else {
-					fs := field
-					d, err := types.ParseDate(fs)
-					if err != nil {
-						logutil.Errorf("parse field[%v] err:%v", field, err)
-						d = 0
-					}
-					cols[rowIdx] = d
-				}
-			case types.T_datetime:
-				cols := vec.Col.([]types.Datetime)
-				if isNullOrEmpty {
-					nulls.Add(vec.Nsp, uint64(rowIdx))
-				} else {
-					fs := field
-					d, err := types.ParseDatetime(fs, vec.Typ.Precision)
-					if err != nil {
-						logutil.Errorf("parse field[%v] err:%v", field, err)
-						d = 0
-					}
-					cols[rowIdx] = d
-				}
-			default:
-				panic("unsupported oid")
-			}
-		}
-	}
-}
-
-func FormatLineInBatch(bat *batch.Batch, rowIndex int) []string {
-	row := make([]interface{}, len(bat.Vecs))
-	var res []string
-	for i := 0; i < len(bat.Vecs); i++ {
-		vec := bat.Vecs[i]
-		switch vec.Typ.Oid { //get col
-		case types.T_bool:
-			if !nulls.Any(vec.Nsp) { //all data in this column are not null
-				vs := vec.Col.([]bool)
-				row[i] = vs[rowIndex]
-			} else {
-				if nulls.Contains(vec.Nsp, uint64(rowIndex)) { //is null
-					row[i] = nil
-				} else {
-					vs := vec.Col.([]bool)
-					row[i] = vs[rowIndex]
-				}
-			}
-		case types.T_int8:
-			if !nulls.Any(vec.Nsp) { //all data in this column are not null
-				vs := vec.Col.([]int8)
-				row[i] = vs[rowIndex]
-			} else {
-				if nulls.Contains(vec.Nsp, uint64(rowIndex)) { //is null
-					row[i] = nil
-				} else {
-					vs := vec.Col.([]int8)
-					row[i] = vs[rowIndex]
-				}
-			}
-		case types.T_uint8:
-			if !nulls.Any(vec.Nsp) { //all data in this column are not null
-				vs := vec.Col.([]uint8)
-				row[i] = vs[rowIndex]
-			} else {
-				if nulls.Contains(vec.Nsp, uint64(rowIndex)) { //is null
-					row[i] = nil
-				} else {
-					vs := vec.Col.([]uint8)
-					row[i] = vs[rowIndex]
-				}
-			}
-		case types.T_int16:
-			if !nulls.Any(vec.Nsp) { //all data in this column are not null
-				vs := vec.Col.([]int16)
-				row[i] = vs[rowIndex]
-			} else {
-				if nulls.Contains(vec.Nsp, uint64(rowIndex)) { //is null
-					row[i] = nil
-				} else {
-					vs := vec.Col.([]int16)
-					row[i] = vs[rowIndex]
-				}
-			}
-		case types.T_uint16:
-			if !nulls.Any(vec.Nsp) { //all data in this column are not null
-				vs := vec.Col.([]uint16)
-				row[i] = vs[rowIndex]
-			} else {
-				if nulls.Contains(vec.Nsp, uint64(rowIndex)) { //is null
-					row[i] = nil
-				} else {
-					vs := vec.Col.([]uint16)
-					row[i] = vs[rowIndex]
-				}
-			}
-		case types.T_int32:
-			if !nulls.Any(vec.Nsp) { //all data in this column are not null
-				vs := vec.Col.([]int32)
-				row[i] = vs[rowIndex]
-			} else {
-				if nulls.Contains(vec.Nsp, uint64(rowIndex)) { //is null
-					row[i] = nil
-				} else {
-					vs := vec.Col.([]int32)
-					row[i] = vs[rowIndex]
-				}
-			}
-		case types.T_uint32:
-			if !nulls.Any(vec.Nsp) { //all data in this column are not null
-				vs := vec.Col.([]uint32)
-				row[i] = vs[rowIndex]
-			} else {
-				if nulls.Contains(vec.Nsp, uint64(rowIndex)) { //is null
-					row[i] = nil
-				} else {
-					vs := vec.Col.([]uint32)
-					row[i] = vs[rowIndex]
-				}
-			}
-		case types.T_int64:
-			if !nulls.Any(vec.Nsp) { //all data in this column are not null
-				vs := vec.Col.([]int64)
-				row[i] = vs[rowIndex]
-			} else {
-				if nulls.Contains(vec.Nsp, uint64(rowIndex)) { //is null
-					row[i] = nil
-				} else {
-					vs := vec.Col.([]int64)
-					row[i] = vs[rowIndex]
-				}
-			}
-		case types.T_uint64:
-			if !nulls.Any(vec.Nsp) { //all data in this column are not null
-				vs := vec.Col.([]uint64)
-				row[i] = vs[rowIndex]
-			} else {
-				if nulls.Contains(vec.Nsp, uint64(rowIndex)) { //is null
-					row[i] = nil
-				} else {
-					vs := vec.Col.([]uint64)
-					row[i] = vs[rowIndex]
-				}
-			}
-		case types.T_float32:
-			if !nulls.Any(vec.Nsp) { //all data in this column are not null
-				vs := vec.Col.([]float32)
-				row[i] = vs[rowIndex]
-			} else {
-				if nulls.Contains(vec.Nsp, uint64(rowIndex)) { //is null
-					row[i] = nil
-				} else {
-					vs := vec.Col.([]float32)
-					row[i] = vs[rowIndex]
-				}
-			}
-		case types.T_float64:
-			if !nulls.Any(vec.Nsp) { //all data in this column are not null
-				vs := vec.Col.([]float64)
-				row[i] = vs[rowIndex]
-			} else {
-				if nulls.Contains(vec.Nsp, uint64(rowIndex)) { //is null
-					row[i] = nil
-				} else {
-					vs := vec.Col.([]float64)
-					row[i] = vs[rowIndex]
-				}
-			}
-		case types.T_char, types.T_varchar, types.T_json:
-			if !nulls.Any(vec.Nsp) { //all data in this column are not null
-				vs := vec.Col.(*types.Bytes)
-				row[i] = string(vs.Get(int64(rowIndex)))
-			} else {
-				if nulls.Contains(vec.Nsp, uint64(rowIndex)) { //is null
-					row[i] = nil
-				} else {
-					vs := vec.Col.(*types.Bytes)
-					row[i] = string(vs.Get(int64(rowIndex)))
-				}
-			}
-		case types.T_date:
-			if !nulls.Any(vec.Nsp) { //all data in this column are not null
-				vs := vec.Col.([]types.Date)
-				row[i] = vs[rowIndex]
-			} else {
-				if nulls.Contains(vec.Nsp, uint64(rowIndex)) { //is null
-					row[i] = nil
-				} else {
-					vs := vec.Col.([]types.Date)
-					row[i] = vs[rowIndex]
-				}
-			}
-		case types.T_datetime:
-			if !nulls.Any(vec.Nsp) { //all data in this column are not null
-				vs := vec.Col.([]types.Datetime)
-				row[i] = vs[rowIndex]
-			} else {
-				if nulls.Contains(vec.Nsp, uint64(rowIndex)) { //is null
-					row[i] = nil
-				} else {
-					vs := vec.Col.([]types.Datetime)
-					row[i] = vs[rowIndex]
-				}
-			}
-		default:
-			panic(fmt.Sprintf("reader.Read : unsupported type %d \n", vec.Typ.Oid))
-		}
-		res = append(res, fmt.Sprintf("%v", row[i]))
-	}
-	return res
-}
-
 // WildcardMatch implements wildcard pattern match algorithm.
 // pattern and target are ascii characters
 // TODO: add \_ and \%
diff --git a/pkg/frontend/util_test.go b/pkg/frontend/util_test.go
index c2291755d0eb6d964bd15656bab4dec5d0e6eb89..833f1bae47dc12213b9e1cfcc9ec0093937a6303 100644
--- a/pkg/frontend/util_test.go
+++ b/pkg/frontend/util_test.go
@@ -21,14 +21,9 @@ import (
 	"testing"
 	"time"
 
-	"github.com/matrixorigin/matrixone/pkg/container/batch"
-	"github.com/matrixorigin/matrixone/pkg/container/nulls"
-	"github.com/matrixorigin/matrixone/pkg/container/types"
-	"github.com/matrixorigin/matrixone/pkg/container/vector"
 	"github.com/matrixorigin/matrixone/pkg/sql/parsers"
 	"github.com/matrixorigin/matrixone/pkg/sql/parsers/dialect"
 	"github.com/matrixorigin/matrixone/pkg/sql/parsers/tree"
-	"github.com/matrixorigin/matrixone/pkg/vm/engine"
 	cvey "github.com/smartystreets/goconvey/convey"
 	"github.com/stretchr/testify/require"
 )
@@ -495,311 +490,3 @@ func TestGetSimpleExprValue(t *testing.T) {
 
 	})
 }
-
-func Test_AllocateBatchBasedOnEngineAttributeDefinition(t *testing.T) {
-	var attributeDefs []*engine.AttributeDef
-	var rowCount = 1
-	var colNum = 14
-	var tmp = &engine.AttributeDef{}
-	var ret *batch.Batch
-	cvey.Convey("", t, func() {
-		attributeDefs = make([]*engine.AttributeDef, colNum)
-		tmp.Attr.Type.Oid = types.T_bool
-
-		for i := 0; i < colNum; i++ {
-			attributeDefs[i] = tmp
-		}
-
-		ret = AllocateBatchBasedOnEngineAttributeDefinition(attributeDefs, rowCount)
-		cvey.So(ret.Vecs[0].Data, cvey.ShouldResemble, []byte{0})
-		cvey.So(ret.Vecs[0].Data, cvey.ShouldResemble, []byte{0})
-
-		tmp.Attr.Type.Oid = types.T_int8
-		attributeDefs[1] = tmp
-		ret = AllocateBatchBasedOnEngineAttributeDefinition(attributeDefs, rowCount)
-		cvey.So(ret.Vecs[1].Data, cvey.ShouldResemble, []byte{0})
-		cvey.So(ret.Vecs[1].Data, cvey.ShouldResemble, []byte{0})
-
-		tmp.Attr.Type.Oid = types.T_int16
-		attributeDefs[2] = tmp
-		ret = AllocateBatchBasedOnEngineAttributeDefinition(attributeDefs, rowCount)
-		cvey.So(ret.Vecs[2].Data, cvey.ShouldResemble, make([]byte, 2))
-		cvey.So(ret.Vecs[2].Data, cvey.ShouldResemble, make([]byte, 2))
-
-		tmp.Attr.Type.Oid = types.T_int32
-		attributeDefs[3] = tmp
-		ret = AllocateBatchBasedOnEngineAttributeDefinition(attributeDefs, rowCount)
-		cvey.So(ret.Vecs[3].Data, cvey.ShouldResemble, make([]byte, 4))
-		cvey.So(ret.Vecs[3].Data, cvey.ShouldResemble, make([]byte, 4))
-
-		tmp.Attr.Type.Oid = types.T_int64
-		attributeDefs[4] = tmp
-		ret = AllocateBatchBasedOnEngineAttributeDefinition(attributeDefs, rowCount)
-		cvey.So(ret.Vecs[4].Data, cvey.ShouldResemble, make([]byte, 8))
-		cvey.So(ret.Vecs[4].Data, cvey.ShouldResemble, make([]byte, 8))
-
-		tmp.Attr.Type.Oid = types.T_uint8
-		attributeDefs[5] = tmp
-		ret = AllocateBatchBasedOnEngineAttributeDefinition(attributeDefs, rowCount)
-		cvey.So(ret.Vecs[5].Data, cvey.ShouldResemble, []byte{0})
-		cvey.So(ret.Vecs[5].Data, cvey.ShouldResemble, []byte{0})
-
-		tmp.Attr.Type.Oid = types.T_uint16
-		attributeDefs[6] = tmp
-		ret = AllocateBatchBasedOnEngineAttributeDefinition(attributeDefs, rowCount)
-		cvey.So(ret.Vecs[6].Data, cvey.ShouldResemble, make([]byte, 2))
-		cvey.So(ret.Vecs[6].Data, cvey.ShouldResemble, make([]byte, 2))
-
-		tmp.Attr.Type.Oid = types.T_uint32
-		attributeDefs[7] = tmp
-		ret = AllocateBatchBasedOnEngineAttributeDefinition(attributeDefs, rowCount)
-		cvey.So(ret.Vecs[7].Data, cvey.ShouldResemble, make([]byte, 4))
-		cvey.So(ret.Vecs[7].Data, cvey.ShouldResemble, make([]byte, 4))
-
-		tmp.Attr.Type.Oid = types.T_uint64
-		attributeDefs[8] = tmp
-		ret = AllocateBatchBasedOnEngineAttributeDefinition(attributeDefs, rowCount)
-		cvey.So(ret.Vecs[8].Data, cvey.ShouldResemble, make([]byte, 8))
-		cvey.So(ret.Vecs[8].Data, cvey.ShouldResemble, make([]byte, 8))
-
-		tmp.Attr.Type.Oid = types.T_float32
-		attributeDefs[9] = tmp
-		ret = AllocateBatchBasedOnEngineAttributeDefinition(attributeDefs, rowCount)
-		cvey.So(ret.Vecs[9].Data, cvey.ShouldResemble, make([]byte, 4))
-		cvey.So(ret.Vecs[9].Data, cvey.ShouldResemble, make([]byte, 4))
-
-		tmp.Attr.Type.Oid = types.T_float64
-		attributeDefs[10] = tmp
-		ret = AllocateBatchBasedOnEngineAttributeDefinition(attributeDefs, rowCount)
-		cvey.So(ret.Vecs[10].Data, cvey.ShouldResemble, make([]byte, 8))
-		cvey.So(ret.Vecs[10].Data, cvey.ShouldResemble, make([]byte, 8))
-
-		tmp.Attr.Type.Oid = types.T_char
-		attributeDefs[11] = tmp
-		ret = AllocateBatchBasedOnEngineAttributeDefinition(attributeDefs, rowCount)
-		cvey.So(ret.Vecs[11].Data, cvey.ShouldResemble, []byte(nil))
-		cvey.So(ret.Vecs[11].Data, cvey.ShouldResemble, []byte(nil))
-
-		tmp.Attr.Type.Oid = types.T_date
-		attributeDefs[12] = tmp
-		ret = AllocateBatchBasedOnEngineAttributeDefinition(attributeDefs, rowCount)
-		cvey.So(ret.Vecs[12].Data, cvey.ShouldResemble, make([]byte, 4))
-		cvey.So(ret.Vecs[12].Data, cvey.ShouldResemble, make([]byte, 4))
-
-		tmp.Attr.Type.Oid = types.T_datetime
-		attributeDefs[13] = tmp
-		ret = AllocateBatchBasedOnEngineAttributeDefinition(attributeDefs, rowCount)
-		cvey.So(ret.Vecs[13].Data, cvey.ShouldResemble, make([]byte, 8))
-		cvey.So(ret.Vecs[13].Data, cvey.ShouldResemble, make([]byte, 8))
-	})
-}
-
-func Test_FillBatchWithData(t *testing.T) {
-	var data [][]string
-	var batch = &batch.Batch{}
-	var colNum = 14
-	cvey.Convey("FillBatchWithData succ", t, func() {
-		data = make([][]string, 1)
-		data[0] = make([]string, 1)
-		batch.Vecs = make([]*vector.Vector, colNum)
-		batch.Vecs[0] = &vector.Vector{Nsp: &nulls.Nulls{}, Typ: types.Type{Oid: types.T_bool}}
-		batch.Vecs[0].Col = make([]bool, 1)
-		FillBatchWithData(data, batch)
-
-		data[0][0] = "true"
-		FillBatchWithData(data, batch)
-
-		data[0] = append(data[0], string(""))
-		batch.Vecs[1] = &vector.Vector{Nsp: &nulls.Nulls{}, Typ: types.Type{Oid: types.T_int8}}
-		batch.Vecs[1].Col = make([]int8, 1)
-		FillBatchWithData(data, batch)
-
-		data[0][1] = "1"
-		FillBatchWithData(data, batch)
-
-		data[0] = append(data[0], string(""))
-		batch.Vecs[2] = &vector.Vector{Nsp: &nulls.Nulls{}, Typ: types.Type{Oid: types.T_int16}}
-		batch.Vecs[2].Col = make([]int16, 1)
-		FillBatchWithData(data, batch)
-
-		data[0][2] = "2"
-		FillBatchWithData(data, batch)
-
-		data[0] = append(data[0], string(""))
-		batch.Vecs[3] = &vector.Vector{Nsp: &nulls.Nulls{}, Typ: types.Type{Oid: types.T_int32}}
-		batch.Vecs[3].Col = make([]int32, 1)
-		FillBatchWithData(data, batch)
-
-		data[0][3] = "3"
-		FillBatchWithData(data, batch)
-
-		data[0] = append(data[0], string(""))
-		batch.Vecs[4] = &vector.Vector{Nsp: &nulls.Nulls{}, Typ: types.Type{Oid: types.T_int64}}
-		batch.Vecs[4].Col = make([]int64, 1)
-		FillBatchWithData(data, batch)
-
-		data[0][4] = "4"
-		FillBatchWithData(data, batch)
-
-		data[0] = append(data[0], string(""))
-		batch.Vecs[5] = &vector.Vector{Nsp: &nulls.Nulls{}, Typ: types.Type{Oid: types.T_uint8}}
-		batch.Vecs[5].Col = make([]uint8, 1)
-		FillBatchWithData(data, batch)
-
-		data[0][5] = "5"
-		FillBatchWithData(data, batch)
-
-		data[0] = append(data[0], string(""))
-		batch.Vecs[6] = &vector.Vector{Nsp: &nulls.Nulls{}, Typ: types.Type{Oid: types.T_uint16}}
-		batch.Vecs[6].Col = make([]uint16, 1)
-		FillBatchWithData(data, batch)
-
-		data[0][6] = "5"
-		FillBatchWithData(data, batch)
-
-		data[0] = append(data[0], string(""))
-		batch.Vecs[7] = &vector.Vector{Nsp: &nulls.Nulls{}, Typ: types.Type{Oid: types.T_uint32}}
-		batch.Vecs[7].Col = make([]uint32, 1)
-		FillBatchWithData(data, batch)
-
-		data[0][7] = "7"
-		FillBatchWithData(data, batch)
-
-		data[0] = append(data[0], string(""))
-		batch.Vecs[8] = &vector.Vector{Nsp: &nulls.Nulls{}, Typ: types.Type{Oid: types.T_uint64}}
-		batch.Vecs[8].Col = make([]uint64, 1)
-		FillBatchWithData(data, batch)
-
-		data[0][8] = "5"
-		FillBatchWithData(data, batch)
-
-		data[0] = append(data[0], string(""))
-		batch.Vecs[9] = &vector.Vector{Nsp: &nulls.Nulls{}, Typ: types.Type{Oid: types.T_float32}}
-		batch.Vecs[9].Col = make([]float32, 1)
-		FillBatchWithData(data, batch)
-
-		data[0][9] = "9"
-		FillBatchWithData(data, batch)
-
-		data[0] = append(data[0], string(""))
-		batch.Vecs[10] = &vector.Vector{Nsp: &nulls.Nulls{}, Typ: types.Type{Oid: types.T_float64}}
-		batch.Vecs[10].Col = make([]float64, 1)
-		FillBatchWithData(data, batch)
-
-		data[0][10] = "10"
-		FillBatchWithData(data, batch)
-
-		data[0] = append(data[0], string(""))
-		batch.Vecs[11] = &vector.Vector{Nsp: &nulls.Nulls{}, Typ: types.Type{Oid: types.T_date}}
-		batch.Vecs[11].Col = make([]types.Date, 1)
-		FillBatchWithData(data, batch)
-
-		data[0][11] = "2022-07-13"
-		FillBatchWithData(data, batch)
-
-		data[0] = append(data[0], string(""))
-		batch.Vecs[12] = &vector.Vector{Nsp: &nulls.Nulls{}, Typ: types.Type{Oid: types.T_datetime}}
-		batch.Vecs[12].Col = make([]types.Datetime, 1)
-		FillBatchWithData(data, batch)
-
-		data[0][12] = "2022-07-13 11:11:11.1234"
-		FillBatchWithData(data, batch)
-	})
-}
-
-func Test_FormatLineInBatch(t *testing.T) {
-	var bat = &batch.Batch{}
-	var rowIndex = 0
-	var res []string
-	var colNum = 13
-	var colName = []string{"false", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0001-01-01", "0001-01-01 00:00:00"}
-	cvey.Convey("FormatLineInBatch succ", t, func() {
-		bat.Vecs = make([]*vector.Vector, colNum)
-		bat.Vecs[0] = &vector.Vector{Nsp: &nulls.Nulls{}, Typ: types.Type{Oid: types.T_bool}}
-		bat.Vecs[0].Col = make([]bool, 1)
-
-		bat.Vecs[1] = &vector.Vector{Nsp: &nulls.Nulls{}, Typ: types.Type{Oid: types.T_int8}}
-		bat.Vecs[1].Col = make([]int8, 1)
-
-		bat.Vecs[2] = &vector.Vector{Nsp: &nulls.Nulls{}, Typ: types.Type{Oid: types.T_int16}}
-		bat.Vecs[2].Col = make([]int16, 1)
-
-		bat.Vecs[3] = &vector.Vector{Nsp: &nulls.Nulls{}, Typ: types.Type{Oid: types.T_int32}}
-		bat.Vecs[3].Col = make([]int32, 1)
-
-		bat.Vecs[4] = &vector.Vector{Nsp: &nulls.Nulls{}, Typ: types.Type{Oid: types.T_int64}}
-		bat.Vecs[4].Col = make([]int64, 1)
-
-		bat.Vecs[5] = &vector.Vector{Nsp: &nulls.Nulls{}, Typ: types.Type{Oid: types.T_uint8}}
-		bat.Vecs[5].Col = make([]uint8, 1)
-
-		bat.Vecs[6] = &vector.Vector{Nsp: &nulls.Nulls{}, Typ: types.Type{Oid: types.T_uint16}}
-		bat.Vecs[6].Col = make([]uint16, 1)
-
-		bat.Vecs[7] = &vector.Vector{Nsp: &nulls.Nulls{}, Typ: types.Type{Oid: types.T_uint32}}
-		bat.Vecs[7].Col = make([]uint32, 1)
-
-		bat.Vecs[8] = &vector.Vector{Nsp: &nulls.Nulls{}, Typ: types.Type{Oid: types.T_uint64}}
-		bat.Vecs[8].Col = make([]uint64, 1)
-
-		bat.Vecs[9] = &vector.Vector{Nsp: &nulls.Nulls{}, Typ: types.Type{Oid: types.T_float32}}
-		bat.Vecs[9].Col = make([]float32, 1)
-
-		bat.Vecs[10] = &vector.Vector{Nsp: &nulls.Nulls{}, Typ: types.Type{Oid: types.T_float64}}
-		bat.Vecs[10].Col = make([]float64, 1)
-
-		bat.Vecs[11] = &vector.Vector{Nsp: &nulls.Nulls{}, Typ: types.Type{Oid: types.T_date}}
-		bat.Vecs[11].Col = make([]types.Date, 1)
-
-		bat.Vecs[12] = &vector.Vector{Nsp: &nulls.Nulls{}, Typ: types.Type{Oid: types.T_datetime}}
-		bat.Vecs[12].Col = make([]types.Datetime, 1)
-
-		res = FormatLineInBatch(bat, rowIndex)
-		cvey.So(res, cvey.ShouldResemble, colName)
-
-		bat.Vecs[0].Col = make([]bool, 2)
-		nulls.Add(bat.Vecs[0].Nsp, 1)
-
-		bat.Vecs[1].Col = make([]int8, 2)
-		nulls.Add(bat.Vecs[1].Nsp, 1)
-
-		bat.Vecs[2].Col = make([]int16, 2)
-		nulls.Add(bat.Vecs[2].Nsp, 1)
-
-		bat.Vecs[3].Col = make([]int32, 2)
-		nulls.Add(bat.Vecs[3].Nsp, 1)
-
-		bat.Vecs[4].Col = make([]int64, 2)
-		nulls.Add(bat.Vecs[4].Nsp, 1)
-
-		bat.Vecs[5].Col = make([]uint8, 2)
-		nulls.Add(bat.Vecs[5].Nsp, 1)
-
-		bat.Vecs[6].Col = make([]uint16, 2)
-		nulls.Add(bat.Vecs[6].Nsp, 1)
-
-		bat.Vecs[7].Col = make([]uint32, 2)
-		nulls.Add(bat.Vecs[7].Nsp, 1)
-
-		bat.Vecs[8].Col = make([]uint64, 2)
-		nulls.Add(bat.Vecs[8].Nsp, 1)
-
-		bat.Vecs[9].Col = make([]float32, 2)
-		nulls.Add(bat.Vecs[9].Nsp, 1)
-
-		bat.Vecs[10].Col = make([]float64, 2)
-		nulls.Add(bat.Vecs[10].Nsp, 1)
-
-		bat.Vecs[11].Col = make([]types.Date, 2)
-		nulls.Add(bat.Vecs[11].Nsp, 1)
-
-		bat.Vecs[12].Col = make([]types.Datetime, 2)
-		nulls.Add(bat.Vecs[12].Nsp, 1)
-
-		res = FormatLineInBatch(bat, rowIndex)
-		cvey.So(res, cvey.ShouldResemble, colName)
-
-		res = FormatLineInBatch(bat, 1)
-		cvey.So(res, cvey.ShouldResemble, []string{"<nil>", "<nil>", "<nil>", "<nil>", "<nil>", "<nil>", "<nil>", "<nil>", "<nil>", "<nil>", "<nil>", "<nil>", "<nil>"})
-	})
-}
diff --git a/pkg/txn/storage/txn/catalog_handler.go b/pkg/txn/storage/txn/catalog_handler.go
index 1e7c39202b29ead7bea49ef4ba45327862fe8b2f..cff11a29fe48b62c26b0760902282aede5d3e0e2 100644
--- a/pkg/txn/storage/txn/catalog_handler.go
+++ b/pkg/txn/storage/txn/catalog_handler.go
@@ -23,10 +23,11 @@ import (
 	"github.com/matrixorigin/matrixone/pkg/container/batch"
 	"github.com/matrixorigin/matrixone/pkg/container/types"
 	"github.com/matrixorigin/matrixone/pkg/container/vector"
-	"github.com/matrixorigin/matrixone/pkg/frontend"
 	"github.com/matrixorigin/matrixone/pkg/pb/timestamp"
 	"github.com/matrixorigin/matrixone/pkg/pb/txn"
 	"github.com/matrixorigin/matrixone/pkg/vm/engine"
+	"github.com/matrixorigin/matrixone/pkg/vm/engine/tae/catalog"
+	"github.com/matrixorigin/matrixone/pkg/vm/engine/tae/moengine"
 	txnengine "github.com/matrixorigin/matrixone/pkg/vm/engine/txn"
 )
 
@@ -56,66 +57,84 @@ func NewCatalogHandler(upstream *MemHandler) *CatalogHandler {
 	// database
 	handler.database = &DatabaseRow{
 		ID:   uuid.NewString(),
-		Name: "mo_catalog", // hardcoded in frontend package
+		Name: catalog.SystemDBName, // was "mo_catalog"; now taken from the tae catalog package
 	}
 
 	// relations
 	databasesRelRow := &RelationRow{
 		ID:         uuid.NewString(),
 		DatabaseID: handler.database.ID,
-		Name:       "mo_database", // hardcoded in frontend package
+		Name:       catalog.SystemTable_DB_Name, // was "mo_database"; now taken from the tae catalog package
 		Type:       txnengine.RelationTable,
 	}
 	handler.relations[databasesRelRow.ID] = databasesRelRow
 	tablesRelRow := &RelationRow{
 		ID:         uuid.NewString(),
 		DatabaseID: handler.database.ID,
-		Name:       "mo_tables", // hardcoded in frontend package
+		Name:       catalog.SystemTable_Table_Name, // was "mo_tables"; now taken from the tae catalog package
 		Type:       txnengine.RelationTable,
 	}
 	handler.relations[tablesRelRow.ID] = tablesRelRow
 	attributesRelRow := &RelationRow{
 		ID:         uuid.NewString(),
 		DatabaseID: handler.database.ID,
-		Name:       "mo_columns", // hardcoded in frontend package
+		Name:       catalog.SystemTable_Columns_Name, // was "mo_columns"; now taken from the tae catalog package
 		Type:       txnengine.RelationTable,
 	}
 	handler.relations[attributesRelRow.ID] = attributesRelRow
 
 	// attributes
 	// databases
-	for i, def := range frontend.ConvertCatalogSchemaToEngineFormat(
-		frontend.DefineSchemaForMoDatabase(),
-	) {
+	defs, err := moengine.SchemaToDefs(catalog.SystemDBSchema)
+	if err != nil {
+		panic(err)
+	}
+	for i, def := range defs {
+		attr, ok := def.(*engine.AttributeDef)
+		if !ok {
+			continue
+		}
 		row := &AttributeRow{
 			ID:         uuid.NewString(),
 			RelationID: databasesRelRow.ID,
 			Order:      i,
-			Attribute:  def.Attr,
+			Attribute:  attr.Attr,
 		}
 		handler.attributes[row.ID] = row
 	}
 	// relations
-	for i, def := range frontend.ConvertCatalogSchemaToEngineFormat(
-		frontend.DefineSchemaForMoTables(),
-	) {
+	defs, err = moengine.SchemaToDefs(catalog.SystemTableSchema)
+	if err != nil {
+		panic(err)
+	}
+	for i, def := range defs {
+		attr, ok := def.(*engine.AttributeDef)
+		if !ok {
+			continue
+		}
 		row := &AttributeRow{
 			ID:         uuid.NewString(),
 			RelationID: tablesRelRow.ID,
 			Order:      i,
-			Attribute:  def.Attr,
+			Attribute:  attr.Attr,
 		}
 		handler.attributes[row.ID] = row
 	}
 	// attributes
-	for i, def := range frontend.ConvertCatalogSchemaToEngineFormat(
-		frontend.DefineSchemaForMoColumns(),
-	) {
+	defs, err = moengine.SchemaToDefs(catalog.SystemColumnSchema)
+	if err != nil {
+		panic(err)
+	}
+	for i, def := range defs {
+		attr, ok := def.(*engine.AttributeDef)
+		if !ok {
+			continue
+		}
 		row := &AttributeRow{
 			ID:         uuid.NewString(),
 			RelationID: attributesRelRow.ID,
 			Order:      i,
-			Attribute:  def.Attr,
+			Attribute:  attr.Attr,
 		}
 		handler.attributes[row.ID] = row
 	}
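The three loops above repeat the same moengine.SchemaToDefs call followed by filtering for *engine.AttributeDef. The standalone sketch below shows only that filtering step as one generic helper; attributeDef and commentDef are hypothetical stand-in types rather than the real engine definitions, and the helper is not part of this patch.

// Sketch only, not part of this patch; attributeDef and commentDef are
// hypothetical stand-ins for the engine's table-definition types.
package main

import "fmt"

type attributeDef struct{ Name string }
type commentDef struct{ Text string }

// filterType keeps only the elements of defs whose concrete type is T,
// mirroring the def.(*engine.AttributeDef) checks in the loops above.
func filterType[T any](defs []any) []T {
	out := make([]T, 0, len(defs))
	for _, def := range defs {
		if v, ok := def.(T); ok {
			out = append(out, v)
		}
	}
	return out
}

func main() {
	defs := []any{&attributeDef{Name: "datname"}, &commentDef{Text: "ignored"}, &attributeDef{Name: "dat_createsql"}}
	for i, attr := range filterType[*attributeDef](defs) {
		fmt.Println(i, attr.Name) // 0 datname, then 1 dat_createsql
	}
}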
@@ -327,17 +346,17 @@ func (c *CatalogHandler) HandleNewTableIter(meta txn.TxnMeta, req txnengine.NewT
 
 		var iter any
 		switch rel.Name {
-		case "mo_database":
+		case catalog.SystemTable_DB_Name:
 			iter = &Iter[Text, DatabaseRow]{
 				TableIter: c.upstream.databases.NewIter(tx),
 				AttrsMap:  attrsMap,
 			}
-		case "mo_tables":
+		case catalog.SystemTable_Table_Name:
 			iter = &Iter[Text, RelationRow]{
 				TableIter: c.upstream.relations.NewIter(tx),
 				AttrsMap:  attrsMap,
 			}
-		case "mo_columns":
+		case catalog.SystemTable_Columns_Name:
 			iter = &Iter[Text, AttributeRow]{
 				TableIter: c.upstream.attributes.NewIter(tx),
 				AttrsMap:  attrsMap,
@@ -386,6 +405,7 @@ func (c *CatalogHandler) HandlePrepare(meta txn.TxnMeta) (timestamp.Timestamp, e
 }
 
 func (c *CatalogHandler) HandleRead(meta txn.TxnMeta, req txnengine.ReadReq, resp *txnengine.ReadResp) error {
+	tx := c.upstream.getTx(meta)
 
 	c.iterators.Lock()
 	v, ok := c.iterators.Map[req.IterID]
@@ -417,11 +437,11 @@ func (c *CatalogHandler) HandleRead(meta txn.TxnMeta, req txnengine.ReadReq, res
 					var value any
 
 					switch name {
-					case "datname":
+					case catalog.SystemDBAttr_Name:
 						value = row.Name
-					case "dat_catalog_name":
+					case catalog.SystemDBAttr_CatalogName:
 						value = ""
-					case "dat_createsql":
+					case catalog.SystemDBAttr_CreateSQL:
 						value = ""
 					default:
 						resp.ErrColumnNotFound.Name = name
@@ -456,17 +476,21 @@ func (c *CatalogHandler) HandleRead(meta txn.TxnMeta, req txnengine.ReadReq, res
 					var value any
 
 					switch name {
-					case "relname":
+					case catalog.SystemRelAttr_Name:
 						value = row.Name
-					case "reldatabase":
-						value = c.database.Name
-					case "relpersistence":
+					case catalog.SystemRelAttr_DBName:
+						dbRow, err := c.upstream.databases.Get(tx, Text(row.DatabaseID))
+						if err != nil {
+							return err
+						}
+						value = dbRow.Name
+					case catalog.SystemRelAttr_Persistence:
 						value = true
-					case "relkind":
+					case catalog.SystemRelAttr_Kind:
 						value = "r"
-					case "rel_comment":
+					case catalog.SystemRelAttr_Comment:
 						value = row.Comments
-					case "rel_createsql":
+					case catalog.SystemRelAttr_CreateSQL:
 						value = ""
 					default:
 						resp.ErrColumnNotFound.Name = name
@@ -501,44 +525,55 @@ func (c *CatalogHandler) HandleRead(meta txn.TxnMeta, req txnengine.ReadReq, res
 					var value any
 
 					switch name {
-					case "att_database":
-						value = c.database.Name
-					case "att_relname":
-						rel := c.relations[row.RelationID]
-						value = rel.Name
-					case "attname":
+					case catalog.SystemColAttr_DBName:
+						relRow, err := c.upstream.relations.Get(tx, Text(row.RelationID))
+						if err != nil {
+							return err
+						}
+						dbRow, err := c.upstream.databases.Get(tx, Text(relRow.DatabaseID))
+						if err != nil {
+							return err
+						}
+						value = dbRow.Name
+					case catalog.SystemColAttr_RelName:
+						relRow, err := c.upstream.relations.Get(tx, Text(row.RelationID))
+						if err != nil {
+							return err
+						}
+						value = relRow.Name
+					case catalog.SystemColAttr_Name:
 						value = row.Name
-					case "atttyp":
+					case catalog.SystemColAttr_Type:
 						value = row.Type.Oid
-					case "attnum":
+					case catalog.SystemColAttr_Num:
 						value = row.Order
-					case "att_length":
+					case catalog.SystemColAttr_Length:
 						value = row.Type.Size
-					case "attnotnull":
+					case catalog.SystemColAttr_NullAbility:
 						value = row.Default.NullAbility
-					case "atthasdef":
+					case catalog.SystemColAttr_HasExpr:
 						value = row.Default.Expr != nil
-					case "att_default":
+					case catalog.SystemColAttr_DefaultExpr:
 						value = row.Default.Expr.String()
-					case "attisdropped":
+					case catalog.SystemColAttr_IsDropped:
 						value = false
-					case "att_constraint_type":
+					case catalog.SystemColAttr_ConstraintType:
 						if row.Primary {
 							value = "p"
 						} else {
 							value = "n"
 						}
-					case "att_is_unsigned":
+					case catalog.SystemColAttr_IsUnsigned:
 						value = row.Type.Oid == types.T_uint8 ||
 							row.Type.Oid == types.T_uint16 ||
 							row.Type.Oid == types.T_uint32 ||
 							row.Type.Oid == types.T_uint64 ||
 							row.Type.Oid == types.T_uint128
-					case "att_is_auto_increment":
-						value = false
-					case "att_comment":
+					case catalog.SystemColAttr_IsAutoIncrement:
+						value = false // TODO: auto-increment is not tracked yet, so always report false
+					case catalog.SystemColAttr_Comment:
 						value = row.Comment
-					case "att_is_hidden":
+					case catalog.SystemColAttr_IsHidden:
 						value = row.IsHidden
 					default:
 						resp.ErrColumnNotFound.Name = name