// Copyright 2023 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package importinto

import (
	"encoding/json"
	"testing"

	"github.com/pingcap/tidb/disttask/framework/planner"
	"github.com/pingcap/tidb/domain/infosync"
	"github.com/pingcap/tidb/executor/importer"
	"github.com/pingcap/tidb/meta/autoid"
	"github.com/pingcap/tidb/parser/model"
	"github.com/pingcap/tidb/parser/mysql"
	"github.com/stretchr/testify/require"
)
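
// TestLogicalPlan checks that a LogicalPlan round-trips through task meta
// bytes: ToTaskMeta followed by FromTaskMeta must reproduce the original plan.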
func TestLogicalPlan(t *testing.T) {
	logicalPlan := &LogicalPlan{
		JobID:             1,
		Plan:              importer.Plan{},
		Stmt:              `IMPORT INTO db.tb FROM 'gs://test-load/*.csv?endpoint=xxx'`,
		EligibleInstances: []*infosync.ServerInfo{{ID: "1"}},
		ChunkMap:          map[int32][]Chunk{1: {{Path: "gs://test-load/1.csv"}}},
	}
	// Encode to task meta bytes, decode into a fresh plan, and expect an
	// identical value.
	bs, err := logicalPlan.ToTaskMeta()
	require.NoError(t, err)
	plan := &LogicalPlan{}
	require.NoError(t, plan.FromTaskMeta(bs))
	require.Equal(t, logicalPlan, plan)
}
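
// TestToPhysicalPlan converts a logical IMPORT INTO plan into a physical
// plan and checks both the resulting processor layout and the subtask metas
// generated for the import and post-process steps.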
func TestToPhysicalPlan(t *testing.T) {
	chunkID := int32(1)
	logicalPlan := &LogicalPlan{
		JobID: 1,
		Plan: importer.Plan{
			DBName: "db",
			TableInfo: &model.TableInfo{
				Name: model.NewCIStr("tb"),
			},
		},
		Stmt:              `IMPORT INTO db.tb FROM 'gs://test-load/*.csv?endpoint=xxx'`,
		EligibleInstances: []*infosync.ServerInfo{{ID: "1"}},
		ChunkMap:          map[int32][]Chunk{chunkID: {{Path: "gs://test-load/1.csv"}}},
	}
	planCtx := planner.PlanCtx{}
	physicalPlan, err := logicalPlan.ToPhysicalPlan(planCtx)
	require.NoError(t, err)
	// Expected layout: processor 0 runs the import step and links to
	// processor 1, which runs the post-process step.
	plan := &planner.PhysicalPlan{
		Processors: []planner.ProcessorSpec{
			{
				ID: 0,
				Pipeline: &ImportSpec{
					ID:     chunkID,
					Plan:   logicalPlan.Plan,
					Chunks: logicalPlan.ChunkMap[chunkID],
				},
				Output: planner.OutputSpec{
					Links: []planner.LinkSpec{
						{
							ProcessorID: 1,
						},
					},
				},
				Step: StepImport,
			},
			{
				ID: 1,
				Input: planner.InputSpec{
					ColumnTypes: []byte{
						mysql.TypeLonglong, mysql.TypeLonglong, mysql.TypeLonglong, mysql.TypeLonglong, mysql.TypeLonglong, mysql.TypeJSON,
					},
					Links: []planner.LinkSpec{
						{
							ProcessorID: 0,
						},
					},
				},
				Pipeline: &PostProcessSpec{
					Schema: "db",
					Table:  "tb",
				},
				Step: StepPostProcess,
			},
		},
	}
	require.Equal(t, plan, physicalPlan)
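
	// Subtask metas for the import step: expect a single meta that carries
	// the chunk group assigned to chunkID.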
	subtaskMetas1, err := physicalPlan.ToSubtaskMetas(planCtx, StepImport)
	require.NoError(t, err)
	subtaskMeta1 := ImportStepMeta{
		ID:     chunkID,
		Chunks: logicalPlan.ChunkMap[chunkID],
	}
	bs, err := json.Marshal(subtaskMeta1)
	require.NoError(t, err)
	require.Equal(t, [][]byte{bs}, subtaskMetas1)
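
	// Subtask metas for the post-process step: the checksum recorded in the
	// previous (import) step's meta should be carried into PostProcessStepMeta.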
	subtaskMeta1.Checksum = Checksum{Size: 1, KVs: 2, Sum: 3}
	bs, err = json.Marshal(subtaskMeta1)
	require.NoError(t, err)
	subtaskMetas2, err := physicalPlan.ToSubtaskMetas(planner.PlanCtx{
		PreviousSubtaskMetas: [][]byte{bs},
	}, StepPostProcess)
	require.NoError(t, err)
	subtaskMeta2 := PostProcessStepMeta{
		Checksum: Checksum{Size: 1, KVs: 2, Sum: 3},
		MaxIDs:   map[autoid.AllocatorType]int64{},
	}
	bs, err = json.Marshal(subtaskMeta2)
	require.NoError(t, err)
	require.Equal(t, [][]byte{bs}, subtaskMetas2)
}
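
// TestFromTaskMetaInvalidInput is a minimal negative-case sketch, assuming
// FromTaskMeta unmarshals the bytes as JSON and returns the decode error;
// the test name and the malformed input below are illustrative additions,
// not part of the original file.
func TestFromTaskMetaInvalidInput(t *testing.T) {
	plan := &LogicalPlan{}
	// Truncated JSON should fail to decode into the plan.
	require.Error(t, plan.FromTaskMeta([]byte("{")))
}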