executor, server: load_data.go is changed and add unit test #23201

Merged 7 commits on Mar 29, 2021

28 changes: 22 additions & 6 deletions executor/load_data.go
@@ -404,9 +404,23 @@ func (e *LoadDataInfo) getValidData(prevData, curData []byte) ([]byte, []byte) {
return nil, curData
}

// isInQuoter reports whether bs ends inside an enclosed (quoted) field, taking escaped bytes into account.
func (e *LoadDataInfo) isInQuoter(bs []byte) bool {
inQuoter := false
for i := 0; i < len(bs); i++ {
switch bs[i] {
case e.FieldsInfo.Enclosed:
inQuoter = !inQuoter
case e.FieldsInfo.Escaped:
i++
default:
}
}
return inQuoter
}

// indexOfTerminator returns the index of the terminator; if there is none, it returns -1.
// Normally the field terminator and line terminator are short, so we just use a brute-force algorithm.
func (e *LoadDataInfo) indexOfTerminator(bs []byte) int {
func (e *LoadDataInfo) indexOfTerminator(bs []byte, isInQuoter bool) int {
fieldTerm := []byte(e.FieldsInfo.Terminated)
fieldTermLen := len(fieldTerm)
lineTerm := []byte(e.LinesInfo.Terminated)
@@ -448,7 +462,9 @@ func (e *LoadDataInfo) indexOfTerminator(bs []byte) int {
loop:
for i := 0; i < len(bs); i++ {
if atFieldStart && bs[i] == e.FieldsInfo.Enclosed {
inQuoter = true
if !isInQuoter {
inQuoter = true
}
atFieldStart = false
continue
}
@@ -496,7 +512,7 @@ func (e *LoadDataInfo) getLine(prevData, curData []byte, ignore bool) ([]byte, [
if prevData == nil && len(curData) < startingLen {
return nil, curData, false
}

inquotor := e.isInQuoter(prevData)
prevLen := len(prevData)
terminatedLen := len(e.LinesInfo.Terminated)
curStartIdx := 0
@@ -508,7 +524,7 @@ func (e *LoadDataInfo) getLine(prevData, curData []byte, ignore bool) ([]byte, [
if ignore {
endIdx = strings.Index(string(hack.String(curData[curStartIdx:])), e.LinesInfo.Terminated)
} else {
endIdx = e.indexOfTerminator(curData[curStartIdx:])
endIdx = e.indexOfTerminator(curData[curStartIdx:], inquotor)
}
}
if endIdx == -1 {
@@ -522,7 +538,7 @@ func (e *LoadDataInfo) getLine(prevData, curData []byte, ignore bool) ([]byte, [
if ignore {
endIdx = strings.Index(string(hack.String(curData[startingLen:])), e.LinesInfo.Terminated)
} else {
endIdx = e.indexOfTerminator(curData[startingLen:])
endIdx = e.indexOfTerminator(curData[startingLen:], inquotor)
}
if endIdx != -1 {
nextDataIdx := startingLen + endIdx + terminatedLen
@@ -543,7 +559,7 @@ func (e *LoadDataInfo) getLine(prevData, curData []byte, ignore bool) ([]byte, [
if ignore {
endIdx = strings.Index(string(hack.String(prevData[startingLen:])), e.LinesInfo.Terminated)
} else {
endIdx = e.indexOfTerminator(prevData[startingLen:])
endIdx = e.indexOfTerminator(prevData[startingLen:], inquotor)
}
if endIdx >= prevLen {
return prevData[startingLen : startingLen+endIdx], curData[nextDataIdx:], true
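To make the change above easier to follow: getLine can hand indexOfTerminator a chunk whose preceding data (prevData) ended in the middle of an enclosed field. The old indexOfTerminator had no way to know that, so its quote tracking started out of phase and terminators in the next chunk could be mis-classified; the new isInQuoter(prevData) call carries that state forward. The sketch below is an editorial illustration of the idea only, not TiDB code: the names quoteStateOf and findLineEnd are hypothetical, and it uses single-byte terminators instead of TiDB's multi-byte field and line terminators.

package main

import "fmt"

// quoteStateOf reports whether bs ends inside an enclosed (quoted) field,
// skipping escaped bytes, in the same spirit as isInQuoter above.
func quoteStateOf(bs []byte, enclosed, escaped byte) bool {
    in := false
    for i := 0; i < len(bs); i++ {
        switch bs[i] {
        case enclosed:
            in = !in
        case escaped:
            i++ // the next byte is escaped; skip it
        }
    }
    return in
}

// findLineEnd returns the index of the first line terminator that is not
// inside an enclosed field, or -1 if there is none. inQuoter is the quote
// state carried over from the previous chunk.
func findLineEnd(bs []byte, enclosed, term byte, inQuoter bool) int {
    for i := 0; i < len(bs); i++ {
        switch bs[i] {
        case enclosed:
            inQuoter = !inQuoter
        case term:
            if !inQuoter {
                return i
            }
        }
    }
    return -1
}

func main() {
    // A read-buffer boundary fell in the middle of the quoted value '345':
    prev := []byte("'12','34")    // previous chunk; its last quote is still open
    cur := []byte("5'|'67','89'") // next chunk; the real terminator is at index 2

    carried := quoteStateOf(prev, '\'', '\\')
    fmt.Println(findLineEnd(cur, '\'', '|', carried)) // 2: correct split
    fmt.Println(findLineEnd(cur, '\'', '|', false))   // -1: terminator missed
}

Running it prints 2 and then -1: with the carried quote state the scan finds the real row boundary after '345', while a scan that resets the state toggles on the stray closing quote and never sees a terminator outside quotes.
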
57 changes: 57 additions & 0 deletions server/server_test.go
@@ -590,6 +590,63 @@ func (cli *testServerClient) runTestLoadDataAutoRandom(c *C) {
})
}

func (cli *testServerClient) runTestLoadDataAutoRandomWithSpecialTerm(c *C) {
path := "/tmp/load_data_txn_error_term.csv"
Review comment from zz-jason (Member), Mar 27, 2021:

I think it's better to convert this test to an integration test.


fp, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600)
c.Assert(err, IsNil)
c.Assert(fp, NotNil)

defer func() {
_ = os.Remove(path)
}()

cksum1 := 0
cksum2 := 0
for i := 0; i < 50000; i++ {
n1 := rand.Intn(1000)
n2 := rand.Intn(1000)
str1 := strconv.Itoa(n1)
str2 := strconv.Itoa(n2)
row := "'" + str1 + "','" + str2 + "'"
_, err := fp.WriteString(row)
c.Assert(err, IsNil)
if i != 49999 {
_, err = fp.WriteString("|")
}
c.Assert(err, IsNil)

if i == 0 {
cksum1 = n1
cksum2 = n2
} else {
cksum1 = cksum1 ^ n1
cksum2 = cksum2 ^ n2
}
}

err = fp.Close()
c.Assert(err, IsNil)

cli.runTestsOnNewDB(c, func(config *mysql.Config) {
config.AllowAllFiles = true
config.Params = map[string]string{"sql_mode": "''"}
}, "load_data_batch_dml", func(dbt *DBTest) {
// Set the batch size, and check that load data does not hit an invalid txn error.
dbt.mustExec("set @@session.tidb_dml_batch_size = 128")
dbt.mustExec("drop table if exists t1")
dbt.mustExec("create table t1(c1 bigint auto_random primary key, c2 bigint, c3 bigint)")
dbt.mustExec(fmt.Sprintf("load data local infile %q into table t1 fields terminated by ',' enclosed by '\\'' lines terminated by '|' (c2, c3)", path))
rows := dbt.mustQuery("select count(*) from t1")
cli.checkRows(c, rows, "50000")
rows = dbt.mustQuery("select bit_xor(c2), bit_xor(c3) from t1")
res := strconv.Itoa(cksum1)
res = res + " "
res = res + strconv.Itoa(cksum2)
cli.checkRows(c, rows, res)
})
}

func (cli *testServerClient) runTestLoadDataForListPartition(c *C) {
path := "/tmp/load_data_list_partition.csv"
defer func() {
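For context on what the new test exercises: every generated value is enclosed in single quotes and rows are separated by '|', with no terminator after the last row, so the file starts roughly like the illustrative sample below (the real values are random). With 50,000 such rows, read-buffer boundaries are likely to land inside a quoted field, which is the situation the isInQuoter carry-over above is meant to handle; the count(*) and BIT_XOR checks then confirm that no row was split or merged incorrectly.

'881','17'|'5','930'|'204','46'|...
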
4 changes: 4 additions & 0 deletions server/tidb_test.go
@@ -171,6 +171,10 @@ func (ts *tidbTestSerialSuite) TestLoadDataAutoRandom(c *C) {
ts.runTestLoadDataAutoRandom(c)
}

func (ts *tidbTestSerialSuite) TestLoadDataAutoRandomWithSpecialTerm(c *C) {
ts.runTestLoadDataAutoRandomWithSpecialTerm(c)
}

func (ts *tidbTestSerialSuite) TestExplainFor(c *C) {
ts.runTestExplainForConn(c)
}