Skip to content

Commit

Permalink
cop: fix the scan panic when checksum is enabled (tikv#16373)
Browse files Browse the repository at this point in the history
close tikv#16371

Fix the scan panic issue when checksum is enabled.

Signed-off-by: cfzjywxk <[email protected]>

Co-authored-by: ti-chi-bot[bot] <108142056+ti-chi-bot[bot]@users.noreply.github.com>
  • Loading branch information
cfzjywxk and ti-chi-bot[bot] authored Jan 15, 2024
1 parent 2a553aa commit 67c7fa1
Show file tree
Hide file tree
Showing 3 changed files with 37 additions and 4 deletions.
20 changes: 20 additions & 0 deletions components/test_coprocessor/src/store.rs
Original file line number Diff line number Diff line change
Expand Up @@ -299,6 +299,26 @@ impl<E: Engine> Store<E> {
.collect();
FixtureStore::new(data)
}

/// Inserts a single row into `tbl` with every column set to NULL.
///
/// The row value is populated through both setter families — `set` with
/// `Datum::Null` and `set_v2` with empty `ScalarValue`s — so the row can be
/// encoded in the v2 row format. `with_checksum` and `extra_checksum` are
/// forwarded to `execute_with_v2_checksum`, which presumably controls
/// whether checksum bytes are appended to the encoded row value.
///
/// The insert runs in its own transaction: `begin()` before the write and
/// `commit()` after.
pub fn insert_all_null_row(
    &mut self,
    tbl: &Table,
    ctx: Context,
    with_checksum: bool,
    extra_checksum: Option<u32>,
) {
    self.begin();
    let inserts = self
        .insert_into(tbl)
        .set(&tbl["id"], Datum::Null)
        .set(&tbl["name"], Datum::Null)
        .set(&tbl["count"], Datum::Null)
        .set_v2(&tbl["id"], ScalarValue::Int(None))
        .set_v2(&tbl["name"], ScalarValue::Bytes(None))
        .set_v2(&tbl["count"], ScalarValue::Int(None));
    // `ctx` is not used after this call, so move it instead of cloning
    // (avoids a redundant clone flagged by clippy's `redundant_clone`).
    inserts.execute_with_v2_checksum(ctx, with_checksum, extra_checksum);
    self.commit();
}
}

/// A trait for a general implementation to convert to a Txn store.
Expand Down
12 changes: 10 additions & 2 deletions components/tidb_query_datatype/src/codec/row/v2/row_slice.rs
Original file line number Diff line number Diff line change
Expand Up @@ -233,15 +233,23 @@ impl RowSlice<'_> {
RowSlice::Big {
offsets, values, ..
} => {
let last_slice_idx = offsets.get(non_null_col_num - 1).unwrap() as usize;
let last_slice_idx = if non_null_col_num == 0 {
0
} else {
offsets.get(non_null_col_num - 1).unwrap() as usize
};
let slice = values.slice;
*values = LeBytes::new(&slice[..last_slice_idx]);
&slice[last_slice_idx..]
}
RowSlice::Small {
offsets, values, ..
} => {
let last_slice_idx = offsets.get(non_null_col_num - 1).unwrap() as usize;
let last_slice_idx = if non_null_col_num == 0 {
0
} else {
offsets.get(non_null_col_num - 1).unwrap() as usize
};
let slice = values.slice;
*values = LeBytes::new(&slice[..last_slice_idx]);
&slice[last_slice_idx..]
Expand Down
9 changes: 7 additions & 2 deletions tests/integrations/coprocessor/test_select.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2087,11 +2087,16 @@ fn test_select_v2_format_with_checksum() {
for extra_checksum in [None, Some(132423)] {
// The row value encoded with checksum bytes should have no impact on cop task
// processing and related result chunk filling.
let (_, endpoint) =
let (mut store, endpoint) =
init_data_with_commit_v2_checksum(&product, &data, true, extra_checksum);
store.insert_all_null_row(&product, Context::default(), true, extra_checksum);
let req = DagSelect::from(&product).build();
let mut resp = handle_select(&endpoint, req);
let spliter = DagChunkSpliter::new(resp.take_chunks().into(), 3);
let mut spliter = DagChunkSpliter::new(resp.take_chunks().into(), 3);
let first_row = spliter.next().unwrap();
assert_eq!(first_row[0], Datum::I64(0));
assert_eq!(first_row[1], Datum::Null);
assert_eq!(first_row[2], Datum::Null);
for (row, (id, name, cnt)) in spliter.zip(data.clone()) {
let name_datum = name.map(|s| s.as_bytes()).into();
let expected_encoded = datum::encode_value(
Expand Down

0 comments on commit 67c7fa1

Please sign in to comment.