diff --git a/br/pkg/utils/backoff.go b/br/pkg/utils/backoff.go
index f2d6ddfba2f13..0294136f7539f 100644
--- a/br/pkg/utils/backoff.go
+++ b/br/pkg/utils/backoff.go
@@ -185,6 +185,11 @@ func (bo *importerBackoffer) NextBackoff(err error) time.Duration {
 			}
 		}
 	}
+	failpoint.Inject("set-import-attempt-to-one", func(_ failpoint.Value) {
+		if bo.attempt > 1 {
+			bo.attempt = 1
+		}
+	})
 	if bo.delayTime > bo.maxDelayTime {
 		return bo.maxDelayTime
 	}
diff --git a/br/tests/br_file_corruption/run.sh b/br/tests/br_file_corruption/run.sh
new file mode 100644
index 0000000000000..35a7698bb9fef
--- /dev/null
+++ b/br/tests/br_file_corruption/run.sh
@@ -0,0 +1,54 @@
+#!/bin/sh
+#
+# Copyright 2024 PingCAP, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eux
+
+DB="$TEST_NAME"
+TABLE="usertable"
+CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
+
+run_sql "CREATE DATABASE $DB;"
+go-ycsb load mysql -P $CUR/workload -p mysql.host=$TIDB_IP -p mysql.port=$TIDB_PORT -p mysql.user=root -p mysql.db=$DB
+run_br --pd $PD_ADDR backup full -s "local://$TEST_DIR/$DB"
+
+filename=$(find $TEST_DIR/$DB -regex ".*.sst" | head -n 1)
+filename_temp=$filename"_temp"
+filename_bak=$filename"_bak"
+echo "corruption" > $filename_temp
+cat $filename >> $filename_temp
+
+# file lost
+mv $filename $filename_bak
+export GO_FAILPOINTS="github.com/pingcap/tidb/br/pkg/utils/set-import-attempt-to-one=return(true)"
+restore_fail=0
+run_br --pd $PD_ADDR restore full -s "local://$TEST_DIR/$DB" || restore_fail=1
+export GO_FAILPOINTS=""
+if [ $restore_fail -ne 1 ]; then
+    echo 'restore success'
+    exit 1
+fi
+
+# file corruption
+mv $filename_temp $filename
+truncate --size=-11 $filename
+export GO_FAILPOINTS="github.com/pingcap/tidb/br/pkg/utils/set-import-attempt-to-one=return(true)"
+restore_fail=0
+run_br --pd $PD_ADDR restore full -s "local://$TEST_DIR/$DB" || restore_fail=1
+export GO_FAILPOINTS=""
+if [ $restore_fail -ne 1 ]; then
+    echo 'restore success'
+    exit 1
+fi
diff --git a/br/tests/br_file_corruption/workload b/br/tests/br_file_corruption/workload
new file mode 100644
index 0000000000000..e3fadf9a3d068
--- /dev/null
+++ b/br/tests/br_file_corruption/workload
@@ -0,0 +1,12 @@
+recordcount=10000
+operationcount=0
+workload=core
+
+readallfields=true
+
+readproportion=0
+updateproportion=0
+scanproportion=0
+insertproportion=0
+
+requestdistribution=uniform
diff --git a/br/tests/br_full_ddl/run.sh b/br/tests/br_full_ddl/run.sh
index e0871e91dd589..b43ff76e0067b 100755
--- a/br/tests/br_full_ddl/run.sh
+++ b/br/tests/br_full_ddl/run.sh
@@ -22,6 +22,11 @@ LOG=/$TEST_DIR/backup.log
 RESTORE_LOG=LOG=/$TEST_DIR/restore.log
 BACKUP_STAT=/$TEST_DIR/backup_stat
 RESOTRE_STAT=/$TEST_DIR/restore_stat
+<<<<<<< HEAD
+=======
+CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
+res_file="$TEST_DIR/sql_res.$TEST_NAME.txt"
+>>>>>>> 5399ca70da9 (br: fix br integration test (#53836))
 
 run_sql "CREATE DATABASE $DB;"
 go-ycsb load mysql -P tests/$TEST_NAME/workload -p mysql.host=$TIDB_IP -p mysql.port=$TIDB_PORT -p mysql.user=root -p mysql.db=$DB
@@ -38,6 +43,23 @@ for i in $(seq $DDL_COUNT); do
     fi
 done
 
+# wait until the index creation/drop is done
+retry_cnt=0
+while true; do
+    run_sql "ADMIN SHOW DDL JOBS WHERE DB_NAME = '$DB' AND TABLE_NAME = '$TABLE' AND STATE != 'synced';"
+    if grep -Fq "1. row" $res_file; then
+        cat $res_file
+        retry_cnt=$((retry_cnt+1))
+        if [ "$retry_cnt" -gt 50 ]; then
+            echo 'the wait lag is too large'
+            exit 1
+        fi
+        continue
+    fi
+
+    break
+done
+
 # run analyze to generate stats
 run_sql "analyze table $DB.$TABLE;"
 # record field0's stats and remove last_update_version
diff --git a/br/tests/br_pitr/run.sh b/br/tests/br_pitr/run.sh
index 96faac6ef88ae..1b90bda638706 100644
--- a/br/tests/br_pitr/run.sh
+++ b/br/tests/br_pitr/run.sh
@@ -98,6 +98,7 @@ restart_services
 
 echo "run pitr"
 run_br --pd $PD_ADDR restore point -s "local://$TEST_DIR/$PREFIX/log" --full-backup-storage "local://$TEST_DIR/$PREFIX/full" > $res_file 2>&1
+<<<<<<< HEAD
 # check something in downstream cluster
 echo "check br log"
 check_contains "restore log success summary"
@@ -109,3 +110,67 @@ run_sql "select * from mysql.gc_delete_range_done"
 run_sql "select count(*) DELETE_RANGE_CNT from (select * from mysql.gc_delete_range union all select * from mysql.gc_delete_range_done) del_range group by ts order by DELETE_RANGE_CNT desc limit 1;"
 expect_delete_range=$(($incremental_delete_range_count-$prepare_delete_range_count))
 check_contains "DELETE_RANGE_CNT: $expect_delete_range"
+=======
+check_result
+
+# start a new cluster for incremental + log
+echo "restart a services"
+restart_services
+
+echo "run snapshot restore#2"
+run_br --pd $PD_ADDR restore full -s "local://$TEST_DIR/$PREFIX/full"
+
+echo "run incremental restore + log restore"
+run_br --pd $PD_ADDR restore point -s "local://$TEST_DIR/$PREFIX/log" --full-backup-storage "local://$TEST_DIR/$PREFIX/inc" > $res_file 2>&1
+
+check_result
+
+# start a new cluster for incremental + log
+echo "restart a services"
+restart_services
+
+echo "run snapshot restore#3"
+run_br --pd $PD_ADDR restore full -s "local://$TEST_DIR/$PREFIX/full"
+
+echo "run incremental restore but failed"
+restore_fail=0
+run_br --pd $PD_ADDR restore full -s "local://$TEST_DIR/$PREFIX/inc_fail" || restore_fail=1
+if [ $restore_fail -ne 1 ]; then
+    echo 'pitr success'
+    exit 1
+fi
+
+# start a new cluster for corruption
+echo "restart a services"
+restart_services
+
+echo "corrupt a log file"
+filename=$(find $TEST_DIR/$PREFIX/log -regex ".*\.log" | grep -v "schema-meta" | tail -n 1)
+filename_temp=$filename"_temp"
+filename_bak=$filename"_bak"
+echo "corruption" > $filename_temp
+cat $filename >> $filename_temp
+
+# file lost
+mv $filename $filename_bak
+export GO_FAILPOINTS="github.com/pingcap/tidb/br/pkg/utils/set-import-attempt-to-one=return(true)"
+restore_fail=0
+run_br --pd $PD_ADDR restore point -s "local://$TEST_DIR/$PREFIX/log" --full-backup-storage "local://$TEST_DIR/$PREFIX/full" || restore_fail=1
+export GO_FAILPOINTS=""
+if [ $restore_fail -ne 1 ]; then
+    echo 'pitr success'
+    exit 1
+fi
+
+# file corruption
+mv $filename_temp $filename
+truncate --size=-11 $filename
+export GO_FAILPOINTS="github.com/pingcap/tidb/br/pkg/utils/set-import-attempt-to-one=return(true)"
+restore_fail=0
+run_br --pd $PD_ADDR restore point -s "local://$TEST_DIR/$PREFIX/log" --full-backup-storage "local://$TEST_DIR/$PREFIX/full" || restore_fail=1
+export GO_FAILPOINTS=""
+if [ $restore_fail -ne 1 ]; then
+    echo 'pitr success'
+    exit 1
+fi
+>>>>>>> 5399ca70da9 (br: fix br integration test (#53836))
diff --git a/br/tests/br_txn/run.sh b/br/tests/br_txn/run.sh
new file mode 100755
index 0000000000000..567be9d76e263
--- /dev/null
+++ b/br/tests/br_txn/run.sh
@@ -0,0 +1,146 @@
+#!/bin/sh
+#
+# Copyright 2023 PingCAP, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eux
+
+# restart service without tiflash
+source $UTILS_DIR/run_services
+start_services --no-tiflash
+
+BACKUP_DIR=$TEST_DIR/"txn_backup"
+BACKUP_FULL=$TEST_DIR/"txnkv-full"
+
+checksum() {
+    bin/txnkv --pd $PD_ADDR \
+        --ca "$TEST_DIR/certs/ca.pem" \
+        --cert "$TEST_DIR/certs/br.pem" \
+        --key "$TEST_DIR/certs/br.key" \
+        --mode checksum --start-key $1 --end-key $2 | grep result | tail -n 1 | awk '{print $3}'
+}
+
+fail_and_exit() {
+    echo "TEST: [$TEST_NAME] failed!"
+    exit 1
+}
+
+clean() {
+    bin/txnkv --pd $PD_ADDR \
+        --ca "$TEST_DIR/certs/ca.pem" \
+        --cert "$TEST_DIR/certs/br.pem" \
+        --key "$TEST_DIR/certs/br.key" \
+        --mode delete --start-key $1 --end-key $2
+}
+
+test_full_txnkv_encryption() {
+    check_range_start="hello"
+    check_range_end="world"
+
+    rm -rf $BACKUP_FULL
+
+    checksum_full=$(checksum $check_range_start $check_range_end)
+    # backup current state of key-values
+    run_br --pd $PD_ADDR backup txn -s "local://$BACKUP_FULL" --crypter.method "aes128-ctr" --crypter.key "0123456789abcdef0123456789abcdef"
+
+    clean $check_range_start $check_range_end
+    # Ensure the data is deleted
+    checksum_new=$(checksum $check_range_start $check_range_end)
+    if [ "$checksum_new" == "$checksum_full" ];then
+        echo "failed to delete data in range in encryption"
+        fail_and_exit
+    fi
+
+    run_br --pd $PD_ADDR restore txn -s "local://$BACKUP_FULL" --crypter.method "aes128-ctr" --crypter.key "0123456789abcdef0123456789abcdef"
+    checksum_new=$(checksum $check_range_start $check_range_end)
+    if [ "$checksum_new" != "$checksum_full" ];then
+        echo "failed to restore"
+        fail_and_exit
+    fi
+}
+
+run_test() {
+    if [ -z "$1" ];then
+        echo "run test"
+    else
+        export GO_FAILPOINTS="$1"
+        echo "run test with failpoints: $GO_FAILPOINTS"
+    fi
+
+    rm -rf $BACKUP_DIR
+    clean "hello" "world"
+
+    # generate txn kv randomly in range[start-key, end-key) in 10s
+    bin/txnkv --pd $PD_ADDR \
+        --ca "$TEST_DIR/certs/ca.pem" \
+        --cert "$TEST_DIR/certs/br.pem" \
+        --key "$TEST_DIR/certs/br.key" \
+        --mode rand-gen --start-key "hello" --end-key "world" --duration 10
+
+    checksum_ori=$(checksum "hello" "world")
+
+    # backup txnkv
+    echo "backup start..."
+    run_br --pd $PD_ADDR backup txn -s "local://$BACKUP_DIR"
+
+    # delete data in range[start-key, end-key)
+    clean "hello" "world"
+    # Ensure the data is deleted
+    retry_cnt=0
+    while true; do
+        checksum_new=$(checksum "hello" "world")
+
+        if [ "$checksum_new" != "$checksum_empty" ]; then
+            echo "failed to delete data in range after backup; retry_cnt = $retry_cnt"
+            retry_cnt=$((retry_cnt+1))
+            if [ "$retry_cnt" -gt 50 ]; then
+                fail_and_exit
+            fi
+            sleep 1
+            continue
+        fi
+
+        break
+    done
+
+    # restore txnkv
+    echo "restore start..."
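+    # restore the txn backup; the checksum below must match the one taken before backup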
+    run_br --pd $PD_ADDR restore txn -s "local://$BACKUP_DIR"
+
+    checksum_new=$(checksum "hello" "world")
+
+    if [ "$checksum_new" != "$checksum_ori" ];then
+        echo "checksum failed after restore"
+        fail_and_exit
+    fi
+
+    test_full_txnkv_encryption
+
+    # delete data in range[start-key, end-key)
+    clean "hello" "world"
+    # Ensure the data is deleted
+    checksum_new=$(checksum "hello" "world")
+
+    if [ "$checksum_new" != "$checksum_empty" ];then
+        echo "failed to delete data in range"
+        fail_and_exit
+    fi
+
+    export GO_FAILPOINTS=""
+}
+
+# delete data in range[start-key, end-key)
+clean "hello" "world"
+checksum_empty=$(checksum "hello" "world")
+run_test ""
diff --git a/br/tests/run_group_br_tests.sh b/br/tests/run_group_br_tests.sh
new file mode 100755
index 0000000000000..04ff8c60701d4
--- /dev/null
+++ b/br/tests/run_group_br_tests.sh
@@ -0,0 +1,70 @@
+#!/usr/bin/env bash
+
+# This script splits the integration tests into 9 groups to support parallel group test execution.
+# All the integration tests are located in the br/tests directory. Only the directories
+# containing run.sh are considered valid br integration tests. The script will print the total case number.
+
+set -eo pipefail
+
+# Step 1
+CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
+group=$1
+export COV_DIR="/tmp/group_cover"
+rm -rf $COV_DIR
+mkdir -p $COV_DIR
+
+# Define groups
+# Note: If a new group is added, the group name must also be added to CI
+# * https://github.com/PingCAP-QE/ci/blob/main/pipelines/pingcap/tidb/latest/pull_br_integration_test.groovy
+# Each group of tests consumes as much time as possible, thus reducing CI waiting time.
+# Put multiple light tests together and heavy tests in a separate group.
+declare -A groups
+groups=(
+    ["G00"]="br_300_small_tables br_backup_empty br_backup_version br_cache_table br_case_sensitive br_charset_gbk br_check_new_collocation_enable br_history br_gcs br_rawkv"
+    ["G01"]="br_autoid br_crypter2 br_db br_db_online br_db_online_newkv br_db_skip br_debug_meta br_ebs br_foreign_key br_full br_table_partition br_full_ddl"
+    ["G02"]="br_full_cluster_restore br_full_index br_incremental_ddl br_pitr_failpoint"
+    ["G03"]='br_incompatible_tidb_config br_incremental br_incremental_index br_incremental_only_ddl br_incremental_same_table br_insert_after_restore br_key_locked br_log_test br_move_backup br_mv_index br_other br_partition_add_index br_tidb_placement_policy br_tiflash br_tiflash_conflict'
+    ["G04"]='br_range br_replica_read br_restore_TDE_enable br_restore_log_task_enable br_s3 br_shuffle_leader br_shuffle_region br_single_table'
+    ["G05"]='br_skip_checksum br_split_region_fail br_systables br_table_filter br_txn br_stats br_clustered_index br_crypter'
+    ["G06"]='br_tikv_outage br_tikv_outage3'
+    ["G07"]='br_pitr'
+    ["G08"]='br_tikv_outage2 br_ttl br_views_and_sequences br_z_gc_safepoint br_autorandom br_file_corruption'
+)
+
+# Get other cases not in groups, to avoid missing any case
+others=()
+for script in "$CUR"/*/run.sh; do
+    test_name="$(basename "$(dirname "$script")")"
+    if [[ $test_name != br* ]]; then
+        continue
+    fi
+    # shellcheck disable=SC2076
" ${groups[*]} " =~ " ${test_name} " ]]; then + others=("${others[@]} ${test_name}") + fi +done + +if [[ "$group" == "others" ]]; then + if [[ -z $others ]]; then + echo "All br integration test cases have been added to groups" + exit 0 + fi + echo "Error: "$others" is not added to any group in br/tests/run_group_br_tests.sh" + exit 1 +elif [[ " ${!groups[*]} " =~ " ${group} " ]]; then + test_names="${groups[${group}]}" + # Run test cases + if [[ -n $test_names ]]; then + echo "" + echo "Run cases: ${test_names}" + for case_name in $test_names; do + echo "Run cases: ${case_name}" + rm -rf /tmp/backup_restore_test + mkdir -p /tmp/backup_restore_test + TEST_NAME=${case_name} ${CUR}/run.sh + done + fi +else + echo "Error: invalid group name: ${group}" + exit 1 +fi