Skip to content

Commit

Permalink
Merge pull request #236 from datastax/issue/build-stability-fixes
Browse files Browse the repository at this point in the history
Build stability fixes
  • Loading branch information
pravinbhat authored Dec 18, 2023
2 parents 23de805 + ea959cb commit 025b73e
Show file tree
Hide file tree
Showing 13 changed files with 77 additions and 22 deletions.
17 changes: 13 additions & 4 deletions SIT/environment.sh
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ _testDockerNetwork() {
}

_testDockerCassandra() {
dockerPs=$(docker ps -a | awk '{if ($NF == "'${DOCKER_CASS}'") {print "yes"}}')
dockerPs=$(docker ps --all --filter "name=${DOCKER_CASS}" --format "{{.Status}}" | awk '{if ($1 == "Up") {print "yes"}}')
if [ "$dockerPs" != "yes" ]; then
echo "no"
return
Expand Down Expand Up @@ -154,7 +154,7 @@ _dropKeyspaces() {
}

_testDockerCDM() {
dockerPs=$(docker ps -a | awk '{if ($NF == "'${DOCKER_CDM}'") {print "yes"}}')
dockerPs=$(docker ps --all --filter "name=${DOCKER_CDM}" --format "{{.Status}}" | awk '{if ($1 == "Up") {print "yes"}}')
if [ "$dockerPs" != "yes" ]; then
echo "no"
else
Expand Down Expand Up @@ -228,8 +228,17 @@ _Setup() {

if [ "$(_testDockerCDM)" != "yes" ]; then
dockerContainerVersion=datastax/cassandra-data-migrator:${CDM_VERSION}

# Uncomment the 'docker build' lines below when making Docker changes, to ensure you test those changes.
# Also comment out the 'docker pull' line while 'docker build' is uncommented.
# Note: 'docker build' should be used only when testing Docker changes locally (i.e., do not commit it).
# If the 'docker build' step is committed, the build will still work, but it will take too long because it
# will rebuild the CDM Docker image every time instead of simply downloading it from DockerHub.
_info "Pulling latest Docker container for ${dockerContainerVersion}"
docker pull ${dockerContainerVersion}
# _info "Building latest Docker container for ${dockerContainerVersion}"
# docker build -t ${dockerContainerVersion} ..

_info "Starting Docker container ${DOCKER_CDM}"
docker run --name ${DOCKER_CDM} --network ${NETWORK_NAME} --ip ${SUBNET}.3 -e "CASS_USERNAME=${CASS_USERNAME}" -e "CASS_PASSWORD=${CASS_PASSWORD}" -e "CASS_CLUSTER=${DOCKER_CASS}" -d ${dockerContainerVersion}
attempt=1
Expand Down Expand Up @@ -278,7 +287,7 @@ _Validate() {
fi

if [ "$(_testDockerCassandra)" == "yes" ]; then
_info "Cassandra Docker is valid"
_info "Cassandra Docker is valid and running"
else
_warn "Cassandra Docker is invalid"
invalid=1
Expand All @@ -292,7 +301,7 @@ _Validate() {
fi

if [ "$(_testDockerCDM)" == "yes" ]; then
_info "CDM Docker is valid"
_info "CDM Docker is valid and running"
else
_warn "CDM Docker is invalid"
invalid=1
Expand Down
6 changes: 6 additions & 0 deletions SIT/features/01_constant_column/breakData.cql
Original file line number Diff line number Diff line change
Expand Up @@ -14,5 +14,11 @@

DELETE FROM target.feature_constant_column WHERE key='key2' AND const1='abcd';
UPDATE target.feature_constant_column SET value='value999' WHERE key='key3' AND const1='abcd';

-- This upsert to origin will update the writetime on origin to be newer than target
INSERT INTO origin.feature_constant_column(key,value) VALUES ('key1','valueA');
INSERT INTO origin.feature_constant_column(key,value) VALUES ('key2','valueB');
INSERT INTO origin.feature_constant_column(key,value) VALUES ('key3','valueC');

SELECT * FROM target.feature_constant_column;

6 changes: 6 additions & 0 deletions SIT/features/02_explode_map/breakData.cql
Original file line number Diff line number Diff line change
Expand Up @@ -14,5 +14,11 @@ DELETE FROM target.feature_explode_map WHERE key='key2';
UPDATE target.feature_explode_map SET value='value999' WHERE key='key3' AND fruit='apples';
UPDATE target.feature_explode_map SET fruit_qty=999 WHERE key='key3' AND fruit='oranges';
DELETE FROM target.feature_explode_map WHERE key='key3' AND fruit='kiwi';

-- This upsert to origin will update the writetime on origin to be newer than target
INSERT INTO origin.feature_explode_map(key,value,fruits) VALUES ('key1','valueA', {'apples': 3, 'oranges': 5, 'bananas': 2, 'grapes': 11});
INSERT INTO origin.feature_explode_map(key,value,fruits) VALUES ('key2','valueB', {'apples': 4, 'oranges': 6, 'bananas': 3, 'pears': 7});
INSERT INTO origin.feature_explode_map(key,value,fruits) VALUES ('key3','valueC', {'apples': 5, 'oranges': 7, 'bananas': 4, 'kiwi': 42});

SELECT * FROM target.feature_explode_map;

9 changes: 9 additions & 0 deletions SIT/features/03_codec/breakData.cql
Original file line number Diff line number Diff line change
Expand Up @@ -18,4 +18,13 @@ UPDATE target.codec SET
val_timestamp='2024-04-16 10:30:00+0000',
val_decimal=999.1234
WHERE key='key3';

-- This upsert to origin will update the writetime on origin to be newer than target
INSERT INTO origin.codec(key,val_int,val_bigint,val_timestamp,val_decimal,val_double)
VALUES ('key1','1234' ,'9223372036854775807','040616110000', '3.14', '21474836470.7');
INSERT INTO origin.codec(key,val_int,val_bigint,val_timestamp,val_decimal,val_double)
VALUES ('key2','12345' ,'2147483648' ,'990616110000', '4.14', '21474836470.7');
INSERT INTO origin.codec(key,val_int,val_bigint,val_timestamp,val_decimal,val_double)
VALUES ('key3','123456','3141592653589793' ,'990616110000', '5.14', '21474836470.7');

SELECT * FROM target.codec;
6 changes: 6 additions & 0 deletions SIT/features/06_partition_range/breakData.cql
Original file line number Diff line number Diff line change
Expand Up @@ -14,5 +14,11 @@

DELETE FROM target.feature_partition_range WHERE key='key1';
UPDATE target.feature_partition_range SET value='value999' WHERE key='key2';

-- This upsert to origin will update the writetime on origin to be newer than target
INSERT INTO origin.feature_partition_range(key,value) VALUES ('key1','valueA');
INSERT INTO origin.feature_partition_range(key,value) VALUES ('key2','valueB');
INSERT INTO origin.feature_partition_range(key,value) VALUES ('key3','valueC');

SELECT * FROM target.feature_partition_range;

6 changes: 6 additions & 0 deletions SIT/regression/01_explode_map_with_constants/breakData.cql
Original file line number Diff line number Diff line change
Expand Up @@ -16,4 +16,10 @@ DELETE FROM target.feature_explode_map_with_constants WHERE const1='abcd' AND co
UPDATE target.feature_explode_map_with_constants SET time=7398730800000 WHERE const1='abcd' AND const2=1234 AND key='key3' AND fruit='apples';
UPDATE target.feature_explode_map_with_constants SET fruit_qty=999 WHERE const1='abcd' AND const2=1234 AND key='key3' AND fruit='oranges';
DELETE FROM target.feature_explode_map_with_constants WHERE const1='abcd' AND const2=1234 AND key='key3' AND fruit='kiwi';

-- This upsert to origin will update the writetime on origin to be newer than target
INSERT INTO origin.feature_explode_map_with_constants(key,time,fruits) VALUES ('key1','1087383600000', {'apples': 3, 'oranges': 5, 'bananas': 2, 'grapes': 11});
INSERT INTO origin.feature_explode_map_with_constants(key,time,fruits) VALUES ('key2','1087383600000', {'apples': 4, 'oranges': 6, 'bananas': 3, 'pears': 7});
INSERT INTO origin.feature_explode_map_with_constants(key,time,fruits) VALUES ('key3','1087383600000', {'apples': 5, 'oranges': 7, 'bananas': 4, 'kiwi': 42});

SELECT * FROM target.feature_explode_map_with_constants;
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,8 @@
limitations under the License.
*/

DELETE FROM target.column_rename_with_constants_and_explode WHERE customer='CUSTOMER' AND year=2023 AND key_name='key2';
UPDATE target.column_rename_with_constants_and_explode SET fruit_qty=999 WHERE customer='CUSTOMER' AND year=2023 AND key_name='key3' AND fruit='oranges';
DELETE FROM target.column_rename_with_constants_and_explode WHERE customer='CUSTOMER' AND year=2023 AND key_name='key3' AND fruit='kiwi';
DELETE FROM target.column_rename_with_constants_and_explode WHERE customer='CUSTOMER' AND year=2023 AND key_name='key2';
UPDATE target.column_rename_with_constants_and_explode SET fruit_qty=999 WHERE customer='CUSTOMER' AND year=2023 AND key_name='key3' AND fruit='oranges';
DELETE FROM target.column_rename_with_constants_and_explode WHERE customer='CUSTOMER' AND year=2023 AND key_name='key3' AND fruit='kiwi';
SELECT * FROM target.column_rename_with_constants_and_explode;

2 changes: 1 addition & 1 deletion SIT/regression/03_performance/breakData.cql
Original file line number Diff line number Diff line change
Expand Up @@ -12,5 +12,5 @@
limitations under the License.
*/

DELETE FROM target.regression_performance WHERE pk_id = 600;
DELETE FROM target.regression_performance WHERE pk_id = 600;
SELECT * FROM target.regression_performance WHERE pk_id >= 400 AND pk_id < 700 ALLOW FILTERING;
3 changes: 3 additions & 0 deletions SIT/regression/03_performance/execute.sh
Original file line number Diff line number Diff line change
Expand Up @@ -28,5 +28,8 @@ dsbulk load -url $workingDir/data.csv -h $CASS_CLUSTER -u $CASS_USERNAME -p $CAS
dsbulk load -url $workingDir/data_break.csv -h $CASS_CLUSTER -u $CASS_USERNAME -p $CASS_PASSWORD -k target -t regression_performance
cqlsh -u $CASS_USERNAME -p $CASS_PASSWORD $CASS_CLUSTER -f $workingDir/breakData.cql > $workingDir/breakData.out 2> $workingDir/breakData.err

# This upsert to origin will update the writetime on origin to be newer than target
dsbulk load -url $workingDir/data.csv -h $CASS_CLUSTER -u $CASS_USERNAME -p $CASS_PASSWORD -k origin -t regression_performance

/local/cdm.sh -f cdm.txt -s fixData -d "$workingDir" > cdm.fixData.out 2>cdm.fixData.err
/local/cdm-assert.sh -f cdm.fixData.out -a cdm.fixData.assert -d "$workingDir"
1 change: 0 additions & 1 deletion SIT/regression/03_performance/migrate.properties
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@
# limitations under the License.
#


spark.cdm.connect.origin.host cdm-sit-cass
spark.cdm.connect.target.host cdm-sit-cass

Expand Down
6 changes: 6 additions & 0 deletions SIT/smoke/02_autocorrect_kvp/breakData.cql
Original file line number Diff line number Diff line change
Expand Up @@ -12,5 +12,11 @@

DELETE FROM target.smoke_autocorrect_kvp WHERE key='key2';
UPDATE target.smoke_autocorrect_kvp SET value='value999' WHERE key='key3';

-- This upsert to origin will update the writetime on origin to be newer than target
INSERT INTO origin.smoke_autocorrect_kvp(key,value) VALUES ('key1','valueA');
INSERT INTO origin.smoke_autocorrect_kvp(key,value) VALUES ('key2','valueB');
INSERT INTO origin.smoke_autocorrect_kvp(key,value) VALUES ('key3','valueC');

SELECT * FROM target.smoke_autocorrect_kvp;

30 changes: 18 additions & 12 deletions SIT/test.sh
Original file line number Diff line number Diff line change
Expand Up @@ -35,12 +35,16 @@ fi
. common.sh

_captureOutput() {
_info "Copying ${DOCKER_CDM}:/${testDir} into ${testDir}/output"
docker cp ${DOCKER_CDM}:/${testDir} ${testDir}/output
_info "Moving ${testDir}/output/$(basename ${testDir})/*.out TO ${testDir}/output"
mv ${testDir}/output/$(basename ${testDir})/*.out ${testDir}/output
_info "Moving ${testDir}/output/$(basename ${testDir})/*.err TO ${testDir}/output"
mv ${testDir}/output/$(basename ${testDir})/*.err ${testDir}/output
_info "Copying ${DOCKER_CDM}:/${testDir} into ${testDir}/output/"
docker cp ${DOCKER_CDM}:/${testDir} ${testDir}/output/
_info "Moving ${testDir}/output/$(basename ${testDir})/*.out TO ${testDir}/output/"
mv -v ${testDir}/output/$(basename ${testDir})/*.out ${testDir}/output/
_info "Moving ${testDir}/output/$(basename ${testDir})/*.err TO ${testDir}/output/"
mv -v ${testDir}/output/$(basename ${testDir})/*.err ${testDir}/output/
_info "Moving ${testDir}/output/$(basename ${testDir})/output/*.out TO ${testDir}/output/"
mv -v ${testDir}/output/$(basename ${testDir})/output/*.out ${testDir}/output/
_info "Moving ${testDir}/output/$(basename ${testDir})/output/*.err TO ${testDir}/output/"
mv -v ${testDir}/output/$(basename ${testDir})/output/*.err ${testDir}/output/
_info "Removing ${testDir}/output/$(basename ${testDir})"
rm -rf ${testDir}/output/$(basename ${testDir})
}
Expand Down Expand Up @@ -68,7 +72,7 @@ for testDir in $(ls -d ${PHASE}/*); do

# Clean up any previous results that may exist
for f in ${GENERATED_FILES}; do
rm -f ${testDir}/$f
rm -rf ${testDir}/$f
done
rm -rf ${testDir}/output/*
mkdir -p ${testDir}/output
Expand Down Expand Up @@ -124,17 +128,17 @@ errors=0
for testDir in $(ls -d ${PHASE}/*); do
export testDir
_info ${testDir} Executing test
docker exec ${DOCKER_CDM} bash -e $testDir/execute.sh /$testDir > $testDir/output/execute.out 2>$testDir/output/execute.err
docker exec ${DOCKER_CDM} bash -e -c "$testDir/execute.sh /$testDir > $testDir/output/execute.out 2>$testDir/output/execute.err"
if [ $? -ne 0 ]; then
_error "${testDir}/execute.sh failed, see $testDir/output/execute.out and $testDir/output/execute.err"
echo "=-=-=-=-=-=-=-=-=-= Directory Listing =-=-=-=-=-=-=-=-=-=-"
echo "$(ls -laR ${testDir})"
echo "=-=-=-=-=-=-=- Container Directory Listing -=-=-=-=-=-=-=-"
echo "$(docker exec ${DOCKER_CDM} ls -laR ${testDir})"
echo "=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-==-=-=-=-=-=-=-=-=-=-=-=-"
_captureOutput
errors=1
fi
done
if [ $errors -ne 0 ]; then
_captureOutput
_fatal "One or more execute.sh failed. See above ERROR(s) for details."
fi

Expand All @@ -156,6 +160,7 @@ for testDir in $(ls -d ${PHASE}/*); do
fi
if [ $? -ne 0 ]; then
_error "${testDir}/expected.cql failed, see $testDir/output/actual.out $testDir/output/and actual.err"
_captureOutput
errors=1
continue
fi
Expand All @@ -164,18 +169,19 @@ for testDir in $(ls -d ${PHASE}/*); do
if [ $rtn -eq 1 ]; then
_error "${testDir} files differ (expected vs actual):"
sdiff -w 200 ${testDir}/expected.out ${testDir}/output/actual.out
_captureOutput
errors=1
continue
elif [ $rtn -ne 0 ]; then
_error "${testDir} had some other problem running diff"
_captureOutput
errors=1
continue
fi

_info "PASS: ${testDir} returned expected results"
done
if [ $errors -ne 0 ]; then
_captureOutput
_fatal "One or more expected results failed. See above ERROR(s) for details."
fi

Expand Down
1 change: 0 additions & 1 deletion src/main/java/com/datastax/cdm/feature/WritetimeTTL.java
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,6 @@ public boolean initializeAndValidate(CqlTable originTable, CqlTable targetTable)
logger.error("Counter table cannot specify TTL or WriteTimestamp columns as they cannot be set on write");
isValid = false;
isEnabled = false;
return false;
}

logger.info("Counter table does not support TTL or WriteTimestamp columns as they cannot be set on write, so feature is disabled");
Expand Down

0 comments on commit 025b73e

Please sign in to comment.