@@ -19,6 +19,7 @@ srcdir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
 
 cd "$srcdir"
 
+# shellcheck disable=SC1091
 . ./utils.sh
 
 section "Spark JSON => Avro"
@@ -30,9 +31,9 @@ if is_inside_docker; then
 fi
 
 # don't support Spark <= 1.3 due to difference in databricks avro dependency
-export SPARK_VERSIONS="${@:-1.4.0 1.5.1 1.6.2}"
+export SPARK_VERSIONS="${*:-1.4.0 1.5.1 1.6.2}"
 # requires upgrade to spark-avro 3.0.0
-# export SPARK_VERSIONS="${@:-2.0.0}"
+# export SPARK_VERSIONS="${*:-2.0.0}"
 
 for SPARK_VERSION in $SPARK_VERSIONS; do
     dir="spark-$SPARK_VERSION-bin-hadoop2.6"
@@ -57,9 +58,12 @@ for SPARK_VERSION in $SPARK_VERSIONS; do
     # resolved, was due to Spark 1.4+ requiring pyspark-shell for PYSPARK_SUBMIT_ARGS
 
     rm -fr "test-$dir.avro"
-    ../spark_json_to_avro.py -j data/multirecord.json -a "test-$dir.avro" &&
-        echo "SUCCEEDED with header with Spark $SPARK_VERSION" ||
-        { echo "FAILED with header with Spark $SPARK_VERSION"; exit 1; }
+    if ../spark_json_to_avro.py -j data/multirecord.json -a "test-$dir.avro"; then
+        echo "SUCCEEDED with header with Spark $SPARK_VERSION"
+    else
+        echo "FAILED with header with Spark $SPARK_VERSION"
+        exit 1
+    fi
 
     # ../spark_json_to_avro.py -j data/multirecord.json -a "test-$dir.avro" -s Year:String,Make,Model,Dimension.0.Length:float &&
0 commit comments