Skip to content

Commit

Permalink
Update to latest lint / shellcheck
Browse files — browse the repository at this point in the history
  • Loading branch information
craigcondit committed Jul 13, 2023
1 parent bd4a3eb commit 2ab4fc4
Show file tree
Hide file tree
Showing 3 changed files with 8 additions and 10 deletions.
1 change: 0 additions & 1 deletion .spark_version

This file was deleted.

6 changes: 3 additions & 3 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@ DOCKER_ARCH := amd64
endif

# shellcheck
SHELLCHECK_VERSION=v0.8.0
SHELLCHECK_VERSION=v0.9.0
SHELLCHECK_BIN=${TOOLS_DIR}/shellcheck
SHELLCHECK_ARCHIVE := shellcheck-$(SHELLCHECK_VERSION).$(OS).$(HOST_ARCH).tar.xz
ifeq (darwin, $(OS))
Expand All @@ -106,7 +106,7 @@ endif
endif

# golangci-lint
GOLANGCI_LINT_VERSION=1.51.2
GOLANGCI_LINT_VERSION=1.53.3
GOLANGCI_LINT_BIN=$(TOOLS_DIR)/golangci-lint
GOLANGCI_LINT_ARCHIVE=golangci-lint-$(GOLANGCI_LINT_VERSION)-$(OS)-$(EXEC_ARCH).tar.gz
GOLANGCI_LINT_ARCHIVEBASE=golangci-lint-$(GOLANGCI_LINT_VERSION)-$(OS)-$(EXEC_ARCH)
Expand All @@ -126,7 +126,7 @@ HELM_ARCHIVE=helm-$(HELM_VERSION)-$(OS)-$(EXEC_ARCH).tar.gz
HELM_ARCHIVE_BASE=$(OS)-$(EXEC_ARCH)

# spark
export SPARK_VERSION=$(shell cat .spark_version)
export SPARK_VERSION=3.3.1
export SPARK_HOME=$(BASE_DIR)$(TOOLS_DIR)/spark
export SPARK_SUBMIT_CMD=$(SPARK_HOME)/bin/spark-submit
export SPARK_PYTHON_IMAGE=docker.io/apache/spark-py:v$(SPARK_VERSION)
Expand Down
11 changes: 5 additions & 6 deletions deployments/examples/spark/cmd/run.sh
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,6 @@ usage() {
case "$1" in
--help)
usage
exit 0
;;
esac

Expand Down Expand Up @@ -101,14 +100,14 @@ if [ -f "$SPARK_BINARY_FILE_PATH" ]; then
echo "The binary file $SPARK_BINARY_FILE_NAME has been cached!"
else
echo "The binary file $SPARK_BINARY_FILE_NAME did not exist, try to download."
wget https://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/${SPARK_BINARY_FILE_NAME} -O "${SPARK_BINARY_FILE_PATH}"
wget "https://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/${SPARK_BINARY_FILE_NAME}" -O "${SPARK_BINARY_FILE_PATH}"
fi

if [ -f "$SPARK_BINARY_FILE_CHECKSUM_FILE_PATH" ]; then
echo "The binary checksum file $SPARK_BINARY_FILE_CHECKSUM_FILE_NAME has been cached!"
else
echo "The binary checksum file $SPARK_BINARY_FILE_CHECKSUM_FILE_NAME did not exist, try to download."
wget http://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/${SPARK_BINARY_FILE_CHECKSUM_FILE_NAME} -O "${SPARK_BINARY_FILE_CHECKSUM_FILE_PATH}"
wget "http://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/${SPARK_BINARY_FILE_CHECKSUM_FILE_NAME}" -O "${SPARK_BINARY_FILE_CHECKSUM_FILE_PATH}"
fi

if [[ "$SPARK_VERSION" < "3.2.1" || "$SPARK_VERSION" == "3.2.1" ]]; then
Expand All @@ -123,7 +122,7 @@ if [[ "$SPARK_VERSION" < "3.2.1" || "$SPARK_VERSION" == "3.2.1" ]]; then
if [[ 'OK' == $(shasum -c -a 512 "$FORMATTED_SPARK_BINARY_FILE_CHECKSUM_FILE_PATH" | awk '{print $2}') ]]; then
echo "The checksum is matched!"
echo "Try to remove the old unpacked dir and re-uncompress it"
rm -rf "$WORK_SPACE_ROOT"/spark-${SPARK_VERSION}-bin-hadoop${SPARK_HADOOP_VERSION}
rm -rf "$WORK_SPACE_ROOT"/spark-"${SPARK_VERSION}"-bin-hadoop"${SPARK_HADOOP_VERSION}"
tar -xvzf "$SPARK_BINARY_FILE_PATH" -C "$WORK_SPACE_ROOT"
else
echo "The checksum is not matched, Removing the incompleted file, please download it again."
Expand All @@ -135,7 +134,7 @@ else
if [[ $(shasum -a 512 "$SPARK_BINARY_FILE_PATH" | awk '{print $1}') == $(awk '{print $1}' < "$SPARK_BINARY_FILE_CHECKSUM_FILE_NAME") ]]; then
echo "The checksum is matched!"
echo "Try to remove the old unpacked dir and re-uncompress it"
rm -rf "$WORK_SPACE_ROOT"/spark-${SPARK_VERSION}-bin-hadoop${SPARK_HADOOP_VERSION}
rm -rf "$WORK_SPACE_ROOT"/spark-"${SPARK_VERSION}"-bin-hadoop"${SPARK_HADOOP_VERSION}"
tar -xvzf "$SPARK_BINARY_FILE_PATH" -C "$WORK_SPACE_ROOT"
else
echo "The checksum is not matched, Removing the incompleted file, please download it again."
Expand Down Expand Up @@ -195,7 +194,7 @@ EOF
--conf spark.executor.instances=${SPARK_EXECUTOR_NUM} \
--conf spark.kubernetes.namespace=spark-test \
--conf spark.kubernetes.executor.request.cores=1 \
--conf spark.kubernetes.container.image=${SPARK_DOCKER_IMAGE} \
--conf spark.kubernetes.container.image="${SPARK_DOCKER_IMAGE}" \
--conf spark.kubernetes.authenticate.driver.serviceAccountName=spark \
--conf spark.kubernetes.driver.podTemplateFile=../driver.yaml \
--conf spark.kubernetes.executor.podTemplateFile=../executor.yaml \
Expand Down

0 comments on commit 2ab4fc4

Please sign in to comment.