# Abort on the first failing command; this script performs privileged
# installs and must not continue past a partial failure.
set -e

echo "Checking for updates..."

# Root of the checked-out template workspace that receives updated tooling.
WORKSPACE_DIR="/workspaces/spark-template"
# Marker files record the release/tools versions installed on this machine so
# repeated runs can skip work that is already done.
MARKER_DIR="/var/lib/spark/.versions"
RELEASE_MARKER_FILE="$MARKER_DIR/release"
TOOLS_MARKER_FILE="$MARKER_DIR/tools"

sudo mkdir -p "$MARKER_DIR"

# Fetch the latest release information from the GitHub API.
LATEST_RELEASE=$(curl -s https://api.github.com/repos/github/spark-template/releases/latest)

# If the recorded release ID matches the latest one, there is nothing to do.
RELEASE_ID=$(echo "$LATEST_RELEASE" | jq -r '.id')
if [ -f "$RELEASE_MARKER_FILE" ] && [ "$(cat "$RELEASE_MARKER_FILE")" == "$RELEASE_ID" ]; then
  echo "Already at the latest release. Skipping download."
  exit 0
fi
24-
echo "New version found. Downloading latest release."

# Work in a throwaway directory so a failed download never pollutes the
# workspace; cleaned up at the end of the script.
TEMP_DIR=$(mktemp -d)
# NOTE(review): the lines that derive DOWNLOAD_URL from $LATEST_RELEASE are
# elided in this view — confirm they set it before this curl runs.
curl -L -o dist.zip -H "Accept: application/octet-stream" "$DOWNLOAD_URL"

unzip -o dist.zip
rm dist.zip
# Directory inside the release archive holding the SDK payload.
DIST_DIR="spark-sdk-dist"
# Upgrade the Spark Runtime tools: move each SDK artifact to its installed
# location. proxy.js is copied to /workspaces first because it is needed in
# both places.
sudo mv ./spark-sdk-dist/server.js /usr/local/bin/spark-server
sudo mv ./spark-sdk-dist/designer.js /usr/local/bin/spark-designer
sudo mv ./spark-sdk-dist/upload-to-remote.sh /usr/local/bin/upload-to-remote.sh
sudo mv ./spark-sdk-dist/deploy.sh /usr/local/bin/deploy.sh
sudo mv ./spark-sdk-dist/shutdown.sh /usr/local/bin/shutdown.sh
sudo mv ./spark-sdk-dist/hydrate.sh /usr/local/bin/hydrate.sh
sudo mv ./spark-sdk-dist/file-syncer.js /usr/local/bin/spark-file-syncer
sudo cp ./spark-sdk-dist/proxy.js /workspaces/proxy.js
sudo mv ./spark-sdk-dist/proxy.js /usr/local/bin/proxy.js
sudo mv ./spark-sdk-dist/spark.package.json /workspaces/spark.package.json
sudo mv ./spark-sdk-dist/static-preview-build.sh /usr/local/bin/static-preview-build.sh
sudo mv ./spark-sdk-dist/post-commit /usr/local/bin/post-commit
49-
# Upgrade the Spark Tools package, skipping extraction when the recorded
# tools version already matches the one shipped in this release.
if [ -f "$TOOLS_MARKER_FILE" ] && [ "$(cat "$TOOLS_MARKER_FILE")" == "$(cat ./spark-sdk-dist/spark-tools-version)" ]; then
  echo "Already at the latest tools version. Skipping extraction."
else
  # The tarball extracts into ./package; relocate its contents into the
  # workspace's packages/spark-tools directory.
  tar -xzf ./spark-sdk-dist/spark-tools.tgz

  # Paths are quoted: an unquoted "$WORKSPACE_DIR /packages/..." would hand
  # rm -rf two separate arguments and delete the whole workspace.
  sudo rm -rf "$WORKSPACE_DIR/packages/spark-tools"
  mkdir -p "$WORKSPACE_DIR/packages/spark-tools"
  sudo mv ./package/* "$WORKSPACE_DIR/packages/spark-tools"
  sudo rmdir ./package

  # Reinstall workspace dependencies against the refreshed package.
  cd "$WORKSPACE_DIR"
  npm i -f
  cd - > /dev/null

  # Record the installed tools version for the next run's skip check.
  sudo cp ./spark-sdk-dist/spark-tools-version "$TOOLS_MARKER_FILE"
fi
67-
# Upgrade the GH CLI extension by replacing the installed copy wholesale.
sudo rm -rf /usr/local/bin/gh-spark-cli
sudo mv spark-sdk-dist/gh-spark-cli /usr/local/bin/
cd /usr/local/bin/gh-spark-cli
# The --force option on gh extension install isn't honored for local installs,
# so manually remove it first. A failure here is fine — it usually just means
# this is the first time the script has run.
gh extension remove spark-cli > /dev/null || true
gh extension install .
gh alias set spark spark-cli --clobber
cd - > /dev/null
78-
# Remove the scratch download directory (quoted so an unusual mktemp path
# cannot word-split into multiple rm arguments).
rm -rf "$TEMP_DIR"

# Update marker file with the latest release ID so the next run can skip.
echo "$RELEASE_ID" | sudo tee "$RELEASE_MARKER_FILE" > /dev/null

echo "Tools installed successfully."
# Delegate the actual repair/installation to scripts shipped inside the
# release payload, passing context through the environment.
bash spark-sdk-dist/repair.sh
LATEST_RELEASE="$LATEST_RELEASE" DIST_DIR="$DIST_DIR" WORKSPACE_DIR="$WORKSPACE_DIR" bash spark-sdk-dist/install-tools.sh
rm -rf "$TEMP_DIR"
0 commit comments