Skip to content

Commit e757d85

Browse files
committed
latest script generated from templates
1 parent b02fbd6 commit e757d85

File tree

1 file changed

+17
-49
lines changed

1 file changed

+17
-49
lines changed

dask/dask.sh

Lines changed: 17 additions & 49 deletions
Original file line numberDiff line numberDiff line change
@@ -14,15 +14,12 @@
1414
# See the License for the specific language governing permissions and
1515
# limitations under the License.
1616

17-
#
18-
#
19-
# Google Cloud Dataproc Initialization Actions v0.0.1
2017
#
2118
# This initialization action is generated from
2219
# initialization-actions/templates/dask/dask.sh.in
2320
#
24-
# Modifications made directly to generated files will be lost when the
25-
# templates are next evaluated.
21+
# Modifications made directly to the generated file will be lost when
22+
# the template is re-evaluated.
2623

2724
#
2825
# This initialization action script will install Dask and other relevant
@@ -363,25 +360,6 @@ function check_os() {
363360
exit 1
364361
fi
365362

366-
SPARK_VERSION="$(spark-submit --version 2>&1 | sed -n 's/.*version[[:blank:]]\+\([0-9]\+\.[0-9]\).*/\1/p' | head -n1)"
367-
readonly SPARK_VERSION
368-
if version_lt "${SPARK_VERSION}" "3.1" || \
369-
version_ge "${SPARK_VERSION}" "4.0" ; then
370-
echo "Error: Your Spark version is not supported. Please upgrade Spark to one of the supported versions."
371-
exit 1
372-
fi
373-
374-
# Detect dataproc image version
375-
if (! test -v DATAPROC_IMAGE_VERSION) ; then
376-
if test -v DATAPROC_VERSION ; then
377-
DATAPROC_IMAGE_VERSION="${DATAPROC_VERSION}"
378-
else
379-
if version_lt "${SPARK_VERSION}" "3.2" ; then DATAPROC_IMAGE_VERSION="2.0"
380-
elif version_lt "${SPARK_VERSION}" "3.4" ; then DATAPROC_IMAGE_VERSION="2.1"
381-
elif version_lt "${SPARK_VERSION}" "3.6" ; then DATAPROC_IMAGE_VERSION="2.2"
382-
else echo "Unknown dataproc image version" ; exit 1 ; fi
383-
fi
384-
fi
385363
}
386364

387365
function configure_dkms_certs() {
@@ -544,42 +522,30 @@ function prepare_conda_env() {
544522
}
545523

546524
function prepare_common_env() {
547-
define_os_comparison_functions
548-
549525
# Verify OS compatibility and Secure boot state
550526
check_os
551527
check_secure_boot
552528

553-
readonly _shortname="$(os_id)$(os_version|perl -pe 's/(\d+).*/$1/')"
554-
555-
# Dataproc configurations
556-
readonly HADOOP_CONF_DIR='/etc/hadoop/conf'
557-
readonly HIVE_CONF_DIR='/etc/hive/conf'
558-
readonly SPARK_CONF_DIR='/etc/spark/conf'
559-
529+
# read-only configuration variables
530+
_shortname="$(os_id)$(os_version|perl -pe 's/(\d+).*/$1/')"
531+
HADOOP_CONF_DIR='/etc/hadoop/conf'
532+
HIVE_CONF_DIR='/etc/hive/conf'
560533
OS_NAME="$(lsb_release -is | tr '[:upper:]' '[:lower:]')"
561-
readonly OS_NAME
562-
563-
# node role
564534
ROLE="$(get_metadata_attribute dataproc-role)"
565-
readonly ROLE
566-
567-
# master node
568535
MASTER="$(get_metadata_attribute dataproc-master)"
569-
readonly MASTER
570-
571536
workdir=/opt/install-dpgce
572-
tmpdir=/tmp/
573537
temp_bucket="$(get_metadata_attribute dataproc-temp-bucket)"
574-
readonly temp_bucket
575-
readonly pkg_bucket="gs://${temp_bucket}/dpgce-packages"
538+
pkg_bucket="gs://${temp_bucket}/dpgce-packages"
576539
uname_r=$(uname -r)
577-
readonly uname_r
578-
readonly bdcfg="/usr/local/bin/bdconfig"
579-
export DEBIAN_FRONTEND=noninteractive
540+
bdcfg="/usr/local/bin/bdconfig"
541+
KNOX_HOME=/usr/lib/knox
580542

581-
# Knox config
582-
readonly KNOX_HOME=/usr/lib/knox
543+
readonly HADOOP_CONF_DIR HIVE_CONF_DIR OS_NAME ROLE MASTER workdir
544+
readonly temp_bucket pkg_bucket uname_r bdcfg KNOX_HOME
545+
546+
tmpdir=/tmp/
547+
548+
export DEBIAN_FRONTEND=noninteractive
583549

584550
mkdir -p "${workdir}/complete"
585551
set_proxy
@@ -720,6 +686,8 @@ print( " samples-taken: ", scalar @siz, $/,
720686
echo "exit_handler has completed"
721687
}
722688

689+
define_os_comparison_functions
690+
723691

724692
function configure_dask_yarn() {
725693
readonly DASK_YARN_CONFIG_DIR=/etc/dask/

0 commit comments

Comments
 (0)