# Jenkins Job Builder definition: matrix job that benchmarks a previously
# built OpenJDK 10 image by running Hadoop terasort and archiving a CSV of
# elapsed time per (JVM_VARIANT, BUILD_TYPE) combination.
- job:
    name: jdk10-terasort-benchmark
    project-type: matrix
    defaults: global
    description: |
      * Runs the terasort benchmark.
    properties:
      - authorization:
          linaro:
            - job-read
          openjdk-members:
            - job-extended-read
            - job-build
            - job-cancel
      - build-discarder:
          days-to-keep: 30
          num-to-keep: 10
          artifact-num-to-keep: 5
    disabled: false
    node: aarch64-06
    display-name: 'OpenJDK 10 - Run terasort benchmark'
    axes:
      - axis:
          type: user-defined
          name: JVM_VARIANT
          values:
            - server
            - client
      - axis:
          type: user-defined
          name: BUILD_TYPE
          values:
            - release
      - axis:
          type: slave
          name: label
          values:
            - aarch64-06
    execution-strategy:
      # Matrix cells run one after another (they share the node and HDFS state).
      sequential: true
    wrappers:
      - workspace-cleanup:
          dirmatch: false
      - timestamps
      - matrix-tie-parent:
          node: aarch64-06
    builders:
      # copyartifact is slow and the file is local
      # copy instead of going back and forth between master <-> slave
      # - copyartifact:
      #     project: jdk10-build-image
      #     filter: 'out/jdk10-${JVM_VARIANT}-${BUILD_TYPE}.tar.gz'
      #     target: incoming
      #     flatten: true
      - copyartifact:
          project: openjdk8-hadoop-LCA14
          filter: 'out/openjdk8-hadoop-LCA14.tar.gz'
          target: incoming
          flatten: true
      - shell: |
          #!/bin/bash
          set -exu

          # Size (in GB) of the terasort input data set.
          NGIGABYTES=1

          # client variant uses server with -XX:TieredStopAtLevel=1
          # zero is a third possible value.
          REAL_VARIANT=${JVM_VARIANT/client/server}

          ## Extract jdk
          # The JDK tarball is read directly from the jdk10-build-image job's
          # matrix workspace on this node (see copyartifact comment above).
          rm -rf jdk10*
          tar xf ~/workspace/jdk10-build-image/BUILD_TYPE/${BUILD_TYPE}/JVM_VARIANT/${REAL_VARIANT}/label/${NODE_NAME}/out/jdk10-${REAL_VARIANT}-${BUILD_TYPE}.tar.gz
          export JAVA_HOME=${WORKSPACE}/jdk10-${REAL_VARIANT}-${BUILD_TYPE}
          export PATH=${JAVA_HOME}/bin:$PATH

          ## Extract Hadoop pre-builts
          # Fixed typo: was "openjdk8-hadooop-LCA14" (triple o), which never
          # matched the directory the tarball extracts to, so stale files
          # from a previous run could survive.
          rm -rf openjdk8-hadoop-LCA14
          tar xf incoming/openjdk8-hadoop-LCA14.tar.gz

          ## Benchmark
          # FIXME
          #(cd incoming; wget --progress=dot -e dotbytes=10M http://openjdk-apm1/openjdk8-build-artefacts/${NGIGABYTES}GB.tar.gz; tar xf ${NGIGABYTES}GB.tar.gz)
          (cd incoming; tar xf ~/srv/hadoop-terasort-reference-files/${NGIGABYTES}GB.tar.gz)
          TERAGEN_BASELINE_DIR=${WORKSPACE}/incoming
          HADOOP_DIR=${WORKSPACE}/openjdk8-hadoop-LCA14
          rm -rf out
          mkdir out

          # Point Hadoop at the freshly extracted JDK and relocate its tmp
          # dir under ${HOME} so each run starts from a clean HDFS.
          sed -i '/^export JAVA_HOME=/d' ${HADOOP_DIR}/conf/hadoop-env.sh
          echo "export JAVA_HOME=$JAVA_HOME" >> ${HADOOP_DIR}/conf/hadoop-env.sh
          sed -i 's|/work/${user.name}/hadoop-tmp|${user.home}/hadoop/tmp|' ${HADOOP_DIR}/conf/core-site.xml
          source ${HADOOP_DIR}/env.sh
          which hadoop
          which java
          which hdfs
          java -version

          # Restart a single-node cluster from scratch.
          stop-dfs.sh
          stop-yarn.sh
          rm -rf ${HOME}/hadoop
          mkdir -p ${HOME}/hadoop/tmp
          hdfs namenode -format -force
          start-dfs.sh
          start-yarn.sh
          # Need time for the datanodes to materialise.
          sleep 30
          jps
          hadoop fs -mkdir -p /user/$USER
          hadoop fs -copyFromLocal $TERAGEN_BASELINE_DIR/${NGIGABYTES}GB /user/$USER
          # Make sure the daemons are stopped however the script exits.
          trap "stop-dfs.sh; stop-yarn.sh" EXIT

          # Use only C1 compiler for client runs.
          if [ $JVM_VARIANT = "client" ]; then
            VARIANT_OPT=-XX:TieredStopAtLevel=1
          else
            VARIANT_OPT=-Dharmless.property
          fi

          # Time the sort; "%e" is wall-clock seconds, the sed strips the
          # fractional part before the value is written to the CSV.
          elapsed_time_file=$(mktemp /tmp/benchmark-terasort-XXXXXX.$$)
          HADOOP_OPTS=${VARIANT_OPT} /usr/bin/time -o $elapsed_time_file -f "%e" terasort ${NGIGABYTES}GB ${NGIGABYTES}GB-sorted
          hadoop fs -rm -R ${NGIGABYTES}GB-sorted
          sed -i 's/\..*//' $elapsed_time_file
          elapsed_time=$(cat $elapsed_time_file)
          # Timestamp normalised to midnight so one run per day plots cleanly.
          date_as_epoch=$(date --date="$(date +'%Y-%m-%d')" +%s)
          echo "$date_as_epoch,$JVM_VARIANT,$NGIGABYTES,$elapsed_time" > out/terasort-results-${JVM_VARIANT}-${BUILD_TYPE}.csv
          rm -rf incoming/${NGIGABYTES}*
    publishers:
      - archive:
          artifacts: 'out/terasort-results-*.csv'