# Travis CI build configuration (reconstructed: diff +/- markers and fused
# line numbers removed, keeping the "+" side of the patch).
sudo: required
language: java
jdk:
  - openjdk11
  - openjdk8
before_install:
  - cat /etc/hosts # optionally check the content *before*
  # Travis hostnames can exceed the 64-char kernel limit; truncate to 63.
  - sudo hostname "$(hostname | cut -c1-63)"
  # NOTE(review): a diff hunk boundary was here — two before_install entries
  # are not visible in this view; restore them from the full file.
# (remaining before_install steps)
  - cat /proc/cpuinfo | grep cores | wc -l
  - free -h
install:
  - |
    # Choose the provisioning path by JDK: `javac -version` prints
    # "javac 11.0.x" on JDK 11 and "javac 1.8.0_x" on JDK 8.
    # Anchored regexes so e.g. a future "18" cannot match both branches.
    jc_ver="$(javac -version 2>&1 | cut -d ' ' -f2)"
    if [[ "$jc_ver" =~ ^11\. ]]; then
      echo "No Hadoop and Spark config implementation for ${jc_ver} yet."
    elif [[ "$jc_ver" =~ ^1\.8\. ]]; then
      hibench=$(pwd)
      cd /opt/
      # The old d3kbcqa49mib13.cloudfront.net mirror is dead; fetch Spark
      # from the permanent Apache archive instead.
      wget https://archive.apache.org/dist/spark/spark-1.6.0/spark-1.6.0-bin-hadoop2.6.tgz
      tar -xzf spark-1.6.0-bin-hadoop2.6.tgz
      wget https://archive.apache.org/dist/hadoop/core/hadoop-2.6.5/hadoop-2.6.5.tar.gz
      tar -xzf hadoop-2.6.5.tar.gz
      cd "${hibench}"  # quoted in case the checkout path contains spaces
      # Install the pre-baked single-node Hadoop/Spark configs.
      cp ./travis/spark-env.sh /opt/spark-1.6.0-bin-hadoop2.6/conf/
      cp ./travis/core-site.xml /opt/hadoop-2.6.5/etc/hadoop/
      cp ./travis/hdfs-site.xml /opt/hadoop-2.6.5/etc/hadoop/
      cp ./travis/mapred-site.xml /opt/hadoop-2.6.5/etc/hadoop/
      cp ./travis/yarn-site.xml /opt/hadoop-2.6.5/etc/hadoop/
      cp ./travis/hibench.conf ./conf/
      cp ./travis/benchmarks.lst ./conf/
      # Hadoop's launcher scripts need JAVA_HOME baked into hadoop-env.sh.
      echo "export JAVA_HOME=${JAVA_HOME}" >> /opt/hadoop-2.6.5/etc/hadoop/hadoop-env.sh
    fi
before_script:
  # Cap the JVM heap so Maven fits inside the Travis container.
  - export JAVA_OPTS=-Xmx512m
cache:
  directories:
    - $HOME/.m2  # cache the local Maven repository between builds
script:
  - |
    # Select the Maven build matrix by JDK, using the same anchored
    # version probe as the install step.
    jc_ver="$(javac -version 2>&1 | cut -d ' ' -f2)"
    if [[ "$jc_ver" =~ ^11\. ]]; then
      # JDK 11: compile-only check across all benchmark profiles.
      mvn clean package -q -Psparkbench -Pflinkbench -Phadoopbench -Pstormbench -Dmaven.javadoc.skip=true -Dhadoop=3.2 -Dspark=2.4 -Dscala=2.12
    elif [[ "$jc_ver" =~ ^1\.8\. ]]; then
      # JDK 8: build every supported Hadoop/Spark/Scala combination, then
      # run the benchmarks on the single-node cluster provisioned in install.
      mvn clean package -q -Dmaven.javadoc.skip=true -Dhadoop=3.2 -Dspark=2.4 -Dscala=2.12
      mvn clean package -q -Dmaven.javadoc.skip=true -Dspark=2.2 -Dscala=2.11
      mvn clean package -q -Dmaven.javadoc.skip=true -Dspark=2.0 -Dscala=2.11
      mvn clean package -q -Dmaven.javadoc.skip=true -Dspark=1.6 -Dscala=2.10
      sudo -E ./travis/restart_hadoop_spark.sh
      cp ./travis/hadoop.conf ./conf/
      cp ./travis/spark.conf ./conf/
      /opt/hadoop-2.6.5/bin/yarn node -list 2
      sudo -E ./bin/run_all.sh
    else
      # Unknown JDK — fail the build explicitly rather than silently pass.
      exit 1
    fi