Merge branch 'master' into php7.1
manuelkiessling committed Jan 19, 2017
2 parents a57b8e7 + 5d451ea commit e22f0cb
Showing 5 changed files with 22 additions and 19 deletions.
7 changes: 7 additions & 0 deletions puppet/modules/app-analyze/manifests/init.pp
@@ -6,6 +6,13 @@
     # We need the cassandra-tools in order to be able to run cqlsh
     require cassandra::packages

+    # we need maven during app deployment
+    package { [
+        "maven"
+    ]:
+        ensure => "installed",
+    }
+
     service { "cassandra":
         ensure => "stopped",
         hasstatus => true,
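The new package resource uses Puppet's array form, which keeps the manifest short if further build-time tools are needed later. A minimal sketch of how that would look (the extra "git" entry is purely illustrative and not part of this commit):

    # Sketch only: "git" is an illustrative placeholder, not part of this commit
    package { [
        "maven",
        "git",
    ]:
        ensure => "installed",
    }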
@@ -5,4 +5,4 @@ MAILTO=""
 * * * * * root /opt/simplecd/simplecd.sh tag travisci-build-master-* https://github.com/journeymonitor/analyze.git https://github.com/journeymonitor/analyze/commit/ 2>&1 | while IFS= read -r line;do echo "$(date) $line";done >> /var/tmp/simplecd/journeymonitor-analyze.log
 <% end %>

-*/5 * * * * journeymonitor /usr/bin/curl <%= scope.function_hiera(['app-analyze::endpoint_monitor_api']) %> -o <%= scope.function_hiera(['app-analyze::testresults_cache_path']) %> 2>&1 | while IFS= read -r line;do echo "$(date) $line";done >> /var/tmp/journeymonitor-analyze-import-and-spark.log && /bin/bash -c '. /etc/journeymonitor/app-analyze-env.sh && /usr/bin/java -jar /opt/journeymonitor/analyze/journeymonitor-analyze-importer-assembly.jar <%= scope.function_hiera(['app-analyze::testresults_cache_path']) %> 2>&1 | while IFS= read -r line;do echo "$(date) $line";done >> /var/tmp/journeymonitor-analyze-import-and-spark.log' && /bin/bash -c '. /etc/journeymonitor/app-analyze-env.sh && /opt/spark-1.5.1-bin-hadoop-2.6_scala-2.11/bin/spark-submit --deploy-mode client --conf spark.eventLog.enabled=true --executor-memory 2g --master spark://<%= scope.function_hiera(['app-analyze::spark_master_address']) %>:7077 /opt/journeymonitor/analyze/journeymonitor-analyze-spark-assembly.jar 2>&1 | while IFS= read -r line;do echo "$(date) $line";done >> /var/tmp/journeymonitor-analyze-import-and-spark.log'
+*/5 * * * * journeymonitor /usr/bin/curl <%= scope.function_hiera(['app-analyze::endpoint_monitor_api']) %> -o <%= scope.function_hiera(['app-analyze::testresults_cache_path']) %> 2>&1 | while IFS= read -r line;do echo "$(date) $line";done >> /var/tmp/journeymonitor-analyze-import-and-spark.log && /bin/bash -c '. /etc/journeymonitor/app-analyze-env.sh && /usr/bin/java -jar /opt/journeymonitor/analyze/journeymonitor-analyze-importer-assembly.jar <%= scope.function_hiera(['app-analyze::testresults_cache_path']) %> 2>&1 | while IFS= read -r line;do echo "$(date) $line";done >> /var/tmp/journeymonitor-analyze-import-and-spark.log' && /bin/bash -c '. /etc/journeymonitor/app-analyze-env.sh && /opt/spark-2.0.2-bin-hadoop2.7/bin/spark-submit --deploy-mode client --conf spark.eventLog.enabled=true --executor-memory 2g --master spark://<%= scope.function_hiera(['app-analyze::spark_master_address']) %>:7077 /opt/journeymonitor/analyze/spark-1.0-SNAPSHOT-jar-with-dependencies.jar 2>&1 | while IFS= read -r line;do echo "$(date) $line";done >> /var/tmp/journeymonitor-analyze-import-and-spark.log'
@@ -2,4 +2,4 @@
 MAILTO=""

 # Remove Spark work data older than 1 day
-0 0 * * * root /usr/bin/find /opt/spark-1.5.1-bin-hadoop-2.6_scala-2.11/work/* -maxdepth 0 -type d -mtime +1 | /usr/bin/xargs /bin/rm -rf
+0 0 * * * root /usr/bin/find /opt/spark-2.0.2-bin-hadoop2.7/work/* -maxdepth 0 -type d -mtime +1 | /usr/bin/xargs /bin/rm -rf
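The cron entry above prunes Spark work directories by directory mtime once a night. As an alternative sketch (not part of this commit), the same housekeeping could be expressed with Puppet's built-in tidy resource; note that tidy checks the age of each matched file on every agent run, so the behaviour differs slightly from the nightly per-directory check:

    # Sketch only: clean old Spark work data via Puppet's tidy type instead of cron
    tidy { "/opt/spark-2.0.2-bin-hadoop2.7/work":
        age     => "1d",   # remove entries older than one day
        recurse => true,   # descend into per-application directories
        rmdirs  => true,   # remove the emptied directories as well
    }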
16 changes: 7 additions & 9 deletions puppet/modules/spark-master/manifests/init.pp
@@ -8,27 +8,25 @@
     }

     exec { "download spark":
-        command => '/usr/bin/curl -L "https://dl.bintray.com/journeymonitor/infra-artifacts/spark-1.5.1-bin-hadoop-2.6_scala-2.11.tgz" -o /opt/puppet/install/spark-1.5.1-bin-hadoop-2.6_scala-2.11.tgz > /opt/puppet/install/download-spark.log 2>&1',
+        command => '/usr/bin/curl -L "http://d3kbcqa49mib13.cloudfront.net/spark-2.0.2-bin-hadoop2.7.tgz" -o /opt/puppet/install/spark-2.0.2-bin-hadoop2.7.tgz > /opt/puppet/install/download-spark.log 2>&1',
         timeout => 1800,
-        creates => "/opt/puppet/install/spark-1.5.1-bin-hadoop-2.6_scala-2.11.tgz",
+        creates => "/opt/puppet/install/spark-2.0.2-bin-hadoop2.7.tgz",
         require => File["/opt/puppet/install"],
     }

     exec { "install spark":
-        command => "/bin/tar xvfz /opt/puppet/install/spark-1.5.1-bin-hadoop-2.6_scala-2.11.tgz -C /opt > /opt/puppet/install/install-spark.log 2>&1",
-        creates => "/opt/spark-1.5.1-bin-hadoop-2.6_scala-2.11/",
+        command => "/bin/tar xvfz /opt/puppet/install/spark-2.0.2-bin-hadoop2.7.tgz -C /opt > /opt/puppet/install/install-spark.log 2>&1",
+        creates => "/opt/spark-2.0.2-bin-hadoop2.7/",
         require => Exec["download spark"],
     }

     exec { "run spark master":
         environment => [
             "SPARK_WORKER_WEBUI_PORT=${worker_webui_startport}",
-            "JAVA_HOME=/usr/lib/jvm/java-7-openjdk-amd64",
-            "JRE_HOME=/usr/lib/jvm/java-7-openjdk-amd64/jre",
         ],
-        command => "/bin/bash /opt/spark-1.5.1-bin-hadoop-2.6_scala-2.11/sbin/start-master.sh >> /var/log/spark-master.log 2>&1",
+        command => "/bin/bash /opt/spark-2.0.2-bin-hadoop2.7/sbin/start-master.sh >> /var/log/spark-master.log 2>&1",
         unless => '/bin/ps axu | /bin/grep "java" | /bin/grep "org.apache.spark.deploy.master.Master" | /bin/grep -v "grep"',
-        require => [ File["/tmp/spark-events"], Exec["install spark"], Class["jre7"] ],
+        require => [ File["/tmp/spark-events"], Exec["install spark"], Class["jre8"] ],
     }

     /*
@@ -42,7 +40,7 @@
     # See ../../spark-slave/manifests/init.pp:L16
     @@exec { "run spark worker":
         environment => ["SPARK_WORKER_WEBUI_PORT=${worker_webui_startport}"],
-        command => "/bin/bash /opt/spark-1.5.1-bin-hadoop-2.6_scala-2.11/sbin/start-slave.sh spark://${fqdn}:7077 >> /var/log/spark-worker.log 2>&1",
+        command => "/bin/bash /opt/spark-2.0.2-bin-hadoop2.7/sbin/start-slave.sh spark://${fqdn}:7077 >> /var/log/spark-worker.log 2>&1",
         unless => '/bin/ps axu | /bin/grep "java" | /bin/grep "org.apache.spark.deploy.worker.Worker" | /bin/grep -v "grep"',
         require => [ Exec["install spark"], Exec["run spark master"] ],
         tag => "run-spark-worker"
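The commented-out block above exports a "run spark worker" exec tagged run-spark-worker; as the pointer to spark-slave/manifests/init.pp suggests, worker nodes would then realize it with a tag-based collector. A minimal sketch of that collection side, assuming exported resources (storeconfigs/PuppetDB) are enabled:

    # Sketch only: collect the exported exec on a worker node by its tag
    Exec <<| tag == "run-spark-worker" |>>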
14 changes: 6 additions & 8 deletions puppet/modules/spark-slave/manifests/init.pp
@@ -1,27 +1,25 @@
 class spark-slave ($worker_webui_startport = 8081, $master_address) {

     exec { "download spark":
-        command => '/usr/bin/curl -L "https://dl.bintray.com/journeymonitor/infra-artifacts/spark-1.5.1-bin-hadoop-2.6_scala-2.11.tgz" -o /opt/puppet/install/spark-1.5.1-bin-hadoop-2.6_scala-2.11.tgz > /opt/puppet/install/download-spark.log 2>&1',
+        command => '/usr/bin/curl -L "http://d3kbcqa49mib13.cloudfront.net/spark-2.0.2-bin-hadoop2.7.tgz" -o /opt/puppet/install/spark-2.0.2-bin-hadoop2.7.tgz > /opt/puppet/install/download-spark.log 2>&1',
         timeout => 1800,
-        creates => "/opt/puppet/install/spark-1.5.1-bin-hadoop-2.6_scala-2.11.tgz",
+        creates => "/opt/puppet/install/spark-2.0.2-bin-hadoop2.7.tgz",
         require => File["/opt/puppet/install"],
     }

     exec { "install spark":
-        command => "/bin/tar xvfz /opt/puppet/install/spark-1.5.1-bin-hadoop-2.6_scala-2.11.tgz -C /opt > /opt/puppet/install/install-spark.log 2>&1",
-        creates => "/opt/spark-1.5.1-bin-hadoop-2.6_scala-2.11/",
+        command => "/bin/tar xvfz /opt/puppet/install/spark-2.0.2-bin-hadoop2.7.tgz -C /opt > /opt/puppet/install/install-spark.log 2>&1",
+        creates => "/opt/spark-2.0.2-bin-hadoop2.7/",
         require => Exec["download spark"],
     }

     exec { "run spark worker":
         environment => [
             "SPARK_WORKER_WEBUI_PORT=${worker_webui_startport}",
-            "JAVA_HOME=/usr/lib/jvm/java-7-openjdk-amd64",
-            "JRE_HOME=/usr/lib/jvm/java-7-openjdk-amd64/jre",
         ],
-        command => "/bin/bash /opt/spark-1.5.1-bin-hadoop-2.6_scala-2.11/sbin/start-slave.sh spark://${master_address}:7077 >> /var/log/spark-worker.log 2>&1",
+        command => "/bin/bash /opt/spark-2.0.2-bin-hadoop2.7/sbin/start-slave.sh spark://${master_address}:7077 >> /var/log/spark-worker.log 2>&1",
         unless => '/bin/ps axu | /bin/grep "java" | /bin/grep "org.apache.spark.deploy.worker.Worker" | /bin/grep -v "grep"',
-        require => [ Exec["install spark"], Class["jre7"] ],
+        require => [ Exec["install spark"], Class["jre8"] ],
         tag => "run-spark-worker"
     }

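Both the spark-master and spark-slave manifests repeat the distribution name spark-2.0.2-bin-hadoop2.7 in their download, install, and start commands. A sketch of how the next version bump could be reduced to a single line per manifest (the variable names are illustrative and not taken from the repository):

    # Sketch only: centralize the Spark distribution name in one variable
    $spark_dist = "spark-2.0.2-bin-hadoop2.7"
    $spark_home = "/opt/${spark_dist}"

    exec { "install spark":
        command => "/bin/tar xvfz /opt/puppet/install/${spark_dist}.tgz -C /opt > /opt/puppet/install/install-spark.log 2>&1",
        creates => "${spark_home}/",
        require => Exec["download spark"],
    }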
