From 8bcced517b1d652ebc8cbdee95650dc949b6787f Mon Sep 17 00:00:00 2001 From: Baohua Yang Date: Thu, 24 Sep 2015 10:43:24 +0800 Subject: [PATCH] Add example running command to spark --- README.md | 6 +++++- spark_cluster/docker-compose.yml | 2 ++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 7e81d907..b23fe5f1 100644 --- a/README.md +++ b/README.md @@ -45,7 +45,11 @@ Use nginx as a proxy with authentication for backend application. docker registry mirror, with redis as the backend cache. ## spark_cluster -Spark cluster with master and worker nodes +Spark cluster with master and worker nodes. ```sh docker-compose scale worker=2 ``` +Try submitting a test pi application using the spark-submit command. +```sh +/usr/local/spark/bin/spark-submit --master spark://master:7077 --class org.apache.spark.examples.SparkPi /usr/local/spark/lib/spark-examples-1.4.0-hadoop2.6.0.jar 1000 +``` diff --git a/spark_cluster/docker-compose.yml b/spark_cluster/docker-compose.yml index 013e56ec..f600f322 100644 --- a/spark_cluster/docker-compose.yml +++ b/spark_cluster/docker-compose.yml @@ -2,6 +2,8 @@ # This compose file will start spark master node and the worker node. # All nodes will become a cluster automatically. # You can run: docker-compose scale worker=2 +# After startup, try submitting a pi calculation application. +# /usr/local/spark/bin/spark-submit --master spark://master:7077 --class org.apache.spark.examples.SparkPi /usr/local/spark/lib/spark-examples-1.4.0-hadoop2.6.0.jar 1000 master: image: sequenceiq/spark:1.4.0