配置文件
hadoop-env.sh
core-site.xml
hdfs-site.xml
yarn-env.sh
mapred-env.sh
mapred-site.xml
启动hdfs-HA
./sbin/hadoop-daemon.sh start journalnode (每台机器都要启动)
hdfs namenode -format
hdfs zkfc -formatZK
start-all.sh
hadoop-daemon.sh start zkfc
验证端口 50070
下载
http://archive.apache.org/dist/spark/spark-2.2.1/
cdh版本
http://archive-primary.cloudera.com/cdh5/cdh/5/
端口8081
配置文件
slaves
spark-env.sh
本地
./bin/run-example SparkPi --master local[2]
standalone
./bin/spark-submit --class org.apache.spark.examples.SparkPi --master spark://bigdata-cm02.kfk.com:7077 lib/spark-examples-1.6.0-hadoop2.6.0.jar 10
yarn
./bin/spark-submit --class org.apache.spark.examples.SparkPi --master yarn-cluster lib/spark-examples-1.6.0-hadoop2.6.0.jar 10
sbt compile
local
public: http://maven.aliyun.com/nexus/content/groups/public/
typesafe: http://dl.bintray.com/typesafe/ivy-releases/ , [organization]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly
ivy-sbt-plugin: http://dl.bintray.com/sbt/sbt-plugin-releases/, [organization]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
sonatype-oss-releases
sonatype-oss-snapshots