Modifying the configuration files

# Edit the environment variables (no sudo needed for your own ~/.bashrc)
gedit ~/.bashrc
# Reload the file so the changes take effect
source ~/.bashrc
# Inspect the system PATH
echo $PATH
# Check whether the install succeeded (replace xxxx with the tool's command)
xxxx -version

---------------------------------------------------------
#set Java environment

export JAVA_HOME=/home/stephyfifi/dev/jdk1.8.0_231
export JRE_HOME=$JAVA_HOME/jre
export CLASSPATH=.:$JAVA_HOME/lib:$JRE_HOME/lib:$CLASSPATH
export PATH=$JAVA_HOME/bin:$JRE_HOME/bin:$PATH

#set scala environment

export SCALA_HOME=/home/stephyfifi/dev/scala-2.11.11
export PATH=$PATH:$SCALA_HOME/bin

#set spark environment
export SPARK_HOME=/home/stephyfifi/dev/spark-2.4.4-bin-hadoop2.7
export PATH=$PATH:${SPARK_HOME}/bin

# set hadoop environment
export HADOOP_HOME=/home/stephyfifi/dev/hadoop-2.7.7
export PATH=$PATH:$HADOOP_HOME/bin 
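
With these exports in place, a quick sanity check is to reload ~/.bashrc and ask each tool for its version; a minimal sketch, assuming the install paths above are correct:

# Reload the new variables, then verify each tool is on the PATH
source ~/.bashrc
java -version          # should report 1.8.0_231
scala -version         # should report 2.11.11
hadoop version         # should report 2.7.7
spark-shell --version  # should report 2.4.4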

# 1. set core-site.xml
<configuration>
<property>
        <name>hadoop.tmp.dir</name>
        <value>file:///home/stephyfifi/dev/hadoop-2.7.7/tmp</value>
        <description>A base for other temporary directories.</description>
    </property>
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://127.0.0.1</value>
    </property>
</configuration>

# 2. set hdfs-site.xml
<configuration>
    <property>
        <name>dfs.replication</name>
        <value>1</value>
    </property>
    <property>
        <name>dfs.namenode.name.dir</name>
        <value>file:///home/stephyfifi/dev/hadoop-2.7.7/tmp/dfs/name</value>
    </property>
    <property>
        <name>dfs.datanode.data.dir</name>
        <value>file:///home/stephyfifi/dev/hadoop-2.7.7/tmp/dfs/data</value>
    </property>
</configuration>
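
Before the first start, the NameNode has to be formatted. A minimal sketch, assuming $HADOOP_HOME matches the export above (note that start-dfs.sh lives in sbin, which the PATH above does not include):

# Format the NameNode once (repeating this wipes HDFS metadata)
hdfs namenode -format
# Start the NameNode, DataNode, and SecondaryNameNode daemons
$HADOOP_HOME/sbin/start-dfs.sh
# jps should now list NameNode, DataNode, and SecondaryNameNode
jps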

# 3. set yarn env
# 3.1 set mapred-site.xml
<configuration>
    <!-- Tell the framework that MapReduce runs on YARN -->
    <property>
        <name>mapreduce.framework.name</name>
        <value>yarn</value>
    </property>
</configuration>
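
Hadoop 2.7 ships only a template for this file, so if mapred-site.xml does not exist yet it has to be created first; a sketch, assuming the default etc/hadoop layout:

cd $HADOOP_HOME/etc/hadoop
cp mapred-site.xml.template mapred-site.xml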

# 3.2 set yarn-site.xml
<configuration>
    <!-- Reducers fetch map output via mapreduce_shuffle -->
    <property>
        <name>yarn.nodemanager.aux-services</name>
        <value>mapreduce_shuffle</value>
    </property>
</configuration>
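
With both files in place, YARN starts the same way as HDFS; a sketch, with the same sbin caveat as above:

# Start the ResourceManager and NodeManager daemons
$HADOOP_HOME/sbin/start-yarn.sh
# jps should additionally list ResourceManager and NodeManager
jps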

# set Hive env
# 1. hive-site.xml configuration
<property>
  <name>javax.jdo.option.ConnectionURL</name>
  <value>jdbc:mysql://master:3306/hive?createDatabaseIfNotExist=true</value>
  <description>JDBC connect string for a JDBC metastore</description>
</property>
<property>
  <name>javax.jdo.option.ConnectionDriverName</name>
  <value>com.mysql.jdbc.Driver</value>
  <description>Driver class name for a JDBC metastore</description>
</property>
<property>
  <name>javax.jdo.option.ConnectionUserName</name>
  <value>hive</value>
  <description>username to use against metastore database</description>
</property>
<property>
  <name>javax.jdo.option.ConnectionPassword</name>
  <value>hive</value>
  <description>password to use against metastore database</description>
</property>
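
These four properties assume a MySQL instance reachable on the host master with a hive database and user. A minimal sketch of preparing that account and initializing the metastore; schematool ships with Hive, but $HIVE_HOME is an assumed variable (it is not set in the exports above), and the MySQL JDBC driver jar must be present in $HIVE_HOME/lib:

# Create the metastore account matching the hive-site.xml values above
mysql -u root -p -e "CREATE USER 'hive'@'%' IDENTIFIED BY 'hive'; GRANT ALL PRIVILEGES ON hive.* TO 'hive'@'%'; FLUSH PRIVILEGES;"
# Initialize the metastore tables (requires mysql-connector-java jar in $HIVE_HOME/lib)
$HIVE_HOME/bin/schematool -dbType mysql -initSchema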

--------------------------------------------------------
# Start Spark
spark-shell
# Start HBase (assumes HBase was installed separately; it is not configured above)
hbase shell
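
A quick smoke test for the Spark install can be piped straight into the shell; a sketch:

# Sum the integers 1..100 inside spark-shell; expect res0: Double = 5050.0
echo 'sc.parallelize(1 to 100).sum()' | spark-shell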
-------------------
Install Docker
Installing Docker on Ubuntu 18.04 LTS

