export HADOOP_CONF_DIR=/data/hadoop/etc/hadoop
export KRB5_CONFIG=$HADOOP_CONF_DIR/krb5.conf
export HADOOP_JAAS_DEBUG=true
FLINK_HOME=./
# The user-developed Flink program and its main class
WORDCOUNT_JAR=./flink_stream_demo.jar
MAIN=com.xx.KafkaWordCount
hadoop_principal=hdfs/hadoop_cluster@HADOOP.COM
# Kerberos credentials; the keytab and krb5.conf are uploaded automatically to /user/hdfs/.flink/application_xx
authConfig="-yD security.kerberos.krb5-conf.path=$KRB5_CONFIG -yD security.kerberos.login.keytab=$HADOOP_CONF_DIR/user.keytab -yD security.kerberos.login.principal=$hadoop_principal"
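# Note: -yD sets Flink configuration dynamically, so the same settings could
# also live in flink-conf.yaml (a sketch, not part of the original script):
#   security.kerberos.krb5-conf.path: /data/hadoop/etc/hadoop/krb5.conf
#   security.kerberos.login.keytab: /data/hadoop/etc/hadoop/user.keytab
#   security.kerberos.login.principal: hdfs/hadoop_cluster@HADOOP.COM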
# JVM options for the Flink client at submission time; without this, cross-KDC authentication falls back to the local /etc/krb5.conf by default and fails
export FLINK_ENV_JAVA_OPTS_CLI="-Djava.security.krb5.conf=$KRB5_CONFIG"
# Refresh the local Kerberos ticket cache before submitting
kdestroy
kinit -kt $HADOOP_CONF_DIR/user.keytab $hadoop_principal
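# Optional sanity check (an addition, not in the original script): confirm a
# valid ticket was obtained before submitting
klist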
# Note: arguments after the JAR path (-t streaming here) are passed to the
# main class, not parsed as Flink CLI options
$FLINK_HOME/bin/flink run -d \
  -m yarn-cluster \
  -yD sun.security.krb5.debug=true \
  $authConfig \
  -c $MAIN $WORDCOUNT_JAR \
  -t streaming
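# Optional follow-up (an addition, not in the original script): verify the job
# was accepted by YARN; the application ID is also printed by the client above
yarn application -list -appStates RUNNING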
Application directory structure
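To verify the upload, the staging directory can be listed; a hypothetical example, assuming the application ID is application_xx (the exact file set varies by Flink version):

hdfs dfs -ls /user/hdfs/.flink/application_xx
# expected to contain, alongside the other job files:
#   krb5.conf
#   user.keytab
#   flink_stream_demo.jar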