1. 表数据备份: 备份到备份表中,执行以下操作,其中 nginx_log_info_20180724 为备份表:
create table nginx_log_info_20180724 as select * from nginx_log_info;
2. 查看表结构(完整建表语句): show create table 表名;
3. 查看表结构(字段列表): describe 表名;
4. 查询分区字段: show partitions pdm.trd_ta_dividend;
hive 添加数据: INSERT INTO flt_ip_agency_hbs (key, sk_agency, agencyno, agency_name) VALUES ("20201105_135991", "135991", "000", "诸菏酒类包装厂");
5.数据量大的时候在root下运行,统计记录数(适用于千万级数据量,利用了hbase jar中自带的统计行数的工具类) hbase org.apache.hadoop.hbase.mapreduce.RowCounter 'flt_ass_cust_fundbal_hbs'
6. 从A表查询指定列写入B表: insert into pub.flt_agrm_tradeacco_reg (bk_fundaccount, bk_tradeaccount) SELECT bk_fundaccount, bk_tradeaccount FROM pub.flt_agrm_tradeacco_reg001 limit 10;
7. 从A表整表复制到B表: insert into pub.flt_agrm_tradeacco_reg select * from pub.flt_agrm_tradeacco_reg001;
8. 查询分区字段(同第4条): show partitions pdm.trd_ta_dividend;
9.hive环境认证 kinit -k -t /etc/hive.keytab hive
10.导出数据 hive -e "select bk_fundaccount,sk_invpty_of_cust,bk_invpty_of_cust from pub.flt_agrm_fundaccount" > /tmp/flt_agrm_fundaccount.txt ;
11. 在hive上建一张外部表:
create external table if not exists fun_user_external (
    tid INT,
    userid STRING,
    pwd STRING,
    create_time BIGINT,
    email STRING
)
ROW FORMAT DELIMITED FIELDS TERMINATED BY '`'
STORED AS TEXTFILE;