-- Type conversion with CAST
-- FIX: original was `SELECT ... FROM table` — `table` is a reserved word and
-- Hive allows SELECT without a FROM clause, so cast the constant directly.
select cast('00321' as bigint); -- -> 321 (leading zeros are dropped)
-- Classify rows as odd or even by id using mod()
-- Even ids: remainder is 0
select * from pos_info_report_tmp_20110712 r where mod(r.id, 2) = 0;
-- Odd ids
-- FIX: use <> 0 instead of = 1 — for negative ids mod() returns -1 in Hive,
-- so `= 1` would silently miss them; `<> 0` covers both signs.
select * from pos_info_report_tmp_20110712 r where mod(r.id, 2) <> 0;
-- Simple CASE expression (CASE <expr> WHEN <value> ...): compares `sex`
-- against each WHEN value for equality.
case sex
when '1' then '男'
when '2' then '女'
else '其他' end as sexual
-- Searched CASE expression (CASE WHEN <condition> ...): each WHEN branch
-- takes an arbitrary boolean predicate; more flexible than the simple form.
case when sex = '1' then '男'
when sex = '2' then '女'
else '其他' end
-- General-purpose tuned Hive query template
set hive.exec.parallel=true;                 -- run independent stages in parallel
set hive.auto.convert.join=true;             -- auto map-join small tables
set hive.merge.mapfiles=true;                -- merge small files from map-only jobs
set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;
set hive.optimize.skewjoin=true;             -- mitigate skewed join keys
set mapred.max.split.size=536870912;         -- optional: 512 MB max split
set mapred.min.split.size=134217728;         -- optional: 128 MB min split
set mapred.compress.map.output=true;
-- FIX: original property name `mapred.compress.output.compression.codec` does
-- not exist; the codec for compressed map output is set via
-- mapred.map.output.compression.codec.
set mapred.map.output.compression.codec=org.apache.hadoop.io.compress.SnappyCodec;
insert overwrite table loan_dw.table partition (dayno)
select t1.*, t2.*                            -- FIX: original read `t1.,t2.` (syntax error)
from
(select col1,… from … where dayno=...) t1
left join
(select col1,… from … where dayno=...) t2 on t1.imei=t2.imei
left join
-- nvl(..., rand()) scatters NULL join keys so they don't all land on one reducer
(select col1,… from … where dayno=... ) t3 on nvl(t2.ssoid, rand()) = t3.ssoid
;
# Log a timestamp plus the HQL text about to run.
# NOTE(review): $hql and ExecuteHQL are defined outside this view — confirm
# ExecuteHQL returns a non-zero exit status on failure for the check below.
echo $(date +%Y-%m-%d:%T) "$hql"
ExecuteHQL "${hql}"
# Accumulate exit codes: v_job_stat stays 0 only if every step succeeded.
v_job_stat=`expr ${v_job_stat} + $?`
-- Hive settings required when a query triggers strict-mode cartesian-product
-- checks (e.g. certain IN-condition rewrites).
set hive.strict.checks.cartesian.product = false;
set hive.mapred.mode = nonstrict;
-- Allow regex-based column selection in quoted identifiers,
-- e.g. SELECT `(dayno)?+.+` FROM t to exclude the dayno column.
set hive.support.quoted.identifiers = none;
-- ALTER TABLE examples
-- Rename a table
alter table overseas_algo.mkt_app_tags_src_all_m rename to overseas_algo.mkt_app_rename_test;
-- Add new columns (appended after existing columns)
alter table overseas_algo.mkt_app_tags_src_all_m add columns (add_test string, rename_test int);
-- Rename a column and change its type: CHANGE <old_name> <new_name> <type>
alter table overseas_algo.mkt_app_tags_src_all_m CHANGE add_test add_formal double;
--
-- Dropping a column in Hive:
-- alter table overseas_algo.mkt_app_tags_src_all_m drop column add_formal; -- DROP COLUMN is not supported by Hive
-- Hive has no "ALTER TABLE t DROP COLUMN c"; the only way to remove a column
-- is ALTER TABLE ... REPLACE COLUMNS.
-- alter table overseas_algo.mkt_app_tags_src_all_m replace columns(add_formal double, source int); -- still failed here (incomplete column list)
-- REPLACE COLUMNS takes the list of columns you want to KEEP; any column
-- omitted from the list is removed from the table schema.
-- Example: table emp(id, name, dept) — to drop `id`:
--   ALTER TABLE emp REPLACE COLUMNS (name STRING, dept STRING);
-- Note: this only changes the table *schema*; underlying data files are untouched.
alter table overseas_algo.mkt_app_tags_src_all_m replace columns(app_id bigint, app_name string, tag_id int, tag_name string, accuracy double, type_id int, type_name string, source int); -- works: list every column to keep, excluding the one to drop
-- Partition operations
-- FIX: the ADD PARTITION statement below was missing its terminating
-- semicolon, which would make the script parser swallow the next statement.
-- NOTE(review): the location points at the table root, not a partition
-- subdirectory — confirm this was intended. (Original run failed anyway:
-- "user does not have enough privilege for query".)
alter table overseas_algo.mkt_app_tags_src_all_m add partition (dayno=20200512) location 'hdfs://in-cdh1-hdfs/warehouse/overseas/overseas_algo.db/mkt_app_tags_src_all_m';
-- alter table overseas_algo.mkt_app_tags_src_all_m RENAME PARTITION dayno to dayno_test; -- invalid: RENAME operates on partition *values*, not the partition column name
-- Rename a partition value (dayno=20200505 -> dayno=20200511):
alter table overseas_algo.mkt_app_tags_src_all_m partition (dayno=20200505) rename to partition (dayno=20200511);
-- Build a view of live-streaming apps from the 2020-05-11 partition.
create view overseas_algo.live_stream_view as
select
    app_id,
    app_name
from overseas_algo.mkt_app_tags_src_all_m
where dayno = 20200511
  and lower(tag_name) = 'live streaming';
-- Inspect the schemas of the view and its source table.
desc overseas_algo.live_stream_view;
desc overseas_algo.mkt_app_tags_src_all_m;
-- Sanity-check the view contents.
select * from overseas_algo.live_stream_view limit 100;
-- Rename the live-stream app data table.
alter table overseas_algo.live_app_info_id_v2 rename to overseas_algo.live_app_info;