日韩性视频-久久久蜜桃-www中文字幕-在线中文字幕av-亚洲欧美一区二区三区四区-撸久久-香蕉视频一区-久久无码精品丰满人妻-国产高潮av-激情福利社-日韩av网址大全-国产精品久久999-日本五十路在线-性欧美在线-久久99精品波多结衣一区-男女午夜免费视频-黑人极品ⅴideos精品欧美棵-人人妻人人澡人人爽精品欧美一区-日韩一区在线看-欧美a级在线免费观看

歡迎訪問 生活随笔!

生活随笔

當前位置: 首頁 > 编程资源 > 编程问答 > 内容正文

编程问答

hive操作DML

發布時間:2025/4/16 编程问答 41 豆豆
生活随笔 收集整理的這篇文章主要介紹了 hive操作DML 小編覺得挺不錯的,現(xiàn)在分享給大家,幫大家做個參考.

-- Load a file into a table. LOAD performs no data transformation:
-- a LOCAL file is copied into HDFS; a file already on HDFS is moved.
-- If the table is partitioned, the target partition must be specified.
-- filepath may name a single file or a directory.
-- A relative HDFS filepath is resolved against /user/<username>.
-- With OVERWRITE, the target's existing data is deleted first; without it,
-- files whose names collide are still replaced by the new files.
load data [local] inpath 'filepath' [overwrite]
into table tablename
[partition (p1=val1, p2=val2, ...)];


-- Insert query results into a Hive table.
-- Use OVERWRITE to replace the table/partition contents, or INTO to append
-- (a bare "insert table" with overwrite omitted is not valid Hive syntax).
insert {overwrite | into} table table_name1 [partition (part1=val1, part2=val2)]
select * from table_name2;

-- Multi-insert extension: scan the source once, write several targets.
from from_statement
insert {overwrite | into} table table_name1 [partition (part1=val1, part2=val2)]
select_statement1
insert {overwrite | into} table table_name2 [partition (part1=val1, part2=val2)]
select_statement2;

-- Write query results to the filesystem (HDFS, or the local filesystem
-- with LOCAL). Note: directory inserts are always OVERWRITE in Hive --
-- there is no append form.
insert overwrite [local] directory directory1 select ... from ...;

-- Multi-insert extension: one source scan, several output directories.
from from_statement
insert overwrite [local] directory directory1 select_statement1
insert overwrite [local] directory directory2 select_statement2;

-- SELECT statement syntax.
-- In a query the keyword is CLUSTER BY (CLUSTERED BY is table DDL only);
-- CLUSTER BY col = DISTRIBUTE BY col + SORT BY col on the same columns.
select [all | distinct] c1, c2, ...
from table_name
[where where_condition]
[group by col_list]
[
  cluster by col_list |
  [distribute by col_list] [sort by col_list]
]
[limit number];

-- Select all rows.
select * from t1;

-- Filter rows.
select * from sales where amount > 10 and region = 'cs';

-- Deduplicate.
select distinct col1, col2 from t1;
select distinct col1 from t1;

-- First 5 rows (no ordering guarantee without SORT/ORDER BY).
select * from t1 limit 5;

-- Top-k (k = 5): force a single reducer so SORT BY yields a total order.
-- (The original was missing the terminating ';' after SET, which makes
-- the following statement fail in the CLI.)
set mapred.reduce.tasks = 1;
select * from sales sort by amount desc limit 5;

-- Partition-pruned query: filtering on the partition column lets Hive
-- scan only the matching partitions. Both bounds are equal here, so this
-- reads the single 20140212 partition.
select p.*
from page_view p
where p.date >= '20140212' and p.date <= '20140212';

-- HAVING was unsupported in early Hive (added in 0.7); it can always be
-- emulated with a subquery over the grouped result:
select col1 from t1 group by col1 having sum(col2) > 10;
-- Equivalent subquery form:
select col1 from (
  select col1, sum(col2) as col2sum from t1 group by col1
) t2
where t2.col2sum > 10;

-- Write query results to an HDFS directory (add LOCAL before DIRECTORY
-- to write to the local filesystem instead; the original mixed optional
-- [local] bracket notation into a concrete, runnable example).
insert overwrite directory '/user/output'
select a.* from invites a where a.ds = '20120101';

-- GROUP BY, FROM-first form:
from invites a
insert overwrite table events
select a.bar, count(*) where a.foo > 0 group by a.bar;

-- Equivalent conventional form:
insert overwrite table events
select a.bar, count(*) from invites a where a.foo > 0 group by a.bar;

-- JOIN, FROM-first form:
from pokes t1 join invites t2 on (t1.bar = t2.bar)
insert overwrite table events
select t1.bar, t1.foo, t2.foo;

-- Multi-table insert: a single scan of src feeds several destinations;
-- further INSERT ... SELECT clauses may be appended before the ';'.
from src
insert overwrite table dest1 select src.* where src.key < 100
insert overwrite table dest2 select src.key, src.value where src.key > 100 and src.key < 200;

-- Streaming: TRANSFORM pipes the selected columns (tab-separated) through
-- an external command -- '/bin/cat' here, a pass-through -- and maps the
-- command's tab-separated output back onto the columns named in AS.
from invites a
insert overwrite table events
select transform(a.foo,a.bar) as(oof,rab) using '/bin/cat' where a.ds='20120212';

-- Worked example: the MovieLens u.data ratings file.
-- Each row is tab-separated: user id, movie id, rating, epoch timestamp.
create table u_data(
  userid int,
  movieid int,
  rating int,
  unixtime string
)
row format delimited
fields terminated by '\t'
stored as textfile;

-- Load the local file, replacing any existing table data.
load data local inpath 'u.data'
overwrite into table u_data;

-- Row-count sanity check.
select count(1) from u_data;

mapper.py
# mapper.py -- Hive streaming mapper for u_data.
# Reads tab-separated (userid, movieid, rating, unixtime) rows on stdin and
# emits (userid, movieid, rating, weekday), where weekday is the ISO weekday
# of the epoch timestamp (Monday == 1 ... Sunday == 7).
# Fixes vs. original: missing ':' after the for statement, garbled
# indentation, and the Python-2-only print statement (print() works on 2 & 3).
import sys
import datetime

for line in sys.stdin:
    line = line.strip()
    userid, movieid, rating, unixtime = line.split('\t')
    weekday = datetime.datetime.fromtimestamp(float(unixtime)).isoweekday()
    print('\t'.join([userid, movieid, rating, str(weekday)]))

-- Target table: an integer weekday column replaces the raw timestamp.
create table u_data_new(
  userid int,
  movieid int,
  rating int,
  weekday int
)
row format delimited
fields terminated by '\t';

-- Ship the script to every task node so TRANSFORM can execute it
-- (the original was missing the terminating ';').
add file mapper.py;

-- Stream u_data through the mapper, writing the transformed rows.
insert overwrite table u_data_new
select transform(userid, movieid, rating, unixtime)
       using 'python mapper.py'
       as (userid, movieid, rating, weekday)
from u_data;

-- Ratings per weekday.
select weekday, count(*) from u_data_new group by weekday;

-- Apache access-log example: parse combined-log-format lines with the
-- contrib RegexSerDe. The original left the serde class and properties as
-- '...' placeholders and put untagged prose after ADD JAR (a syntax error);
-- filled in below from the canonical Hive GettingStarted example.
-- Adds the contrib jar to Hive's classpath (adjust the path as needed):
add jar /path/to/hive_contrib.jar;

create table apachelog(
  host string,
  identity string,
  user string,
  time string,
  request string,
  status string,
  size string,
  referer string,
  agent string
)
row format serde 'org.apache.hadoop.hive.contrib.serde2.RegexSerDe'
with serdeproperties(
  -- one capture group per column (combined log format)
  'input.regex' = '([^ ]*) ([^ ]*) ([^ ]*) (-|\\[[^\\]]*\\]) ([^ \"]*|\"[^\"]*\") (-|[0-9]*) (-|[0-9]*)(?: ([^ \"]*|\"[^\"]*\") ([^ \"]*|\"[^\"]*\"))?'
)
stored as textfile;

hive 網路接口
hive --service hwi 開啟服務
http://master:9999/hwi

hive jdbc 操作
1、開啟監(jiān)聽服務
hive --service hiveserver

?

轉載于:https://www.cnblogs.com/jsunday/p/3872755.html

總結

以上是生活随笔為你收集整理的hive操作DML的全部內容,希望文章能夠幫你解決所遇到的問題。

如果覺得生活随笔網站內容還不錯,歡迎將生活随笔推薦給好友。