sqoop&Hive實例


案例:
1. 把原始log數據加載到表beifenglog中;
2. 建立子表beifenglog_hour_visit,存貯常用字段,並從原表中提取數據到子表中;
3. 提取原表數據過程中,使用UDF處理字段信息,存儲到子表中;
 
 
1. 把原始log數據加載到表beifenglog中;

-- Raw access-log table: each log line is split by RegexSerDe, one capture
-- group per column; every field is kept as a string at this ingestion stage.
-- NOTE(review): org.apache.hadoop.hive.contrib.serde2.RegexSerDe lives in
-- hive-contrib, so the jar must be on hive.aux.jars.path; the built-in
-- org.apache.hadoop.hive.serde2.RegexSerDe would avoid that extra setup.
CREATE TABLE beifenglog (
    remote_addr          STRING,
    remote_user          STRING,
    time_local           STRING,
    request              STRING,
    status               STRING,
    body_bytes_sent      STRING,
    request_body         STRING,
    http_referer         STRING,
    http_user_agent      STRING,
    http_x_forwarded_for STRING,
    host                 STRING
)
ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.RegexSerDe'
WITH SERDEPROPERTIES (
    -- one capture group per column; most raw fields are wrapped in quotes
    "input.regex" = "(\\\"[\\d\\.]+\\\") (\\\"[^ ]+\\\") (\\\".*?\\\") (\\\".*?\\\") (\\\"\\d+\\\") (\\\"\\d+\\\") ([^ ]+) (\\\"[^ ]+\\\") (\\\".*?\\\") (\\\"[^ ]+\\\") (\\\"[^ ]+\\\")"
)
STORED AS TEXTFILE;

加載原表數據
-- Load the raw log file from the local filesystem into the staging table;
-- OVERWRITE replaces any data already in beifenglog.
load data local inpath '/opt/test/beifenglog.data' overwrite into table beifenglog;
 
2. 建立子表beifenglog_hour_visit,存貯常用字段,並從原表中提取數據到子表中;

-- Derived table keeping only the frequently-used columns, stored as ORC.
-- FIX: the ORC table property is "orc.compress" and the enum value must be
-- upper-case ("SNAPPY"). Lower-case "snappy" is exactly what triggered
-- "No enum constant ...CompressionKind.snappy", and renaming the key to
-- "orc.compression" only hides the error: Hive does not recognise that key,
-- so the table silently falls back to the default ZLIB compression.
-- NOTE(review): the ROW FORMAT DELIMITED clause has no effect on an ORC
-- table but is kept to match the original script.
create table beifenglog_hour_visit(
remote_addr string,
time_local string,
request string,
http_referer string)
row format delimited fields terminated by '\t'
stored as orc tblproperties ("orc.compress"="SNAPPY");
 
-- Full refresh of the derived table from the raw staging table.
INSERT OVERWRITE TABLE beifenglog_hour_visit
SELECT
    remote_addr,
    time_local,
    request,
    http_referer
FROM beifenglog;

 
報錯1:
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.ClassNotFoundException: Class org.apache.hadoop.hive.contrib.serde2.RegexSerDe not found
        at org.apache.hadoop.hive.ql.exec.MapOperator.getConvertedOI(MapOperator.java:334)
        at org.apache.hadoop.hive.ql.exec.MapOperator.setChildren(MapOperator.java:352)
        at org.apache.hadoop.hive.ql.exec.mr.ExecMapper.configure(ExecMapper.java:126)
 
解決: hive-site.xml

<!-- Puts hive-contrib on Hive's auxiliary classpath so that
     org.apache.hadoop.hive.contrib.serde2.RegexSerDe can be loaded at query
     time (fixes the ClassNotFoundException above); adjust the jar path to
     the local Hive installation. -->
<property>
    <name>hive.aux.jars.path</name>
    <value>file:///opt/modules/cdh/hive-0.13.1-cdh5.3.6/lib/hive-contrib-0.13.1-cdh5.3.6.jar</value>
    <description>Added by tiger.zeng on 20120202.These JAR file are available to all users for all jobs</description>
</property>

 
報錯2:
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.IllegalArgumentException: No enum constant org.apache.hadoop.hive.ql.io.orc.CompressionKind.snappy
        at org.apache.hadoop.hive.ql.exec.FileSinkOperator.createBucketFiles(FileSinkOperator.java:469)
        at org.apache.hadoop.hive.ql.exec.FileSinkOperator.processOp(FileSinkOperator.java:550)
        at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:796)
        at org.apache.hadoop.hive.ql.exec.SelectOperator.processOp(SelectOperator.java:84)
        at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:796)
        at org.apache.hadoop.hive.ql.exec.TableScanOperator.processOp(TableScanOperator.java:92)
        at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:796)
        at org.apache.hadoop.hive.ql.exec.MapOperator.process(MapOperator.java:549)
        ... 9 more
        
解決:重新創建子表。報錯的根本原因是 ORC 壓縮枚舉值必須大寫,正確的改法是:
("orc.compress"="snappy") --> ("orc.compress"="SNAPPY")
(注意:若改成 "orc.compression" 雖然不再報錯,但 Hive 並不識別該屬性名,表會默認回退到 ZLIB 壓縮,並未真正啟用 snappy。)
 
 
3. 提取原表數據過程中,使用UDF處理字段信息,存儲到子表中;

-- Register the jar containing the custom UDFs, then expose them as
-- TEMPORARY functions (visible only for the current Hive session).
add jar /opt/test/hive.jar;
create temporary function removequote as 'org.gh.hadoop.hive.test.RemoveQuote';
create temporary function formatdate as 'org.gh.hadoop.hive.test.FormatDate';
 
-- Re-populate the derived table, stripping the surrounding double quotes
-- from each field and normalising the timestamp via the session UDFs.
INSERT OVERWRITE TABLE beifenglog_hour_visit
SELECT
    removequote(remote_addr),
    formatdate(removequote(time_local)),
    removequote(request),
    removequote(http_referer)
FROM beifenglog;

 

FormatDate.java內容如下:

// FIX: the original file had no package declaration, yet the function is
// registered as 'org.gh.hadoop.hive.test.FormatDate' (and RemoveQuote.java
// does declare this package); without it the class cannot be resolved
// under that fully-qualified name.
package org.gh.hadoop.hive.test;

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;

import org.apache.hadoop.hive.ql.exec.UDF;

/**
 * Hive UDF converting an access-log timestamp such as
 * "30/May/2013:17:38:20" into "2013-05-30 17:38:20".
 * Returns null for null or unparsable input.
 */
public class FormatDate extends UDF {
    // Locale.ENGLISH ensures the month abbreviation ("May", "Jan", ...)
    // parses regardless of the JVM's default locale.
    // NOTE(review): SimpleDateFormat is not thread-safe; this is fine as long
    // as Hive uses one UDF instance per task -- confirm if ever shared.
    private SimpleDateFormat inputFormat = new SimpleDateFormat("dd/MMM/yyyy:HH:mm:ss", Locale.ENGLISH);
    private SimpleDateFormat outputFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

    /**
     * @param dateStr log timestamp, e.g. "30/May/2013:17:38:20"; may be null
     * @return the timestamp as "yyyy-MM-dd HH:mm:ss", or null if unparsable
     */
    public String evaluate(String dateStr) {
        if (dateStr == null) {
            return null;
        }
        try {
            Date date = inputFormat.parse(dateStr);
            return outputFormat.format(date);
        } catch (Exception e) {
            // Tolerate malformed rows: log the problem and emit NULL instead
            // of failing the whole MapReduce task.
            e.printStackTrace();
            return null;
        }
    }
}

 
RemoveQuote.java內容如下:

package org.gh.hadoop.hive.test;

import org.apache.hadoop.hive.ql.exec.UDF;

/**
 * Hive UDF that strips every double-quote character from a field value,
 * undoing the quoting present in the raw access log. Returns null for
 * null input.
 */
public class RemoveQuote extends UDF {
    /**
     * @param str raw field value, possibly quoted; may be null
     * @return str with every '"' removed, or null if str was null
     */
    public String evaluate(String str) {
        if (str == null) {
            return null;
        }
        // FIX: use the literal String.replace instead of replaceAll, which
        // compiles its argument as a regular expression on every call;
        // output is identical for this pattern.
        return str.replace("\"", "");
    }
}

 


免責聲明!

本站轉載的文章為個人學習借鑒使用,本站對版權不負任何法律責任。如果侵犯了您的隱私權益,請聯系本站郵箱yoyou2525@163.com刪除。



 
粵ICP備18138465號   © 2018-2025 CODEPRJ.COM