hive分析nginx日志之UDF清洗數據


hive分析nginx日志一:http://www.cnblogs.com/wcwen1990/p/7066230.html

hive分析nginx日志二:http://www.cnblogs.com/wcwen1990/p/7074298.html

接着來看:

1、首先編寫UDF,如下:
--使用String類型的replaceAll()函數:

package net.dbking.hadoop.chavin_hive;

import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.Text;

public class RemoveQuotesUDF extends UDF{
    
     public Text evaluate(Text str){
         if(null == str.toString()){
             return new Text();
         }
         return new Text (str.toString().replaceAll("\"", ""));
     }
}

2、去除“[]”的UDF:

package net.dbking.hadoop.chavin_hive;

import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.Text;

public class RemoveBracketUDF extends UDF{
    
     public Text evaluate(Text str){
         if(null == str.toString()){
             return new Text();
         }
    
         return new Text (str.toString().substring(1,str.toString().length()-1));
        
     }
    
}

3、時間日志格式化UDF:

package net.dbking.hadoop.chavin_hive;

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;

import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.Text;

/**
 * Hive UDF that reformats an nginx access-log timestamp from
 * {@code dd/MMM/yyyy:HH:mm:ss} (English month abbreviation, e.g.
 * {@code 18/Jun/2017:10:20:30}) to {@code yyyy-MM-dd HH:mm:ss}.
 *
 * <p>Returns NULL for a null/blank input and an empty Text when the
 * input cannot be parsed.
 */
public class DataTransformUDF extends UDF {

    // NOTE: SimpleDateFormat is not thread-safe; this is acceptable here only
    // because Hive instantiates a UDF per task/operator. Do not make these static.
    private final SimpleDateFormat inputFormat =
            new SimpleDateFormat("dd/MMM/yyyy:HH:mm:ss", Locale.ENGLISH);
    private final SimpleDateFormat outputFormat =
            new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

    /**
     * @param input the bracket-stripped time field, possibly null
     * @return the timestamp reformatted as {@code yyyy-MM-dd HH:mm:ss};
     *         null for null/blank input; empty Text if parsing fails
     */
    public Text evaluate(Text input) {
        Text output = new Text();

        if (null == input) {
            return null;
        }

        String inputDate = input.toString().trim();

        // BUG FIX: the original tested `null == inputDate`, which is always
        // false after trim(); the intended guard is against an empty string.
        if (inputDate.isEmpty()) {
            return null;
        }

        try {
            Date parseDate = inputFormat.parse(inputDate);
            output.set(outputFormat.format(parseDate));
        } catch (Exception e) {
            // Malformed timestamp: log it and emit an empty value rather than
            // failing the whole query.
            e.printStackTrace();
        }

        return output;
    }
}

4、編寫插入數據hive腳本:

-- Register the Hive contrib jar plus the three custom UDF jars built above.
-- NOTE(review): jar file names below assume the build artifacts were named
-- after each UDF class -- confirm against the actual deployed paths.
add jar /opt/cloudera/parcels/CDH/lib/hive/lib/hive-contrib-1.1.0-cdh5.9.2.jar;

add jar /opt/cloudera/jars/RemoveQuotesUDF.jar;
add jar /opt/cloudera/jars/RemoveBracketUDF.jar;
add jar /opt/cloudera/jars/DateTransformUDF.jar;

create temporary function my_removequote as "net.dbking.hadoop.chavin_hive.RemoveQuotesUDF";
create temporary function my_removebracket as "net.dbking.hadoop.chavin_hive.RemoveBracketUDF";
-- BUG FIX: the class defined in this article is DataTransformUDF, not
-- DateTransformUDF; the original CREATE FUNCTION would fail with
-- ClassNotFoundException.
create temporary function my_datetransform as "net.dbking.hadoop.chavin_hive.DataTransformUDF";

-- Clean each raw field and load the result into the reporting table:
-- strip quotes from host/request/referer, strip brackets from the time
-- field and normalize it to yyyy-MM-dd HH:mm:ss.
insert overwrite table chavin.nginx_access_log_comm
select my_removequote(host),
my_datetransform(my_removebracket(time)),
my_removequote(request),
my_removequote(referer)
from chavin.nginx_access_log;

測試插入數據:
-- Sanity check: inspect a few cleaned rows after the insert.
select * from chavin.nginx_access_log_comm limit 5;

 

場景1:分析哪個時間段,網站訪問量最大:

-- Requests per hour, busiest first. The cleaned time column has the form
-- "yyyy-MM-dd HH:mm:ss", so characters 12-13 (1-based substring) are the hour.
select substring(time,12,2) hour,count(1) cnt
from chavin.nginx_access_log_comm
group by substring(time,12,2)
order by cnt desc;


免責聲明!

本站轉載的文章為個人學習借鑒使用,本站對版權不負任何法律責任。如果侵犯了您的隱私權益,請聯系本站郵箱yoyou2525@163.com刪除。



 
粵ICP備18138465號   © 2018-2025 CODEPRJ.COM