Two Examples of Creating Hive UDFs

1. Full-width to half-width conversion

package com.scb.udf;

import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.Text;

public class Full2Half extends UDF {

    public Text evaluate(final Text input) {
        if (input == null) {
            return null;
        }
        char[] c = input.toString().toCharArray();
        for (int index = 0; index < c.length; index++) {
            if (c[index] == 12288) { // full-width space (U+3000)
                c[index] = (char) 32;
            } else if (c[index] > 65280 && c[index] < 65375) { // other full-width characters (U+FF01 to U+FF5E)
                c[index] = (char) (c[index] - 65248);
            }
        }
        return new Text(new String(c));
    }

}
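For reference, this conversion UDF can be registered and called the same way as the HDFS-lookup example further down. The statements below are only a sketch: they assume the class is packaged into the same full2half-1.0-SNAPSHOT.jar used later in this post, and the test string is made up.

create function full2half as 'com.scb.udf.Full2Half' using jar 'hdfs:///home/hypers/lib/full2half-1.0-SNAPSHOT.jar';

select full2half('Ｈｅｌｌｏ　Ｗｏｒｌｄ！');  -- expected to return: Hello World!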

2. Reading a file from HDFS

package com.scb.udf;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

public class HadoopMap extends UDF {

    public Text evaluate(final Text input) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        Path path = new Path("/home/hypers/tmp/test.properties");
        Properties properties = new Properties();
        // fs.open() returns an FSDataInputStream, which is already an InputStream;
        // close it after loading the properties
        try (InputStream inputStream = fs.open(path)) {
            properties.load(inputStream);
        }
        String value = properties.getProperty(input.toString(), "Not Found");
        return new Text(value);
    }

}
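This UDF treats the HDFS file as a standard Java properties file (one key=value pair per line) and returns the value mapped to the input key, or "Not Found" when the key is missing. A sketch of what /home/hypers/tmp/test.properties might contain (the keys and values are assumptions for illustration):

Jason=developer
Tom=tester

With such a file, select hadoopmap("Jason") would return developer. Note that the file is re-read from HDFS on every call to evaluate, which is simple but adds overhead when the UDF is applied to many rows.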

Registering the function

hadoop fs -put -f full2half-1.0-SNAPSHOT.jar  /home/hypers/lib

beeline -u jdbc:hive2://******:10000/ -n hdfs -p ****

create function hadoopmap as 'com.scb.udf.HadoopMap' using jar 'hdfs:///home/hypers/lib/full2half-1.0-SNAPSHOT.jar';

select hadoopmap("Jason");
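While iterating on the code, a session-scoped registration avoids updating the permanent function in the metastore. A sketch using the same jar and class (the temporary function name here is made up):

add jar hdfs:///home/hypers/lib/full2half-1.0-SNAPSHOT.jar;

create temporary function hadoopmap_tmp as 'com.scb.udf.HadoopMap';

select hadoopmap_tmp("Jason");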

 
