13: Using UDFs from the SQL client (demo)

1: ScalarFunction: this kind of UDF is one-to-one; it takes a single input value and returns a single value.

import org.apache.flink.table.functions.ScalarFunction;

// Scalar UDF: maps one String to one int (its hash code scaled by a factor).
public class Hashstring extends ScalarFunction {

    private int factor = 12;

    public Hashstring() {}

    public Hashstring(int factor) {
        this.factor = factor;
    }

    public int eval(String s) {
        return s.hashCode() * factor;
    }
}
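Besides the SQL client, the same scalar UDF can also be registered and used through the Table API. The following is only a minimal sketch, assuming Flink 1.13+ and that the Hashstring class is on the classpath; the class name HashstringTableApiDemo is illustrative and not part of the original demo.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

// Illustrative class name; assumes the Hashstring class above is on the classpath.
public class HashstringTableApiDemo {

    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());

        // Same effect as the CREATE TEMPORARY FUNCTION statement used in the SQL client in step 3.
        tEnv.createTemporaryFunction("hash_compute", Hashstring.class);

        // Once a table such as the orders table from step 3 exists,
        // the function can be used in a regular SQL query, e.g.:
        // tEnv.executeSql("SELECT hash_compute(product_name) FROM orders").print();
    }
}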

2: TableFunction is one-to-many. The demo below takes a String input and returns rows of type Row with two columns.

import org.apache.flink.table.annotation.DataTypeHint;
import org.apache.flink.table.annotation.FunctionHint;
import org.apache.flink.table.functions.TableFunction;
import org.apache.flink.types.Row;

// Table UDF: the output row type (two STRING columns) is declared via @FunctionHint.
@FunctionHint(output = @DataTypeHint("ROW<first STRING, name STRING>"))
public class MyFunction extends TableFunction<Row> {

    public void eval(String value) {
        // Emit one output row for this input value.
        collect(Row.of("first", value));
    }
}
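The demo above always emits exactly one row per input value. To make the one-to-many behavior more visible, here is a hedged variant (the class SplitFunction is hypothetical, not part of the original post) that emits one row per word of its input:

import org.apache.flink.table.annotation.DataTypeHint;
import org.apache.flink.table.annotation.FunctionHint;
import org.apache.flink.table.functions.TableFunction;
import org.apache.flink.types.Row;

// Hypothetical variant: a single input string may produce many output rows,
// because collect() can be called any number of times inside eval().
@FunctionHint(output = @DataTypeHint("ROW<word STRING, length INT>"))
public class SplitFunction extends TableFunction<Row> {

    public void eval(String value) {
        for (String word : value.split(" ")) {
            collect(Row.of(word, word.length()));
        }
    }
}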

3: Start the SQL client:    ./bin/sql-client.sh embedded

The compiled UDF classes must be on the SQL client's classpath, for example packaged into a JAR that is placed under Flink's lib/ directory or passed with the -j option.

The following statements are then executed inside the SQL client:

CREATE TEMPORARY FUNCTION hash_compute AS 'Hashstring';
CREATE TEMPORARY FUNCTION my_function AS 'MyFunction';

CREATE TABLE orders (
    order_uid    BIGINT,
    product_name STRING,
    price        DECIMAL(32, 2),
    order_time   TIMESTAMP(3)
) WITH (
    'connector' = 'datagen'
);
 

SELECT hash_compute(product_name) FROM orders;

SELECT col1, col2
FROM orders a
LEFT JOIN LATERAL TABLE(my_function(product_name)) AS T(col1, col2) ON TRUE;
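
For reference, the same workflow can also be run programmatically through a TableEnvironment instead of the interactive SQL client. This is only a sketch, assuming Flink 1.13+ and that the Hashstring and MyFunction classes are on the classpath; the class name SqlUdfDemo is illustrative.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class SqlUdfDemo {

    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());

        // Register both UDFs, mirroring the CREATE TEMPORARY FUNCTION statements above.
        tEnv.createTemporaryFunction("hash_compute", Hashstring.class);
        tEnv.createTemporaryFunction("my_function", MyFunction.class);

        // Create the datagen-backed orders table.
        tEnv.executeSql(
                "CREATE TABLE orders ("
                        + " order_uid BIGINT,"
                        + " product_name STRING,"
                        + " price DECIMAL(32, 2),"
                        + " order_time TIMESTAMP(3)"
                        + ") WITH ('connector' = 'datagen')");

        // Run the lateral-join query; print() streams results to stdout
        // and blocks, since datagen is an unbounded source.
        tEnv.executeSql(
                "SELECT col1, col2 FROM orders "
                        + "LEFT JOIN LATERAL TABLE(my_function(product_name)) "
                        + "AS T(col1, col2) ON TRUE")
                .print();
    }
}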

  

 
