package com.hao.chapter11;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.functions.ScalarFunction;
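// The imports below are not part of the original example; they only support the
// additional sketches further down (Table API call syntax and a type-hint variant).
import org.apache.flink.table.annotation.DataTypeHint;
import org.apache.flink.table.annotation.InputGroup;

import static org.apache.flink.table.api.Expressions.$;
import static org.apache.flink.table.api.Expressions.call;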
public class UdfTest_ScalarFunction {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        // 1. Define the event-time attribute directly in the CREATE TABLE DDL
        String createDDL = "CREATE TABLE clickTable (" +
                " user_name STRING," +
                " url STRING," +
                " ts BIGINT," +
                " et AS TO_TIMESTAMP(FROM_UNIXTIME(ts / 1000))," + // event time: FROM_UNIXTIME() expects seconds (hence ts / 1000) and yields a 'yyyy-MM-dd HH:mm:ss' string
                " WATERMARK FOR et AS et - INTERVAL '1' SECOND " + // watermark with a 1-second delay
                ") WITH (" +
                " 'connector' = 'filesystem'," +
                " 'path' = 'input/clicks.txt'," +
                " 'format' = 'csv'" +
                ")";
        tableEnv.executeSql(createDDL);
        // 2. Register the custom scalar function under the name "MyHash"
        tableEnv.createTemporarySystemFunction("MyHash", MyHashFunction.class);
        // 3. Call the UDF in a SQL query (select each user_name together with its hash code)
        Table resultTable = tableEnv.sqlQuery("select user_name, MyHash(user_name) from clickTable");
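        // A sketch, not part of the original example: the same function can also be invoked
        // through the Table API call syntax, either by its registered name or inline by class
        // (the latter needs no prior registration). The tables are only built, not printed,
        // so the program output stays identical to the original.
        Table apiResult = tableEnv.from("clickTable")
                .select($("user_name"), call("MyHash", $("user_name")));
        Table inlineResult = tableEnv.from("clickTable")
                .select($("user_name"), call(MyHashFunction.class, $("user_name")));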
        // 4. Convert the result table to a DataStream and print it
        tableEnv.toDataStream(resultTable).print();

        env.execute();
    }
    // Custom scalar function: returns the hash code of the input string
    public static class MyHashFunction extends ScalarFunction {
        public int eval(String str) {
            return str.hashCode();
        }
    }
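
    // A sketch of a more generic variant (an assumption, not part of the original example):
    // InputGroup.ANY lets a single eval method accept any data type. This class is neither
    // registered nor called above; it only illustrates the type-hint mechanism.
    public static class AnyTypeHashFunction extends ScalarFunction {
        public int eval(@DataTypeHint(inputGroup = InputGroup.ANY) Object o) {
            return o.hashCode();
        }
    }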
}