Big Data (Flink): Writing Data to a Database

Several ways to write data from Flink into MySQL. Without further ado, straight to the code:

  Required dependencies:
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-jdbc_2.11</artifactId>
            <version>1.14.4</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-jdbc_2.11</artifactId>
            <version>1.9.1</version>
        </dependency>            
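
Note: flink-jdbc is the legacy artifact (renamed to flink-connector-jdbc in Flink 1.11); it is only needed for the JDBCOutputFormat example in approach 2, while flink-connector-jdbc provides the JdbcSink used in approach 1.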

Approach 1: Flink JDBC Connector (JdbcSink)

Usage example:
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env
        .fromElements(...) // elements are POJOs with id, title, author, price and qty fields
        .addSink(JdbcSink.sink(
                "insert into books (id, title, author, price, qty) values (?,?,?,?,?)",
                (ps, t) -> {
                    ps.setInt(1, t.id);
                    ps.setString(2, t.title);
                    ps.setString(3, t.author);
                    ps.setDouble(4, t.price);
                    ps.setInt(5, t.qty);
                },
                new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
                        .withUrl(getDbMetadata().getUrl())                // getDbMetadata() is a helper supplying connection info
                        .withDriverName(getDbMetadata().getDriverClass())
                        .build()));
env.execute();
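
In practice you will usually also pass credentials and batching options to the sink. A minimal sketch, with placeholder URL/credentials and arbitrary batch settings, using the JdbcSink.sink overload that accepts JdbcExecutionOptions:

// Sketch: same sink as above, with explicit batching and credentials (placeholders).
.addSink(JdbcSink.sink(
        "insert into books (id, title, author, price, qty) values (?,?,?,?,?)",
        (ps, t) -> { /* set parameters as above */ },
        JdbcExecutionOptions.builder()
                .withBatchSize(1000)       // flush after 1000 buffered records...
                .withBatchIntervalMs(200)  // ...or after 200 ms, whichever comes first
                .withMaxRetries(3)         // retry transient JDBC failures
                .build(),
        new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
                .withUrl("jdbc:mysql://host:3306/db") // placeholder
                .withDriverName("com.mysql.jdbc.Driver")
                .withUsername("user")                 // placeholder
                .withPassword("password")             // placeholder
                .build()));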

Approach 2: MySQL JDBCOutputFormat (legacy)

Usage example:

 FlinkKafkaConsumer011<String> consumer011 = new FlinkKafkaConsumer011<String>("flink_order", new SimpleStringSchema(), props);
        DataStream<String> stream = env.addSource(consumer011);
        stream.map(new MapFunction<String, Row>() {

            @Override
            public Row map(String s) throws Exception {
                System.out.println(s); // debug: print the raw Kafka record
                Row row = new Row(3);
                int va = 1; // dummy value; a real job would parse the fields out of s
                row.setField(0, va);
                row.setField(1, va);
                row.setField(2, va);
                return row;
            }
        }).writeUsingOutputFormat(JDBCOutputFormat.buildJDBCOutputFormat()
                .setDrivername("com.mysql.jdbc.Driver")
                .setDBUrl("jdbc:mysql://xx")
                .setUsername("xx")
                .setPassword("xx")
                .setQuery("insert into order_cnt(cnt,user,num) values(?,?,?)")
                .setSqlTypes(new int[]{Types.INTEGER, Types.INTEGER, Types.INTEGER})
                .finish());

        try {
            env.execute("flink-test");
        } catch (Exception e) {
            e.printStackTrace();
        }
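
Two caveats with this approach: JDBCOutputFormat buffers rows and writes them in batches (the flush threshold can be tuned on the builder, e.g. .setBatchInterval(100)), and sinks attached via writeUsingOutputFormat do not participate in Flink's checkpointing, so there is no exactly-once guarantee. Also, FlinkKafkaConsumer011 comes from the old Kafka 0.11 connector; on recent Flink versions you would use FlinkKafkaConsumer from flink-connector-kafka instead.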

Approach 3: Custom sink (MyBatis integration)

 Flink job code:
     DataStreamSource<HttpFailEntity> dataStreamSource = env.addSource(new Source());
        dataStreamSource.addSink(new MybatisSink("com.example.springbootflink.dao.mapper.save"));
        JobExecutionResult result = env.execute("My Flink Job");
        System.out.println("The MyBatis job took " + result.getNetRuntime() + " ms to execute");
Custom sink class:
public class MybatisSink extends RichSinkFunction<HttpFailEntity> {

    private SqlSessionFactory sqlSessionFactory;

    private SqlSession sqlSession;

    /**
     * ID of the MyBatis statement to execute (mapper namespace + statement id)
     */
    private String sqlId;

    public MybatisSink(String sqlId) {
        this.sqlId = sqlId;
    }

    @Override
    public void open(Configuration parameters) throws Exception {
        sqlSessionFactory = MybatisSqlSessionFactory.sqlSessionFactory;
        // one auto-commit session per sink subtask
        sqlSession = sqlSessionFactory.openSession(true);
    }

    @Override
    public void close() throws Exception {
        sqlSession.close();
    }
    /**
     * Write one record to MySQL per invocation
     * @param value   the record to persist
     * @param context sink context
     */
    @Override
    public void invoke(HttpFailEntity value, Context context) {
        try {
            sqlSession.insert(sqlId, value);
            sqlSession.commit(); // effectively a no-op with the auto-commit session opened above
        } catch (Exception ex) {
            // swallowing the exception silently drops the record; rethrow in production so Flink can retry
            ex.printStackTrace();
        }
    }

}
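
Since each invoke() issues a single INSERT and commit, throughput can suffer under load. A possible batching variant (a sketch, not from the original post; the flush threshold of 100 is arbitrary) opens the session with ExecutorType.BATCH and commits every N records:

// Sketch: batching variant of the open()/invoke() methods above.
// Requires: import org.apache.ibatis.session.ExecutorType;
private transient int pending;

@Override
public void open(Configuration parameters) throws Exception {
    sqlSessionFactory = MybatisSqlSessionFactory.sqlSessionFactory;
    sqlSession = sqlSessionFactory.openSession(ExecutorType.BATCH, false); // manual commit, batched statements
}

@Override
public void invoke(HttpFailEntity value, Context context) {
    sqlSession.insert(sqlId, value);
    if (++pending >= 100) { // flush one batch of 100 rows
        sqlSession.commit();
        pending = 0;
    }
}

A real implementation would also commit any remaining buffered rows in close().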
MyBatis configuration file (mybatis_conf.xml):
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE configuration PUBLIC "-//mybatis.org//DTD SQL Map Config 3.0//EN"
        "http://mybatis.org/dtd/mybatis-3-config.dtd">
<configuration>
    <properties resource="db.properties"/> <!-- placeholder: your database properties file -->
    <settings>
        <setting name="useGeneratedKeys" value="true"/>
        <setting name="defaultExecutorType" value="REUSE"/>
        <!-- <setting name="logImpl" value="STDOUT_LOGGING"/>  打印查询语句 -->
    </settings>

    <environments default="default">
        <environment id="default">
            <transactionManager type="JDBC"/>
            <dataSource type="com.example.springbootflink.flink.DruidDataSourceFactory">
                <property name="driverClassName" value="${db.driver}"/>
                <property name="url" value="${mysql.master.url}"/>
                <property name="username" value="${mysql.master.username}"/>
                <property name="password" value="${mysql.master.password}"/>
            </dataSource>
        </environment>
    </environments>
    <mappers>
        <mapper resource="mapper/HttpFailEntity.xml"/>
    </mappers>
</configuration>
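
The config registers mapper/HttpFailEntity.xml, which the original post does not show. A minimal sketch, reusing the statement id "save" and the namespace from the Flink job code above (the table name, column names, and entity package are assumptions):

<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
        "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.example.springbootflink.dao.mapper">
    <!-- id "save" matches the sqlId passed to MybatisSink -->
    <insert id="save" parameterType="com.example.springbootflink.HttpFailEntity">
        insert into http_fail (id, url, error_msg)
        values (#{id}, #{url}, #{errorMsg})
    </insert>
</mapper>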


MyBatis helper classes:
public class DruidDataSourceFactory extends PooledDataSourceFactory {
    public DruidDataSourceFactory() {
        // Swap MyBatis's pooled data source for Druid; the <property> values from the
        // XML config are applied to the DruidDataSource via its matching setters.
        this.dataSource = new DruidDataSource();
    }
}



public class MybatisSqlSessionFactory {

    // One process-wide SqlSessionFactory, built from mybatis_conf.xml on the classpath.
    public static SqlSessionFactory sqlSessionFactory;

    static {
        try (InputStream in = Resources.getResourceAsStream("mybatis_conf.xml")) {
            sqlSessionFactory = new SqlSessionFactoryBuilder().build(in);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}