• Flink 1.11 Table API: writing from Kafka to MySQL


    pom.xml

    <?xml version="1.0" encoding="UTF-8"?>
    <project xmlns="http://maven.apache.org/POM/4.0.0"
             xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
             xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
        <parent>
            <artifactId>stream</artifactId>
            <groupId>com.dark</groupId>
            <version>1.0-SNAPSHOT</version>
        </parent>
        <modelVersion>4.0.0</modelVersion>
    
        <artifactId>stream-job</artifactId>
    
        <properties>
            <flink.version>1.11.2</flink.version>
        </properties>
    
        <dependencies>
            <dependency>
                <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner-blink_2.12</artifactId>
                <version>${flink.version}</version>
            </dependency>
    
            <dependency>
                <groupId>org.apache.flink</groupId>
                <artifactId>flink-connector-kafka_2.12</artifactId>
                <version>${flink.version}</version>
            </dependency>
    
            <dependency>
                <groupId>org.apache.flink</groupId>
                <artifactId>flink-clients_2.12</artifactId>
                <version>${flink.version}</version>
            </dependency>
    
            <dependency>
                <groupId>org.apache.flink</groupId>
                <artifactId>flink-json</artifactId>
                <version>${flink.version}</version>
            </dependency>
    
            <dependency>
            <groupId>org.apache.cassandra</groupId>
                <artifactId>cassandra-all</artifactId>
                <version>0.8.1</version>
                <exclusions>
                    <exclusion>
                        <groupId>org.slf4j</groupId>
                        <artifactId>slf4j-log4j12</artifactId>
                    </exclusion>
                    <exclusion>
                        <groupId>log4j</groupId>
                        <artifactId>log4j</artifactId>
                    </exclusion>
                </exclusions>
            </dependency>
    
            <dependency>
                <groupId>org.apache.flink</groupId>
                <artifactId>flink-connector-jdbc_2.12</artifactId>
                <version>${flink.version}</version>
            </dependency>
    
            <dependency>
                <groupId>mysql</groupId>
                <artifactId>mysql-connector-java</artifactId>
                <version>8.0.22</version>
            </dependency>
    
            <dependency>
                <groupId>org.apache.flink</groupId>
                <artifactId>flink-runtime-web_2.12</artifactId>
                <version>${flink.version}</version>
            </dependency>
        </dependencies>
    </project>

    StreamJob.java

    package com.dark;
    
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
    import org.apache.flink.table.api.EnvironmentSettings;
    import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
    
    /**
     * @Author Dark
     * @CreateTime 2020/11/6
     */
    public class StreamJob {
    
        public static void main(String[] args) throws Exception {
    
            StreamExecutionEnvironment bsEnv = StreamExecutionEnvironment.getExecutionEnvironment();
            EnvironmentSettings bsSettings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build();
            StreamTableEnvironment bsTableEnv = StreamTableEnvironment.create(bsEnv, bsSettings);
    
            bsTableEnv.executeSql("CREATE TABLE user_behavior (
    " +
                    "    user_id BIGINT,
    " +
                    "    item_id BIGINT,
    " +
                    "    category_id BIGINT,
    " +
                    "    behavior STRING,
    " +
                    "    ts TIMESTAMP(3),
    " +
                    "    proctime AS PROCTIME(),   -- generates processing-time attribute using computed column
    " +
                    "    WATERMARK FOR ts AS ts - INTERVAL '5' SECOND  -- defines watermark on ts column, marks ts as event-time attribute
    " +
                    ") WITH (
    " +
                    "    'connector' = 'kafka',  -- using kafka connector
    " +
                    "    'topic' = 'user_behavior',  -- kafka topic
    " +
                    "    'scan.startup.mode' = 'earliest-offset',  -- reading from the beginning
    " +
                    "    'properties.bootstrap.servers' = 'bigdata1:9092',  -- kafka broker address
    " +
                    "    'properties.zookeeper' = 'bigdata1:2181',  -- zookeeper address
    " +
                    "    'format' = 'json'  -- the data format is json
    " +
                    ")");
    
            bsTableEnv.executeSql("CREATE TABLE buy_cnt_per_hour (
    " +
                    "    hour_of_day BIGINT,
    " +
                    "    buy_cnt BIGINT
    " +
                    ") WITH (
    " +
                    "   'connector' = 'jdbc',
    " +
                    "   'url' = 'jdbc:mysql://bigdata1:3306/stream?createDatabaseIfNotExist=true&serverTimezone=UTC&useUnicode=true&characterEncoding=utf8&useSSL=false&AllowPublicKeyRetrieval=True',
    " +
                    "   'table-name' = 'buy_cnt_per_hour' ,
    " +
                    "   'driver' = 'com.mysql.cj.jdbc.Driver',
    " +
                    "   'username' = 'root', 
    " +
                    "   'password' = '123456' 
    " +
                    ")");
    
           bsTableEnv.executeSql("INSERT INTO buy_cnt_per_hour
    " +
                   "SELECT HOUR(TUMBLE_START(ts, INTERVAL '1' HOUR)), COUNT(*)
    " +
                   "FROM user_behavior
    " +
                   "WHERE behavior = 'buy'
    " +
                   "GROUP BY TUMBLE(ts, INTERVAL '1' HOUR)");
    
            bsEnv.execute("");
        }
    }
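
    For a quick test, the user_behavior topic just needs JSON records whose fields match the DDL above, for example {"user_id": 1001, "item_id": 2001, "category_id": 42, "behavior": "buy", "ts": "2020-11-06 12:01:00"} (the values are illustrative; with the JSON format's default SQL timestamp style, ts is written as yyyy-MM-dd HH:mm:ss[.SSS]).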
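    The jdbc connector writes into an existing MySQL table, so buy_cnt_per_hour has to be created up front (createDatabaseIfNotExist=true in the URL only creates the stream database, not the table). A minimal sketch of the target table, with column types simply mirroring the Flink sink schema above:

    CREATE DATABASE IF NOT EXISTS stream;
    USE stream;
    -- both columns mirror the BIGINT fields declared in the Flink sink DDL
    CREATE TABLE IF NOT EXISTS buy_cnt_per_hour (
        hour_of_day BIGINT,
        buy_cnt     BIGINT
    );

    Since the hourly tumbling window emits one final row per hour, plain appends are enough here; a primary key would only be needed if the query were changed to an updating aggregation.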
  • Original article: https://www.cnblogs.com/vip-nange/p/13944017.html