[FLINK-27509] update table-walkthrough playground for Flink 1.14 master release-1.14
author Shubham Bansal <illusionist.neo@gmail.com>
Mon, 23 May 2022 09:52:46 +0000 (02:52 -0700)
committer GitHub <noreply@github.com>
Mon, 23 May 2022 09:52:46 +0000 (11:52 +0200)
README.md
docker/data-generator/Dockerfile
table-walkthrough/Dockerfile
table-walkthrough/docker-compose.yml
table-walkthrough/pom.xml
table-walkthrough/src/main/java/org/apache/flink/playgrounds/spendreport/SpendReport.java

index 84937d4d87467bff8741fa482e8719822dfc4868..c7b6fa729dbbd24a1ac94d3bbadf2e415a80c68e 100644 (file)
--- a/README.md
+++ b/README.md
@@ -13,7 +13,7 @@ Currently, the following playgrounds are available:
 Flink job. The playground is presented in detail in
 ["Flink Operations Playground"](https://ci.apache.org/projects/flink/flink-docs-release-1.14/docs/try-flink/flink-operations-playground), which is part of the _Try Flink_ section of the Flink documentation.
 
-* The **Table Walkthrough** (in the `table-walkthrough` folder) shows to use the Table API to build an analytics pipeline that reads streaming data from Kafka and writes results to MySQL, along with a real-time dashboard in Grafana. The walkthrough is presented in detail in ["Real Time Reporting with the Table API"](https://ci.apache.org/projects/flink/flink-docs-release-1.13/docs/try-flink/table_api), which is part of the _Try Flink_ section of the Flink documentation.
+* The **Table Walkthrough** (in the `table-walkthrough` folder) shows how to use the Table API to build an analytics pipeline that reads streaming data from Kafka and writes results to MySQL, along with a real-time dashboard in Grafana. The walkthrough is presented in detail in ["Real Time Reporting with the Table API"](https://ci.apache.org/projects/flink/flink-docs-release-1.14/docs/try-flink/table_api), which is part of the _Try Flink_ section of the Flink documentation.
 
 * The **PyFlink Walkthrough** (in the `pyflink-walkthrough` folder) provides a complete example that uses the Python API, and guides you through the steps needed to run and manage Pyflink Jobs. The pipeline used in this walkthrough reads data from Kafka, performs aggregations, and writes results to Elasticsearch that are visualized with Kibana. This walkthrough is presented in detail in the [pyflink-walkthrough README](pyflink-walkthrough).
 
index 5434adec75432494b0e6b9daf64be726ca1d7d3c..124033c4e98c54468a07de440ed90c73ed489b31 100644 (file)
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-FROM maven:3.6-jdk-8-slim AS builder
+FROM maven:3.8-jdk-8-slim AS builder
 
 # Get data producer code and compile it
 COPY ./src /opt/data-producer/src
index 781cb562e7f8244abe41231cd4cf65ec7af62f41..f1854463b42f0dd8576ff42bbeeeebd663ec4c44 100644 (file)
 # limitations under the License.
 ################################################################################
 
-FROM maven:3.6-jdk-8-slim AS builder
+FROM maven:3.8-jdk-8-slim AS builder
 
 COPY ./pom.xml /opt/pom.xml
 COPY ./src /opt/src
 RUN cd /opt; mvn clean install -Dmaven.test.skip
 
-FROM apache/flink:1.13.1-scala_2.12-java8
+FROM apache/flink:1.14.4-scala_2.12-java8
 
 # Download connector libraries
-RUN wget -P /opt/flink/lib/ https://repo.maven.apache.org/maven2/org/apache/flink/flink-sql-connector-kafka_2.12/1.13.1/flink-sql-connector-kafka_2.12-1.13.1.jar; \
-    wget -P /opt/flink/lib/ https://repo.maven.apache.org/maven2/org/apache/flink/flink-connector-jdbc_2.12/1.13.1/flink-connector-jdbc_2.12-1.13.1.jar; \
-    wget -P /opt/flink/lib/ https://repo.maven.apache.org/maven2/org/apache/flink/flink-csv/1.13.1/flink-csv-1.13.1.jar; \
+RUN wget -P /opt/flink/lib/ https://repo.maven.apache.org/maven2/org/apache/flink/flink-sql-connector-kafka_2.12/1.14.4/flink-sql-connector-kafka_2.12-1.14.4.jar; \
+    wget -P /opt/flink/lib/ https://repo.maven.apache.org/maven2/org/apache/flink/flink-connector-jdbc_2.12/1.14.4/flink-connector-jdbc_2.12-1.14.4.jar; \
+    wget -P /opt/flink/lib/ https://repo.maven.apache.org/maven2/org/apache/flink/flink-csv/1.14.4/flink-csv-1.14.4.jar; \
     wget -P /opt/flink/lib/ https://repo.maven.apache.org/maven2/mysql/mysql-connector-java/8.0.19/mysql-connector-java-8.0.19.jar;
 
 COPY --from=builder /opt/target/spend-report-*.jar /opt/flink/usrlib/spend-report.jar
index 4f8ff6d2961c9b56ee35abdb2cd797e0cf8455d9..194250a2b3d4bb7b56ebc30fa27f1a4ca82aa900 100644 (file)
@@ -19,7 +19,7 @@
 version: '2.1'
 services:
   jobmanager:
-    image: apache/flink-table-walkthrough:1-FLINK-1.13-scala_2.12
+    image: apache/flink-table-walkthrough:1-FLINK-1.14-scala_2.12
     build: .
     hostname: "jobmanager"
     expose:
@@ -33,7 +33,7 @@ services:
       - kafka
       - mysql
   taskmanager:
-    image: apache/flink-table-walkthrough:1-FLINK-1.13-scala_2.12
+    image: apache/flink-table-walkthrough:1-FLINK-1.14-scala_2.12
     build: .
     expose:
       - "6121"
@@ -50,7 +50,7 @@ services:
     ports:
       - "2181:2181"
   kafka:
-    image: wurstmeister/kafka:2.12-2.2.1
+    image: wurstmeister/kafka:2.13-2.8.1
     ports:
       - "9092:9092"
     depends_on:
index 9c953372359031cf635f00e59642f6c01eb5a099..0ae9ae9f5948ac7c1fb86b65b958ab6ac374aab6 100644 (file)
@@ -30,7 +30,7 @@ under the License.
 
        <properties>
                <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-               <flink.version>1.13.1</flink.version>
+               <flink.version>1.14.4</flink.version>
                <java.version>1.8</java.version>
                <scala.binary.version>2.12</scala.binary.version>
                <maven.compiler.source>${java.version}</maven.compiler.source>
@@ -72,12 +72,6 @@ under the License.
             <version>${flink.version}</version>
             <scope>test</scope>
         </dependency>
-        <dependency>
-            <groupId>org.apache.flink</groupId>
-            <artifactId>flink-table-planner-blink_${scala.binary.version}</artifactId>
-            <version>${flink.version}</version>
-            <scope>test</scope>
-               </dependency>
         <dependency>
             <groupId>org.apache.flink</groupId>
             <artifactId>flink-clients_${scala.binary.version}</artifactId>
index 1a8cb8320384d8e3753710171ac882c85f18b5ab..d25710221990dd1def68ef8705186b720439a51d 100644 (file)
@@ -45,6 +45,7 @@ public class SpendReport {
                 "    'connector' = 'kafka',\n" +
                 "    'topic'     = 'transactions',\n" +
                 "    'properties.bootstrap.servers' = 'kafka:9092',\n" +
+                "    'scan.startup.mode' = 'earliest-offset',\n" +
                 "    'format'    = 'csv'\n" +
                 ")");