<!-- Flink_pom: Maven POM snippet (repositories / properties / dependencies / build) -->
<!-- Repository resolution order: aliyun mirror, Apache snapshots, Cloudera. -->
<repositories>
    <repository>
        <id>aliyun</id>
        <!-- FIX: switched from http:// to https:// — Maven 3.8.0+ blocks
             plain-HTTP repository URLs by default, so the old http URL would
             fail resolution on modern Maven (this project references
             apache-maven-3.8.1 elsewhere). The aliyun mirror serves HTTPS. -->
        <url>https://maven.aliyun.com/nexus/content/groups/public/</url>
    </repository>
    <repository>
        <id>apache</id>
        <url>https://repository.apache.org/content/repositories/snapshots/</url>
    </repository>
    <repository>
        <id>cloudera</id>
        <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
    </repository>
</repositories>
<!-- Version pins shared by the dependency list below. -->
<properties>
<encoding>UTF-8</encoding>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
<java.version>1.8</java.version>
<!-- Scala binary version; must match the _2.12 artifact suffixes used below. -->
<scala.version>2.12</scala.version>
<flink.version>1.12.0</flink.version>
</properties>
<dependencies>
<!-- Flink core: client, Scala/Java batch APIs, streaming APIs. All _2.12
     suffixes match the scala.version property declared above. -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-clients_2.12</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-scala_2.12</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-java</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-scala_2.12</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_2.12</artifactId>
<version>${flink.version}</version>
</dependency>
<!-- Table API bridges for Scala and Java. -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-api-scala-bridge_2.12</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-api-java-bridge_2.12</artifactId>
<version>${flink.version}</version>
</dependency>
<!-- Flink table planner (the "old" planner, pre-1.9). -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-planner_2.12</artifactId>
<version>${flink.version}</version>
</dependency>
<!-- Blink table planner (the default since Flink 1.11). -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-planner-blink_2.12</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-common</artifactId>
<version>${flink.version}</version>
</dependency>
<!--<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-cep_2.12</artifactId>
<version>${flink.version}</version>
</dependency>-->
<!-- Flink connectors -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka_2.12</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-sql-connector-kafka_2.12</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-jdbc_2.12</artifactId>
<version>${flink.version}</version>
</dependency>
<!-- Format libraries for the table/SQL connectors. -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-csv</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-json</artifactId>
<version>${flink.version}</version>
</dependency>
<!-- <dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-filesystem_2.12</artifactId>
<version>${flink.version}</version>
</dependency>-->
<!--<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-jdbc_2.12</artifactId>
<version>${flink.version}</version>
</dependency>-->
<!--<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-parquet_2.12</artifactId>
<version>${flink.version}</version>
</dependency>-->
<!--<dependency>
<groupId>org.apache.avro</groupId>
<artifactId>avro</artifactId>
<version>1.9.2</version>
</dependency>
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-avro</artifactId>
<version>1.10.0</version>
</dependency>-->
<!-- Bahir Redis connector is only published with the _2.11 suffix; the
     exclusions below drop its transitive Scala-2.11 Flink jars so they
     cannot clash with the _2.12 Flink artifacts declared above. -->
<dependency>
<groupId>org.apache.bahir</groupId>
<artifactId>flink-connector-redis_2.11</artifactId>
<version>1.0</version>
<exclusions>
<exclusion>
<artifactId>flink-streaming-java_2.11</artifactId>
<groupId>org.apache.flink</groupId>
</exclusion>
<exclusion>
<artifactId>flink-runtime_2.11</artifactId>
<groupId>org.apache.flink</groupId>
</exclusion>
<exclusion>
<artifactId>flink-core</artifactId>
<groupId>org.apache.flink</groupId>
</exclusion>
<exclusion>
<artifactId>flink-java</artifactId>
<groupId>org.apache.flink</groupId>
</exclusion>
</exclusions>
</dependency>
<!-- Hive integration (metastore + exec) and a Hadoop uber jar. -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-hive_2.12</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-metastore</artifactId>
<version>2.1.0</version>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-exec</artifactId>
<version>2.1.0</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-shaded-hadoop-2-uber</artifactId>
<version>2.7.5-10.0</version>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-client</artifactId>
<version>2.1.0</version>
</dependency>
<!-- NOTE(review): 5.1.x is the legacy MySQL driver line; the 8.x line was
     tried before (see commented version) — confirm which server version
     this must talk to before upgrading. -->
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>5.1.38</version>
<!--<version>8.0.20</version>-->
</dependency>
<!-- High-performance async toolkit: Vert.x -->
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-core</artifactId>
<version>3.9.0</version>
</dependency>
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-jdbc-client</artifactId>
<version>3.9.0</version>
</dependency>
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-redis-client</artifactId>
<version>3.9.0</version>
</dependency>
<!-- Logging (slf4j over log4j 1.x, runtime scope only). -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.7.7</version>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
<scope>runtime</scope>
</dependency>
<!-- NOTE(review): fastjson 1.2.44 has publicly known deserialization CVEs;
     consider upgrading to a patched release before parsing untrusted JSON. -->
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>1.2.44</version>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.18.2</version>
<scope>provided</scope>
</dependency>
<!-- Reference: https://blog.csdn.net/f641385712/article/details/84109098 -->
<!--<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-collections4</artifactId>
<version>4.4</version>
</dependency>-->
<!--<dependency>
<groupId>org.apache.thrift</groupId>
<artifactId>libfb303</artifactId>
<version>0.9.3</version>
<type>pom</type>
<scope>provided</scope>
</dependency>-->
<!--<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>28.2-jre</version>
</dependency>-->
</dependencies>
<build>
<sourceDirectory>src/main/java</sourceDirectory>
<plugins>
<!-- Compiler plugin: compiles for Java 8 (matches the java.version property). -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.5.1</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
<!--<encoding>${project.build.sourceEncoding}</encoding>-->
</configuration>
</plugin>
<!-- Surefire: console-only reporting; picks up *Test.* and *Suite.* classes. -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.18.1</version>
<configuration>
<useFile>false</useFile>
<disableXmlReport>true</disableXmlReport>
<includes>
<include>**/*Test.*</include>
<include>**/*Suite.*</include>
</includes>
</configuration>
</plugin>
<!-- Shade plugin: builds a fat jar containing all dependencies at package time. -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>2.3</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<filters>
<filter>
<artifact>*:*</artifact>
<excludes>
<!-- Strip signature files from merged jars so the fat jar is not
rejected as tampered; manual equivalent:
zip -d learn_spark.jar META-INF/*.RSA META-INF/*.DSA META-INF/*.SF -->
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
</excludes>
</filter>
</filters>
<transformers>
<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
<!-- Entry-point class for the jar manifest (optional) — currently left
empty, so the jar must be run with an explicit main class. -->
<mainClass></mainClass>
</transformer>
</transformers>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
// Flink_gradle: equivalent Gradle (Groovy DSL) build script
// Build plugins. FIX: the legacy `apply plugin: 'java'/'scala'/'idea'` lines
// were removed — those plugins are already applied by this plugins {} block,
// so re-applying them was redundant.
plugins {
    id 'java'
    id 'scala'
    id 'maven-publish'
    id 'idea'
    id 'com.github.johnrengelman.shadow' version '4.0.2'
}
group 'com.gtja'
version '1.0.0'
// Compile for Java 8 bytecode (matches the Maven build above).
sourceCompatibility = 1.8
targetCompatibility = 1.8
// Custom configuration mimicking Maven's "provided" scope.
configurations {
    provided
}
// Repository resolution order: local Maven cache, Maven Central, then a
// local directory.
repositories {
mavenLocal()
mavenCentral()
// NOTE(review): absolute machine-specific path — only resolves on this one
// workstation; verify whether this is still needed given mavenLocal() above.
maven {
url '/Users/ailian/Documents/export/server/apache-maven-3.8.1/repository'
}
}
// Project dependencies. All Flink artifacts must share the Scala 2.12 binary
// suffix to match scala-library 2.12.x — mixing _2.11 and _2.12 Flink jars on
// one classpath causes binary-incompatibility failures at runtime.
dependencies {
    // implementation fileTree(dir: 'lib', include: '*.jar')
    /* Scala */
    implementation 'org.scala-lang:scala-library:2.12.8'
    testImplementation group: 'org.scalatest', name: 'scalatest_2.12', version: '3.2.10'
    implementation group: 'org.scala-lang.modules', name: 'scala-xml_2.12', version: '2.0.1'
    /* Commons */
    implementation group: 'commons-io', name: 'commons-io', version: '2.11.0'
    /* Flink 1.12.0 */
    // Both the Java and Scala APIs are configured (the project contains code
    // in both languages); normally configuring one is enough.
    // Docs: https://ci.apache.org/projects/flink/flink-docs-release-1.12/zh/dev/project-configuration.html
    implementation group: 'org.apache.flink', name: 'flink-scala_2.12', version: '1.12.0'
    implementation group: 'org.apache.flink', name: 'flink-java', version: '1.12.0'
    compileOnly group: 'org.apache.flink', name: 'flink-streaming-scala_2.12', version: '1.12.0' // Scala
    compileOnly group: 'org.apache.flink', name: 'flink-streaming-java_2.12', version: '1.12.0' // Java
    // Flink client
    implementation group: 'org.apache.flink', name: 'flink-clients_2.12', version: '1.12.0'
    // Flink Table API
    implementation group: 'org.apache.flink', name: 'flink-table-api-scala-bridge_2.12', version: '1.12.0'
    compileOnly group: 'org.apache.flink', name: 'flink-table-api-java-bridge_2.12', version: '1.12.0'
    // Blink planner (the default since Flink 1.11)
    testImplementation group: 'org.apache.flink', name: 'flink-table-planner-blink_2.12', version: '1.12.0'
    compileOnly group: 'org.apache.flink', name: 'flink-table-common', version: '1.12.0'
    // Flink CEP
    compileOnly group: 'org.apache.flink', name: 'flink-cep_2.12', version: '1.12.0'
    // Kafka connectors. FIX: previously used the _2.11 suffix "to match the
    // kafka_2.11-1.0.0 broker", but the suffix denotes the Scala version the
    // *Flink artifact* was built against, not the broker's Scala version; it
    // must be _2.12 like every other Flink dependency here. The connector is
    // wire-compatible with Kafka 0.10+ brokers regardless of their Scala build.
    implementation group: 'org.apache.flink', name: 'flink-connector-kafka_2.12', version: '1.12.0'
    compileOnly group: 'org.apache.flink', name: 'flink-sql-connector-kafka_2.12', version: '1.12.0'
    compileOnly group: 'org.apache.flink', name: 'flink-connector-jdbc_2.12', version: '1.12.0'
    testImplementation group: 'org.apache.flink', name: 'flink-csv', version: '1.12.0'
    testImplementation group: 'org.apache.flink', name: 'flink-json', version: '1.12.0'
    /* Hive */
    compileOnly group: 'org.apache.flink', name: 'flink-connector-hive_2.12', version: '1.12.0'
    implementation group: 'org.apache.hive', name: 'hive-metastore', version: '2.1.0'
    // implementation group: 'org.apache.hive', name: 'hive-exec', version: '2.1.0'
    /* Hadoop */
    // compileOnly group: 'org.apache.flink', name: 'flink-shaded-hadoop-2-uber', version: '2.7.5-10.0'
    /* HBase */
    implementation group: 'org.apache.hbase', name: 'hbase-client', version: '2.1.0'
    /* MySQL */
    implementation group: 'mysql', name: 'mysql-connector-java', version: '8.0.13'
    /* High-performance async toolkit: Vert.x */
    implementation group: 'io.vertx', name: 'vertx-core', version: '3.9.0'
    implementation group: 'io.vertx', name: 'vertx-jdbc-client', version: '3.9.0'
    implementation group: 'io.vertx', name: 'vertx-redis-client', version: '3.9.0'
    /* Logging */
    testImplementation group: 'org.slf4j', name: 'slf4j-log4j12', version: '1.7.7'
    implementation group: 'log4j', name: 'log4j', version: '1.2.17'
    // Miscellaneous
    testImplementation group: 'org.pentaho', name: 'pentaho-aggdesigner-algorithm', version: '5.1.5-jhyde'
    // NOTE(review): fastjson 1.2.44 has publicly known deserialization CVEs;
    // upgrade to a patched release before parsing untrusted JSON.
    implementation group: 'com.alibaba', name: 'fastjson', version: '1.2.44'
    compileOnly group: 'org.projectlombok', name: 'lombok', version: '1.18.2'
}
jar {
// Details: https://docs.gradle.org/current/dsl/org.gradle.api.tasks.bundling.Jar.html
archivesBaseName = 'Example'// base file name of the archive
manifest { // jar manifest configuration
attributes(
"Manifest-Version": 1.0,
'Main-Class': 'com.gtja.main.Flinkdemo' // class containing the main method
)
}
// Duplicate-file handling: with 'include', the last duplicate encountered wins.
duplicatesStrategy = 'include' // <<---- addition
// Bundle the runtime classpath into the jar (fat jar): directories are copied
// as-is, archive dependencies are unpacked via zipTree.
// NOTE(review): the shadow plugin is applied above — this manual fat-jar logic
// may be redundant with the shadowJar task; confirm which artifact is shipped.
from {
(configurations.runtimeClasspath).collect {
it.isDirectory() ? it : zipTree(it)
}
}
}