祝威廉 / spark-binlog: pom.xml.versionsBackup (13.21 KB)
Last commit: WilliamZhu, 2020-03-19 17:47, "update"
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>tech.mlsql</groupId>
<artifactId>spark-binlog_2.11</artifactId>
<packaging>pom</packaging>
<version>1.0.0-SNAPSHOT</version>
<modules>
<module>binlog-common</module>
<module>mysql-binlog</module>
<module>hbase-wal</module>
</modules>
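<!-- Module layout (as implied by the module names): binlog-common carries the shared
binlog-source infrastructure, mysql-binlog the MySQL binlog data source, and
hbase-wal the HBase WAL data source. -->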
<name>Spark MySQL Binlog DataSource</name>
<url>https://github.com/allwefantasy/spark-binlog</url>
<description>
A library for querying Binlog with Apache Spark structured streaming,
for Spark SQL, DataFrames and MLSQL (http://www.mlsql.tech).
</description>
<licenses>
<license>
<name>Apache 2.0 License</name>
<url>http://www.apache.org/licenses/LICENSE-2.0.html</url>
<distribution>repo</distribution>
</license>
</licenses>
<developers>
<developer>
<id>allwefantasy</id>
<name>ZhuHaiLin</name>
<email>allwefantasy@gmail.com</email>
</developer>
</developers>
<scm>
<connection>scm:git:git@github.com:allwefantasy/spark-binlog.git</connection>
<developerConnection>scm:git:git@github.com:allwefantasy/spark-binlog.git</developerConnection>
<url>https://github.com/allwefantasy/spark-binlog</url>
</scm>
<issueManagement>
<url>https://github.com/allwefantasy/spark-binlog/issues</url>
</issueManagement>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<scala.version>2.11.8</scala.version>
<scala.binary.version>2.11</scala.binary.version>
<scala.jline.version>2.11.0-M3</scala.jline.version>
<spark.version>2.4.3</spark.version>
<spark.bigversion>2.4</spark.bigversion>
<mlsql.version>1.2.0</mlsql.version>
<hbase.version>2.0.4</hbase.version>
<guava.version>16.0</guava.version>
<httpclient.version>4.5.3</httpclient.version>
<serviceframework.version>2.0.3</serviceframework.version>
<scope>provided</scope>
<hadoop-client-version>2.6.5</hadoop-client-version>
<common-utils.version>0.2.5</common-utils.version>
<binlog.version>0.18.1</binlog.version>
</properties>
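<!-- The <scope> property above defaults to "provided" and is referenced by the Spark
dependencies below via ${scope}, so the Spark jars are compiled against but not bundled;
they are expected on the cluster classpath at runtime. For a local run the value can be
overridden on the command line, e.g. mvn package -Dscope=compile. -->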
<dependencies>
<dependency>
<groupId>tech.mlsql</groupId>
<artifactId>common-utils_${scala.binary.version}</artifactId>
<version>${common-utils.version}</version>
</dependency>
<dependency>
<groupId>org.scalactic</groupId>
<artifactId>scalactic_${scala.binary.version}</artifactId>
<version>3.0.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.scalatest</groupId>
<artifactId>scalatest_${scala.binary.version}</artifactId>
<version>3.0.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<scope>${scope}</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<scope>${scope}</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-mllib_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<scope>${scope}</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-graphx_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<scope>${scope}</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql-kafka-0-10_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<scope>${scope}</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-catalyst_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<classifier>tests</classifier>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<classifier>tests</classifier>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<classifier>tests</classifier>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.pegdown</groupId>
<artifactId>pegdown</artifactId>
<version>1.6.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>net.sf.json-lib</groupId>
<artifactId>json-lib</artifactId>
<version>2.4</version>
<classifier>jdk15</classifier>
<scope>test</scope>
</dependency>
<dependency>
<groupId>tech.mlsql</groupId>
<artifactId>delta-plus_${scala.binary.version}</artifactId>
<version>0.2.0</version>
<scope>test</scope>
</dependency>
</dependencies>
<profiles>
<profile>
<id>scala-2.12</id>
<properties>
<scala.version>2.12.8</scala.version>
<scala.binary.version>2.12</scala.binary.version>
<scala.jline.version>2.12.1</scala.jline.version>
</properties>
</profile>
<profile>
<id>spark-3.0.0</id>
<properties>
<spark.version>3.0.0-preview2</spark.version>
<spark.bigversion>3.0</spark.bigversion>
</properties>
</profile>
<profile>
<id>disable-java8-doclint</id>
<activation>
<jdk>[1.8,)</jdk>
</activation>
<properties>
<additionalparam>-Xdoclint:none</additionalparam>
<doclint>none</doclint>
</properties>
</profile>
<profile>
<id>release-sign-artifacts</id>
<activation>
<property>
<name>performRelease</name>
<value>true</value>
</property>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-gpg-plugin</artifactId>
<version>1.1</version>
<executions>
<execution>
<id>sign-artifacts</id>
<phase>verify</phase>
<goals>
<goal>sign</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
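<!-- Profile usage: the default build targets Scala 2.11 / Spark 2.4.3; the alternative
versions are opt-in, e.g. mvn clean package -Pscala-2.12 -Pspark-3.0.0. The
release-sign-artifacts profile activates automatically when -DperformRelease=true is
passed, enabling GPG signing during the verify phase. -->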
<build>
<sourceDirectory>src/main/java/</sourceDirectory>
<resources>
<resource>
<directory>src/main/resources</directory>
</resource>
</resources>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>3.0.0-M1</version>
<configuration>
<forkCount>1</forkCount>
<reuseForks>true</reuseForks>
<argLine>-Xmx4024m</argLine>
<includes>
<include>**/*.java</include>
<include>**/*.scala</include>
</includes>
</configuration>
</plugin>
<plugin>
<groupId>org.scala-tools</groupId>
<artifactId>maven-scala-plugin</artifactId>
<version>2.15.2</version>
<configuration>
<args>
<arg>
-g:vars
</arg>
</args>
<verbose>true</verbose>
</configuration>
<executions>
<execution>
<id>scala-compile-first</id>
<phase>process-resources</phase>
<goals>
<goal>compile</goal>
</goals>
</execution>
<execution>
<id>scala-test-compile</id>
<phase>process-test-resources</phase>
<goals>
<goal>testCompile</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>2.3.2</version>
<configuration>
<!--<compilerArgument>-parameters</compilerArgument>-->
<compilerArgument>-g</compilerArgument>
<verbose>true</verbose>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
<!-- To publish the source code, this plugin needs to be added. -->
<plugin>
<artifactId>maven-source-plugin</artifactId>
<version>2.1</version>
<configuration>
<attach>true</attach>
</configuration>
<executions>
<execution>
<phase>compile</phase>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<executions>
<execution>
<id>attach-javadocs</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.sonatype.plugins</groupId>
<artifactId>nexus-staging-maven-plugin</artifactId>
<version>1.6.7</version>
<extensions>true</extensions>
<configuration>
<serverId>sonatype-nexus-staging</serverId>
<nexusUrl>https://oss.sonatype.org/</nexusUrl>
<autoReleaseAfterClose>true</autoReleaseAfterClose>
</configuration>
</plugin>
<plugin>
<groupId>org.scalatest</groupId>
<artifactId>scalatest-maven-plugin</artifactId>
<version>2.0.0</version>
<configuration>
<tagsToExclude>streaming.core.NotToRunTag</tagsToExclude>
<reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
<junitxml>.</junitxml>
<filereports>WDF TestSuite.txt</filereports>
<htmlreporters>${project.build.directory}/html/scalatest</htmlreporters>
<testFailureIgnore>false</testFailureIgnore>
</configuration>
<executions>
<execution>
<id>test</id>
<goals>
<goal>test</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
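<!-- Test execution: both maven-surefire-plugin and scalatest-maven-plugin are bound to
the test phase, so mvn test runs the Java/Scala unit tests and the ScalaTest suites;
suites tagged streaming.core.NotToRunTag are excluded and reports are written to
target/surefire-reports. -->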
<distributionManagement>
<snapshotRepository>
<id>sonatype-nexus-snapshots</id>
<url>https://oss.sonatype.org/content/repositories/snapshots</url>
</snapshotRepository>
<repository>
<id>sonatype-nexus-staging</id>
<url>https://oss.sonatype.org/service/local/staging/deploy/maven2/</url>
</repository>
</distributionManagement>
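<!-- Deployment: snapshot builds go to the Sonatype snapshots repository; releases are
staged through the nexus-staging-maven-plugin configured above (autoReleaseAfterClose
is true), so a typical release is mvn clean deploy -DperformRelease=true, with
credentials for the sonatype-nexus-staging server id provided in settings.xml. -->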
</project>
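For context on what this build publishes: the mysql-binlog module provides a Spark
structured streaming data source for the MySQL binlog. The sketch below shows how such
a source is typically consumed from Scala; the fully qualified data source class name
and the option keys (host, port, userName, password, databaseNamePattern,
tableNamePattern) are assumptions based on the project README and may differ in this
exact version.

// Minimal sketch: stream MySQL binlog events into a console sink.
// Assumption: the data source class name and option keys match the spark-binlog README.
import org.apache.spark.sql.SparkSession

object BinlogStreamSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("spark-binlog-sketch")
      .master("local[*]")
      .getOrCreate()

    // Read the MySQL binlog as an unbounded streaming DataFrame.
    val events = spark.readStream
      .format("org.apache.spark.sql.mlsql.sources.MLSQLBinLogDataSource") // assumed class name
      .option("host", "127.0.0.1")
      .option("port", "3306")
      .option("userName", "root")                 // assumed option key
      .option("password", "root")
      .option("databaseNamePattern", "test_db")   // assumed: regex of databases to follow
      .option("tableNamePattern", "test_table")   // assumed: regex of tables to follow
      .load()

    // Dump each binlog event to the console for inspection.
    val query = events.writeStream
      .format("console")
      .option("checkpointLocation", "/tmp/spark-binlog-ckpt")
      .outputMode("append")
      .start()

    query.awaitTermination()
  }
}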