<?xml version="1.0" encoding="UTF-8"?>
<!--
    Licensed to the Apache Software Foundation (ASF) under one or more
    contributor license agreements.  See the NOTICE file distributed with
    this work for additional information regarding copyright ownership.
    The ASF licenses this file to You under the Apache License, Version 2.0
    (the "License"); you may not use this file except in compliance with
    the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing, software
    distributed under the License is distributed on an "AS IS" BASIS,
    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    See the License for the specific language governing permissions and
    limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

  <modelVersion>4.0.0</modelVersion>
    <groupId>org.apache</groupId>
    <artifactId>apache</artifactId>
  <groupId>org.apache.carbondata</groupId>
  <artifactId>carbondata-parent</artifactId>
  <name>Apache CarbonData :: Parent</name>
  <description>Apache CarbonData is an indexed columnar data format for fast analytics
    on big data platforms, e.g. Apache Hadoop, Apache Spark, etc.
  </description>
  <url>http://carbondata.apache.org</url>
  <inceptionYear>2016</inceptionYear>
  <packaging>pom</packaging>

  <version>1.3.0-SNAPSHOT</version>
      <name>Apache License, Version 2.0</name>
      <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
      <distribution>repo</distribution>
    <connection>scm:git:https://git-wip-us.apache.org/repos/asf/carbondata.git</connection>
    <developerConnection>scm:git:https://git-wip-us.apache.org/repos/asf/carbondata.git</developerConnection>
    <url>https://git-wip-us.apache.org/repos/asf/carbondata.git</url>

    <url>https://issues.apache.org/jira/browse/CARBONDATA</url>
      <name>CarbonData Dev</name>
      <subscribe>dev-subscribe@carbondata.apache.org</subscribe>
      <unsubscribe>dev-unsubscribe@carbondata.apache.org</unsubscribe>
      <post>dev@carbondata.apache.org</post>

      <name>CarbonData Issues</name>
      <subscribe>issues-subscribe@carbondata.apache.org</subscribe>
      <unsubscribe>issues-unsubscribe@carbondata.apache.org</unsubscribe>
      <post>issues@carbondata.apache.org</post>

      <name>CarbonData User</name>
      <subscribe>user-subscribe@carbondata.apache.org</subscribe>
      <unsubscribe>user-unsubscribe@carbondata.apache.org</unsubscribe>
      <post>user@carbondata.apache.org</post>

      <name>CarbonData Commits</name>
      <subscribe>commits-subscribe@carbondata.apache.org</subscribe>
      <unsubscribe>commits-unsubscribe@carbondata.apache.org</unsubscribe>
      <post>commits@carbondata.apache.org</post>
      <name>The Apache CarbonData Team</name>
      <email>dev@carbondata.apache.org</email>
      <url>http://carbondata.apache.org</url>
      <organization>Apache Software Foundation</organization>
      <organizationUrl>http://www.apache.org</organizationUrl>
    <module>common</module>
    <module>processing</module>
    <module>hadoop</module>
    <module>store/sdk</module>
    <module>integration/spark-common</module>
    <module>integration/spark-common-test</module>
    <module>assembly</module>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <snappy.version>1.1.2.6</snappy.version>
    <hadoop.version>2.7.2</hadoop.version>
    <scala.binary.version>2.11</scala.binary.version>
    <scala.version>2.11.8</scala.version>
    <hadoop.deps.scope>compile</hadoop.deps.scope>
    <spark.deps.scope>compile</spark.deps.scope>
    <scala.deps.scope>compile</scala.deps.scope>
    <dev.path>${basedir}/dev</dev.path>
    <spark.master.url>local[2]</spark.master.url>
    <hdfs.url>local</hdfs.url>
    <suite.name>org.apache.carbondata.cluster.sdv.suite.SDVSuites</suite.name>
    <script.exetension>.sh</script.exetension>
    <carbon.hive.based.metastore>false</carbon.hive.based.metastore>
      <!-- Keep this repository at the top: Maven then tries the central repository before the
           others, which speeds up dependency resolution. -->
      <name>Maven Repository</name>
      <url>http://repo1.maven.org/maven2</url>
      <enabled>true</enabled>
  <dependencyManagement>

      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>${hadoop.version}</version>
      <scope>${hadoop.deps.scope}</scope>
          <groupId>io.netty</groupId>
          <artifactId>netty-all</artifactId>
          <groupId>javax.servlet</groupId>
          <artifactId>*</artifactId>
          <groupId>javax.servlet.jsp</groupId>
          <artifactId>*</artifactId>

      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <version>${hadoop.version}</version>
      <scope>${hadoop.deps.scope}</scope>
          <groupId>io.netty</groupId>
          <artifactId>netty-all</artifactId>
          <groupId>javax.servlet</groupId>
          <artifactId>*</artifactId>
          <groupId>javax.servlet.jsp</groupId>
          <artifactId>*</artifactId>

      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-client-core</artifactId>
      <version>${hadoop.version}</version>
      <scope>${hadoop.deps.scope}</scope>
          <groupId>io.netty</groupId>
          <artifactId>netty-all</artifactId>
          <groupId>javax.servlet</groupId>
          <artifactId>*</artifactId>
          <groupId>javax.servlet.jsp</groupId>
          <artifactId>*</artifactId>

      <groupId>org.apache.spark</groupId>
      <artifactId>spark-sql_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
      <scope>${spark.deps.scope}</scope>

      <groupId>org.apache.spark</groupId>
      <artifactId>spark-hive-thriftserver_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
      <scope>${spark.deps.scope}</scope>

      <groupId>org.apache.spark</groupId>
      <artifactId>spark-repl_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
      <scope>${spark.deps.scope}</scope>

      <groupId>org.scala-lang</groupId>
      <artifactId>scala-compiler</artifactId>
      <version>${scala.version}</version>
      <scope>${scala.deps.scope}</scope>

      <groupId>org.scala-lang</groupId>
      <artifactId>scala-reflect</artifactId>
      <version>${scala.version}</version>
      <scope>${scala.deps.scope}</scope>

      <groupId>org.scala-lang</groupId>
      <artifactId>scala-library</artifactId>
      <version>${scala.version}</version>
      <scope>${scala.deps.scope}</scope>

      <groupId>org.scala-lang</groupId>
      <artifactId>scala-actors</artifactId>
      <version>${scala.version}</version>
      <scope>${scala.deps.scope}</scope>

      <groupId>org.scala-lang</groupId>
      <artifactId>scalap</artifactId>
      <version>${scala.version}</version>
      <scope>${scala.deps.scope}</scope>

      <groupId>org.jmockit</groupId>
      <artifactId>jmockit</artifactId>
      <version>1.10</version>
          <artifactId>*</artifactId>

      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>4.11</version>

      <groupId>org.scalatest</groupId>
      <artifactId>scalatest_${scala.binary.version}</artifactId>
      <version>2.2.1</version>

  </dependencyManagement>
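  <!--
    How the section above is consumed (a sketch; not copied from any child pom in this file):
    dependencyManagement only pins versions, scopes and exclusions. A child module still has to
    declare the dependency itself and then inherits those settings, e.g.

      <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
      </dependency>
  -->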
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-javadoc-plugin</artifactId>
          <additionalparam>-Xdoclint:missing</additionalparam>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-shade-plugin</artifactId>
        <version>2.4.3</version>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-checkstyle-plugin</artifactId>
        <version>2.17</version>
          <configLocation>${dev.path}/javastyle-config.xml</configLocation>
          <suppressionsLocation>${dev.path}/javastyle-suppressions.xml</suppressionsLocation>
          <headerLocation>${dev.path}/java.header</headerLocation>
          <consoleOutput>true</consoleOutput>
          <failsOnError>true</failsOnError>
          <linkXRef>false</linkXRef>
          <failOnViolation>true</failOnViolation>
          <includeTestSourceDirectory>false</includeTestSourceDirectory>
          <sourceDirectory>${basedir}/src/main/java</sourceDirectory>
          <testSourceDirectory>${basedir}/src/test/java</testSourceDirectory>
          <outputFile>${basedir}/target/checkstyle-output.xml</outputFile>
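        <!--
          On-demand run of the rules configured above (illustrative; assumes no extra
          configuration beyond what is shown in this excerpt):

            mvn checkstyle:check

          Violations fail the build because failOnViolation is set to true.
        -->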
        <groupId>org.scalastyle</groupId>
        <artifactId>scalastyle-maven-plugin</artifactId>
        <version>0.8.0</version>
          <verbose>false</verbose>
          <failOnViolation>true</failOnViolation>
          <includeTestSourceDirectory>false</includeTestSourceDirectory>
          <failOnWarning>false</failOnWarning>
          <sourceDirectory>${basedir}/src/main/scala</sourceDirectory>
          <testSourceDirectory>${basedir}/src/test/scala</testSourceDirectory>
          <configLocation>${dev.path}/scalastyle-config.xml</configLocation>
          <outputFile>${basedir}/target/scalastyle-output.xml</outputFile>
          <outputEncoding>${project.build.sourceEncoding}</outputEncoding>
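        <!--
          Analogous on-demand run for the Scala sources (illustrative, same assumption as above):

            mvn scalastyle:check

          Violations fail the build (failOnViolation is true); warnings do not (failOnWarning is false).
        -->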
        <groupId>org.codehaus.mojo</groupId>
        <artifactId>findbugs-maven-plugin</artifactId>
        <version>3.0.4</version>
          <excludeFilterFile>${dev.path}/findbugs-exclude.xml</excludeFilterFile>
          <failOnError>true</failOnError>
          <findbugsXmlOutput>true</findbugsXmlOutput>
          <xmlOutput>true</xmlOutput>
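        <!--
          Static analysis can also be triggered directly (illustrative sketch):

            mvn findbugs:check

          Classes matched by ${dev.path}/findbugs-exclude.xml are skipped; any remaining findings
          fail the build because failOnError is true.
        -->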
            <id>analyze-compile</id>
            <phase>compile</phase>
        <groupId>org.jacoco</groupId>
        <artifactId>jacoco-maven-plugin</artifactId>
        <version>0.7.9</version>
            <id>default-prepare-agent</id>
              <goal>prepare-agent</goal>
            <id>default-prepare-agent-integration</id>
              <goal>prepare-agent-integration</goal>
            <id>default-report</id>
            <id>default-report-integration</id>
              <goal>report-integration</goal>
            <id>default-check</id>
          <append>true</append>
            <exclude>**/*SparkUnknownExpression*.class</exclude>
            <exclude>**/org/apache/carbondata/cluster/sdv/generated/*</exclude>
            <exclude>**/org.apache.carbondata.cluster.sdv.generated.*</exclude>
            <include>**/org.apache.*</include>
          <!-- implementation is needed only for Maven 2 -->
          <rule implementation="org.jacoco.maven.RuleConfiguration">
            <element>BUNDLE</element>
              <!-- implementation is needed only for Maven 2 -->
              <limit implementation="org.jacoco.report.check.Limit">
                <counter>COMPLEXITY</counter>
                <value>COVEREDRATIO</value>
                <minimum>0.10</minimum>
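          <!--
            Reading of the rule above: at BUNDLE (whole module) granularity, the covered ratio of
            cyclomatic COMPLEXITY must be at least 0.10, i.e. roughly 10% of the code's decision
            points have to be exercised by tests, otherwise the JaCoCo check goal fails the build.
            Illustrative trigger (assuming the default-check execution keeps the plugin's default
            binding to the verify phase):

              mvn clean verify
          -->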
      <!-- This profile does not build the Spark modules, so a Spark profile such as spark-2.1
           must also be given explicitly. -->
      <id>build-with-format</id>
        <module>format</module>
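      <!--
        Illustrative activation (the spark-2.1 profile id is taken from the comment above; swap in
        whichever Spark profile is actually needed):

          mvn clean install -Pbuild-with-format,spark-2.1
      -->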
      <!-- This profile should only be used for release preparation, so that all the modules are covered. -->
        <spark.version>2.2.1</spark.version>
        <maven.test.skip>true</maven.test.skip>
        <module>format</module>
        <module>integration/spark2</module>
        <module>examples/spark2</module>
        <module>datamap/examples</module>
        <module>integration/hive</module>
        <module>integration/presto</module>
        <module>examples/flink</module>
        <module>streaming</module>
            <groupId>org.jacoco</groupId>
            <artifactId>jacoco-maven-plugin</artifactId>
      <id>hadoop-2.2.0</id>
        <hadoop.version>2.2.0</hadoop.version>
        <spark.version>2.1.0</spark.version>
        <scala.binary.version>2.11</scala.binary.version>
        <scala.version>2.11.8</scala.version>
        <module>integration/spark2</module>
        <module>integration/hive</module>
        <module>integration/presto</module>
        <module>streaming</module>
        <module>examples/spark2</module>
        <module>datamap/examples</module>
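      <!--
        Illustrative activation of this profile (the id spark-2.1 is assumed from the surrounding
        comments; it is not shown in this excerpt):

          mvn clean install -Pspark-2.1
      -->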
            <groupId>org.eluder.coveralls</groupId>
            <artifactId>coveralls-maven-plugin</artifactId>
            <version>4.3.0</version>
              <repoToken>opPwqWW41vYppv6KISea3u1TJvE1ugJ5Y</repoToken>
              <sourceEncoding>UTF-8</sourceEncoding>
                <jacocoReport>${basedir}/target/carbondata-coverage-report/carbondata-coverage-report.xml</jacocoReport>
                <sourceDirectory>${basedir}/common/src/main/java</sourceDirectory>
                <sourceDirectory>${basedir}/core/src/main/java</sourceDirectory>
                <sourceDirectory>${basedir}/processing/src/main/java</sourceDirectory>
                <sourceDirectory>${basedir}/hadoop/src/main/java</sourceDirectory>
                <sourceDirectory>${basedir}/integration/spark2/src/main/scala</sourceDirectory>
                <sourceDirectory>${basedir}/integration/spark2/src/main/java</sourceDirectory>
                <sourceDirectory>${basedir}/integration/spark2/src/main/spark2.1</sourceDirectory>
                <sourceDirectory>${basedir}/integration/spark-common/src/main/scala</sourceDirectory>
                <sourceDirectory>${basedir}/integration/spark-common/src/main/java</sourceDirectory>
                <sourceDirectory>${basedir}/integration/spark-common-test/src/main/scala</sourceDirectory>
                <sourceDirectory>${basedir}/integration/spark-common-test/src/main/java</sourceDirectory>
                <sourceDirectory>${basedir}/integration/hive/src/main/scala</sourceDirectory>
                <sourceDirectory>${basedir}/integration/hive/src/main/java</sourceDirectory>
                <sourceDirectory>${basedir}/integration/presto/src/main/scala</sourceDirectory>
                <sourceDirectory>${basedir}/integration/presto/src/main/java</sourceDirectory>
                <sourceDirectory>${basedir}/streaming/src/main/java</sourceDirectory>
                <sourceDirectory>${basedir}/streaming/src/main/scala</sourceDirectory>
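            <!--
              Illustrative use of the coveralls configuration above (assumes the aggregated JaCoCo
              report has already been produced at the configured jacocoReport path):

                mvn coveralls:report
            -->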
          <activeByDefault>true</activeByDefault>
        <spark.version>2.2.1</spark.version>
        <scala.binary.version>2.11</scala.binary.version>
        <scala.version>2.11.8</scala.version>
        <module>integration/spark2</module>
        <module>integration/hive</module>
        <module>integration/presto</module>
        <module>streaming</module>
        <module>examples/spark2</module>
            <groupId>org.eluder.coveralls</groupId>
            <artifactId>coveralls-maven-plugin</artifactId>
            <version>4.3.0</version>
              <repoToken>opPwqWW41vYppv6KISea3u1TJvE1ugJ5Y</repoToken>
              <sourceEncoding>UTF-8</sourceEncoding>
                <jacocoReport>${basedir}/target/carbondata-coverage-report/carbondata-coverage-report.xml</jacocoReport>
                <sourceDirectory>${basedir}/common/src/main/java</sourceDirectory>
                <sourceDirectory>${basedir}/core/src/main/java</sourceDirectory>
                <sourceDirectory>${basedir}/processing/src/main/java</sourceDirectory>
                <sourceDirectory>${basedir}/hadoop/src/main/java</sourceDirectory>
                <sourceDirectory>${basedir}/integration/spark2/src/main/scala</sourceDirectory>
                <sourceDirectory>${basedir}/integration/spark2/src/main/spark2.2</sourceDirectory>
                <sourceDirectory>${basedir}/integration/spark2/src/main/java</sourceDirectory>
                <sourceDirectory>${basedir}/integration/spark-common/src/main/scala</sourceDirectory>
                <sourceDirectory>${basedir}/integration/spark-common/src/main/java</sourceDirectory>
                <sourceDirectory>${basedir}/integration/spark-common-test/src/main/scala</sourceDirectory>
                <sourceDirectory>${basedir}/integration/spark-common-test/src/main/java</sourceDirectory>
                <sourceDirectory>${basedir}/integration/hive/src/main/scala</sourceDirectory>
                <sourceDirectory>${basedir}/integration/hive/src/main/java</sourceDirectory>
                <sourceDirectory>${basedir}/integration/presto/src/main/scala</sourceDirectory>
                <sourceDirectory>${basedir}/integration/presto/src/main/java</sourceDirectory>
                <sourceDirectory>${basedir}/streaming/src/main/java</sourceDirectory>
                <sourceDirectory>${basedir}/streaming/src/main/scala</sourceDirectory>
        <module>store/sdk</module>
        <module>integration/spark-common-cluster-test</module>
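      <!--
        Illustrative SDV cluster-test run (the profile id sdvtest is an assumption, it is not
        shown in this excerpt; suite.name and spark.master.url are the properties defined at the
        top of this pom):

          mvn test -Psdvtest -Dsuite.name=org.apache.carbondata.cluster.sdv.suite.SDVSuites -Dspark.master.url=local[2]
      -->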
            <groupId>org.apache.rat</groupId>
            <artifactId>apache-rat-plugin</artifactId>
            <version>0.12</version>
                <phase>verify</phase>
              <reportFile>${project.build.directory}/${project.build.finalName}.rat</reportFile>
              <excludeSubProjects>false</excludeSubProjects>
              <consoleOutput>true</consoleOutput>
              <useDefaultExcludes>true</useDefaultExcludes>
                <exclude>**/target/**/*</exclude>
                <exclude>.github/**/*</exclude>
                <exclude>**/*.iml</exclude>
                <exclude>**/src/test/**/*</exclude>
                <exclude>examples/**/*.csv</exclude>
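          <!--
            License-header audit on demand (illustrative; the execution above already runs it
            during the verify phase):

              mvn apache-rat:check
          -->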
        <script.exetension>.bat</script.exetension>
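      <!--
        This property override switches the test helper scripts from .sh to .bat. Illustrative
        activation (the profile id "windows" is an assumption; it is not shown in this excerpt):

          mvn test -Pwindows
      -->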