Page Menu
Home
c4science
Search
Configure Global Search
Log In
Files
F102915846
pom.xml
No One
Temporary
Actions
Download File
Edit File
Delete File
View Transforms
Subscribe
Mute Notifications
Award Token
Subscribers
None
File Metadata
Details
File Info
Storage
Attached
Created
Tue, Feb 25, 11:28
Size
25 KB
Mime Type
text/xml
Expires
Thu, Feb 27, 11:28 (2 d)
Engine
blob
Format
Raw Data
Handle
24159738
Attached To
R3704 elastic-yarn
pom.xml
View Options
<?xml version="1.0" encoding="UTF-8"?>
<!--
  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
  You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License. See accompanying LICENSE file.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
                             http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <parent>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-project-dist</artifactId>
    <version>2.3.0</version>
    <relativePath>../../hadoop-project-dist</relativePath>
  </parent>
  <groupId>org.apache.hadoop</groupId>
  <artifactId>hadoop-hdfs</artifactId>
  <version>2.3.0</version>
  <description>Apache Hadoop HDFS</description>
  <name>Apache Hadoop HDFS</name>
  <packaging>jar</packaging>
<properties>
<hadoop.component>
hdfs
</hadoop.component>
<is.hadoop.component>
true
</is.hadoop.component>
<require.fuse>
false
</require.fuse>
<require.libwebhdfs>
false
</require.libwebhdfs>
</properties>
<dependencies>
<dependency>
<groupId>
org.apache.hadoop
</groupId>
<artifactId>
hadoop-annotations
</artifactId>
<scope>
provided
</scope>
</dependency>
<dependency>
<groupId>
org.apache.hadoop
</groupId>
<artifactId>
hadoop-auth
</artifactId>
<scope>
provided
</scope>
</dependency>
<dependency>
<groupId>
org.apache.hadoop
</groupId>
<artifactId>
hadoop-common
</artifactId>
<scope>
provided
</scope>
</dependency>
<dependency>
<groupId>
org.apache.hadoop
</groupId>
<artifactId>
hadoop-common
</artifactId>
<scope>
test
</scope>
<type>
test-jar
</type>
</dependency>
<dependency>
<groupId>
org.apache.zookeeper
</groupId>
<artifactId>
zookeeper
</artifactId>
<type>
test-jar
</type>
<scope>
test
</scope>
</dependency>
<dependency>
<groupId>
com.google.guava
</groupId>
<artifactId>
guava
</artifactId>
<scope>
compile
</scope>
</dependency>
<dependency>
<groupId>
org.mortbay.jetty
</groupId>
<artifactId>
jetty
</artifactId>
<scope>
compile
</scope>
</dependency>
<dependency>
<groupId>
org.mortbay.jetty
</groupId>
<artifactId>
jetty-util
</artifactId>
<scope>
compile
</scope>
</dependency>
<dependency>
<groupId>
com.sun.jersey
</groupId>
<artifactId>
jersey-core
</artifactId>
<scope>
compile
</scope>
</dependency>
<dependency>
<groupId>
com.sun.jersey
</groupId>
<artifactId>
jersey-server
</artifactId>
<scope>
compile
</scope>
</dependency>
<dependency>
<groupId>
commons-cli
</groupId>
<artifactId>
commons-cli
</artifactId>
<scope>
compile
</scope>
</dependency>
<dependency>
<groupId>
commons-codec
</groupId>
<artifactId>
commons-codec
</artifactId>
<scope>
compile
</scope>
</dependency>
<dependency>
<groupId>
commons-io
</groupId>
<artifactId>
commons-io
</artifactId>
<scope>
compile
</scope>
</dependency>
<dependency>
<groupId>
commons-lang
</groupId>
<artifactId>
commons-lang
</artifactId>
<scope>
compile
</scope>
</dependency>
<dependency>
<groupId>
commons-logging
</groupId>
<artifactId>
commons-logging
</artifactId>
<scope>
compile
</scope>
</dependency>
<dependency>
<groupId>
commons-daemon
</groupId>
<artifactId>
commons-daemon
</artifactId>
<scope>
compile
</scope>
</dependency>
<dependency>
<groupId>
javax.servlet.jsp
</groupId>
<artifactId>
jsp-api
</artifactId>
<scope>
compile
</scope>
</dependency>
<dependency>
<groupId>
log4j
</groupId>
<artifactId>
log4j
</artifactId>
<scope>
compile
</scope>
</dependency>
<dependency>
<groupId>
com.google.protobuf
</groupId>
<artifactId>
protobuf-java
</artifactId>
<scope>
compile
</scope>
</dependency>
<dependency>
<groupId>
javax.servlet
</groupId>
<artifactId>
servlet-api
</artifactId>
<scope>
compile
</scope>
</dependency>
<dependency>
<groupId>
junit
</groupId>
<artifactId>
junit
</artifactId>
<scope>
test
</scope>
</dependency>
<dependency>
<groupId>
org.mockito
</groupId>
<artifactId>
mockito-all
</artifactId>
<scope>
test
</scope>
</dependency>
<dependency>
<groupId>
org.slf4j
</groupId>
<artifactId>
slf4j-log4j12
</artifactId>
<scope>
provided
</scope>
</dependency>
<dependency>
<groupId>
org.codehaus.jackson
</groupId>
<artifactId>
jackson-core-asl
</artifactId>
<scope>
compile
</scope>
</dependency>
<dependency>
<groupId>
org.codehaus.jackson
</groupId>
<artifactId>
jackson-mapper-asl
</artifactId>
<scope>
compile
</scope>
</dependency>
<dependency>
<groupId>
tomcat
</groupId>
<artifactId>
jasper-runtime
</artifactId>
<scope>
compile
</scope>
</dependency>
<dependency>
<groupId>
xmlenc
</groupId>
<artifactId>
xmlenc
</artifactId>
<scope>
compile
</scope>
</dependency>
<dependency>
<groupId>
io.netty
</groupId>
<artifactId>
netty
</artifactId>
<scope>
test
</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>
org.apache.maven.plugins
</groupId>
<artifactId>
maven-surefire-plugin
</artifactId>
<configuration>
<properties>
<property>
<name>
listener
</name>
<value>
org.apache.hadoop.test.TimedOutTestsListener
</value>
</property>
</properties>
</configuration>
</plugin>
<plugin>
<groupId>
org.codehaus.mojo.jspc
</groupId>
<artifactId>
jspc-maven-plugin
</artifactId>
<executions>
<execution>
<id>
hdfs
</id>
<phase>
generate-sources
</phase>
<goals>
<goal>
compile
</goal>
</goals>
<configuration>
<compile>
false
</compile>
<workingDirectory>
${
project
.
build
.
directory
}
/generated-sources/java
</workingDirectory>
<webFragmentFile>
${
project
.
build
.
directory
}
/hdfs-jsp-servlet-definitions.xml
</webFragmentFile>
<packageName>
org.apache.hadoop.hdfs.server.namenode
</packageName>
<sources>
<directory>
${
basedir
}
/src/main/webapps/hdfs
</directory>
<includes>
<include>
*.jsp
</include>
</includes>
</sources>
</configuration>
</execution>
<execution>
<id>
secondary
</id>
<phase>
generate-sources
</phase>
<goals>
<goal>
compile
</goal>
</goals>
<configuration>
<compile>
false
</compile>
<workingDirectory>
${
project
.
build
.
directory
}
/generated-sources/java
</workingDirectory>
<webFragmentFile>
${
project
.
build
.
directory
}
/secondary-jsp-servlet-definitions.xml
</webFragmentFile>
<packageName>
org.apache.hadoop.hdfs.server.namenode
</packageName>
<sources>
<directory>
${
basedir
}
/src/main/webapps/secondary
</directory>
<includes>
<include>
*.jsp
</include>
</includes>
</sources>
</configuration>
</execution>
<execution>
<id>
journal
</id>
<phase>
generate-sources
</phase>
<goals>
<goal>
compile
</goal>
</goals>
<configuration>
<compile>
false
</compile>
<workingDirectory>
${
project
.
build
.
directory
}
/generated-sources/java
</workingDirectory>
<webFragmentFile>
${
project
.
build
.
directory
}
/journal-jsp-servlet-definitions.xml
</webFragmentFile>
<packageName>
org.apache.hadoop.hdfs.server.journalservice
</packageName>
<sources>
<directory>
${
basedir
}
/src/main/webapps/journal
</directory>
<includes>
<include>
*.jsp
</include>
</includes>
</sources>
</configuration>
</execution>
<execution>
<id>
datanode
</id>
<phase>
generate-sources
</phase>
<goals>
<goal>
compile
</goal>
</goals>
<configuration>
<compile>
false
</compile>
<workingDirectory>
${
project
.
build
.
directory
}
/generated-sources/java
</workingDirectory>
<webFragmentFile>
${
project
.
build
.
directory
}
/datanode-jsp-servlet-definitions.xml
</webFragmentFile>
<packageName>
org.apache.hadoop.hdfs.server.datanode
</packageName>
<sources>
<directory>
${
basedir
}
/src/main/webapps/datanode
</directory>
<includes>
<include>
*.jsp
</include>
</includes>
</sources>
</configuration>
</execution>
</executions>
<dependencies>
<dependency>
<groupId>
org.codehaus.mojo.jspc
</groupId>
<artifactId>
jspc-compiler-tomcat5
</artifactId>
<version>
2.0-alpha-3
</version>
</dependency>
<dependency>
<groupId>
org.slf4j
</groupId>
<artifactId>
slf4j-log4j12
</artifactId>
<version>
1.4.1
</version>
</dependency>
<dependency>
<groupId>
org.slf4j
</groupId>
<artifactId>
jcl104-over-slf4j
</artifactId>
<version>
1.4.1
</version>
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>
org.codehaus.mojo
</groupId>
<artifactId>
build-helper-maven-plugin
</artifactId>
<executions>
<execution>
<id>
add-jsp-generated-sources-directory
</id>
<phase>
generate-sources
</phase>
<goals>
<goal>
add-source
</goal>
</goals>
<configuration>
<sources>
<source>
${
project
.
build
.
directory
}
/generated-sources/java
</source>
</sources>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>
org.apache.maven.plugins
</groupId>
<artifactId>
maven-antrun-plugin
</artifactId>
<configuration>
<skipTests>
false
</skipTests>
</configuration>
<executions>
<execution>
<id>
create-jsp-generated-sources-directory
</id>
<phase>
initialize
</phase>
<goals>
<goal>
run
</goal>
</goals>
<configuration>
<target>
<mkdir
dir=
"
${
project
.
build
.
directory
}
/generated-sources/java"
/>
</target>
</configuration>
</execution>
<execution>
<id>
create-web-xmls
</id>
<phase>
compile
</phase>
<goals>
<goal>
run
</goal>
</goals>
<configuration>
<target>
<loadfile
property=
"hdfs.servlet.definitions"
srcFile=
"
${
project
.
build
.
directory
}
/hdfs-jsp-servlet-definitions.xml"
/>
<loadfile
property=
"secondary.servlet.definitions"
srcFile=
"
${
project
.
build
.
directory
}
/secondary-jsp-servlet-definitions.xml"
/>
<loadfile
property=
"datanode.servlet.definitions"
srcFile=
"
${
project
.
build
.
directory
}
/datanode-jsp-servlet-definitions.xml"
/>
<loadfile
property=
"journal.servlet.definitions"
srcFile=
"
${
project
.
build
.
directory
}
/journal-jsp-servlet-definitions.xml"
/>
<echoproperties
destfile=
"
${
project
.
build
.
directory
}
/webxml.properties"
>
<propertyset>
<propertyref
regex=
".*.servlet.definitions"
/>
</propertyset>
</echoproperties>
<filter
filtersfile=
"
${
project
.
build
.
directory
}
/webxml.properties"
/>
<copy
file=
"
${
basedir
}
/src/main/webapps/proto-hdfs-web.xml"
tofile=
"
${
project
.
build
.
directory
}
/webapps/hdfs/WEB-INF/web.xml"
filtering=
"true"
/>
<copy
file=
"
${
basedir
}
/src/main/webapps/proto-secondary-web.xml"
tofile=
"
${
project
.
build
.
directory
}
/webapps/secondary/WEB-INF/web.xml"
filtering=
"true"
/>
<copy
file=
"
${
basedir
}
/src/main/webapps/proto-datanode-web.xml"
tofile=
"
${
project
.
build
.
directory
}
/webapps/datanode/WEB-INF/web.xml"
filtering=
"true"
/>
<copy
file=
"
${
basedir
}
/src/main/webapps/proto-journal-web.xml"
tofile=
"
${
project
.
build
.
directory
}
/webapps/journal/WEB-INF/web.xml"
filtering=
"true"
/>
<copy
toDir=
"
${
project
.
build
.
directory
}
/webapps"
>
<fileset
dir=
"
${
basedir
}
/src/main/webapps"
>
<exclude
name=
"**/*.jsp"
/>
<exclude
name=
"**/proto-*-web.xml"
/>
</fileset>
</copy>
</target>
</configuration>
</execution>
<execution>
<id>
create-log-dir
</id>
<phase>
process-test-resources
</phase>
<goals>
<goal>
run
</goal>
</goals>
<configuration>
<target>
<delete
dir=
"
${
test
.
build
.
data
}
"
/>
<mkdir
dir=
"
${
test
.
build
.
data
}
"
/>
<mkdir
dir=
"
${
hadoop
.
log
.
dir
}
"
/>
<copy
todir=
"
${
project
.
build
.
directory
}
/test-classes/webapps"
>
<fileset
dir=
"
${
project
.
build
.
directory
}
/webapps"
>
<exclude
name=
"proto-*-web.xml"
/>
</fileset>
</copy>
</target>
</configuration>
</execution>
<execution>
<phase>
pre-site
</phase>
<goals>
<goal>
run
</goal>
</goals>
<configuration>
<tasks>
<copy
file=
"src/main/resources/hdfs-default.xml"
todir=
"src/site/resources"
/>
<copy
file=
"src/main/xsl/configuration.xsl"
todir=
"src/site/resources"
/>
</tasks>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>
org.apache.hadoop
</groupId>
<artifactId>
hadoop-maven-plugins
</artifactId>
<executions>
<execution>
<id>
compile-protoc
</id>
<phase>
generate-sources
</phase>
<goals>
<goal>
protoc
</goal>
</goals>
<configuration>
<protocVersion>
${
protobuf
.
version
}
</protocVersion>
<protocCommand>
${
protoc
.
path
}
</protocCommand>
<imports>
<param>
${
basedir
}
/../../hadoop-common-project/hadoop-common/src/main/proto
</param>
<param>
${
basedir
}
/src/main/proto
</param>
</imports>
<source>
<directory>
${
basedir
}
/src/main/proto
</directory>
<includes>
<include>
HAZKInfo.proto
</include>
<include>
InterDatanodeProtocol.proto
</include>
<include>
JournalProtocol.proto
</include>
<include>
datatransfer.proto
</include>
<include>
hdfs.proto
</include>
</includes>
</source>
<output>
${
project
.
build
.
directory
}
/generated-sources/java
</output>
</configuration>
</execution>
<execution>
<id>
compile-protoc-datanode
</id>
<phase>
generate-sources
</phase>
<goals>
<goal>
protoc
</goal>
</goals>
<configuration>
<protocVersion>
${
protobuf
.
version
}
</protocVersion>
<protocCommand>
${
protoc
.
path
}
</protocCommand>
<imports>
<param>
${
basedir
}
/../../hadoop-common-project/hadoop-common/src/main/proto
</param>
<param>
${
basedir
}
/src/main/proto
</param>
</imports>
<source>
<directory>
${
basedir
}
/src/main/proto
</directory>
<includes>
<include>
ClientDatanodeProtocol.proto
</include>
<include>
DatanodeProtocol.proto
</include>
</includes>
</source>
<output>
${
project
.
build
.
directory
}
/generated-sources/java
</output>
</configuration>
</execution>
<execution>
<id>
compile-protoc-namenode
</id>
<phase>
generate-sources
</phase>
<goals>
<goal>
protoc
</goal>
</goals>
<configuration>
<protocVersion>
${
protobuf
.
version
}
</protocVersion>
<protocCommand>
${
protoc
.
path
}
</protocCommand>
<imports>
<param>
${
basedir
}
/../../hadoop-common-project/hadoop-common/src/main/proto
</param>
<param>
${
basedir
}
/src/main/proto
</param>
</imports>
<source>
<directory>
${
basedir
}
/src/main/proto
</directory>
<includes>
<include>
ClientNamenodeProtocol.proto
</include>
<include>
NamenodeProtocol.proto
</include>
</includes>
</source>
<output>
${
project
.
build
.
directory
}
/generated-sources/java
</output>
</configuration>
</execution>
<execution>
<id>
compile-protoc-qjournal
</id>
<phase>
generate-sources
</phase>
<goals>
<goal>
protoc
</goal>
</goals>
<configuration>
<protocVersion>
${
protobuf
.
version
}
</protocVersion>
<protocCommand>
${
protoc
.
path
}
</protocCommand>
<imports>
<param>
${
basedir
}
/../../hadoop-common-project/hadoop-common/src/main/proto
</param>
<param>
${
basedir
}
/src/main/proto
</param>
</imports>
<source>
<directory>
${
basedir
}
/src/main/proto
</directory>
<includes>
<include>
QJournalProtocol.proto
</include>
</includes>
</source>
<output>
${
project
.
build
.
directory
}
/generated-sources/java
</output>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>
org.apache.maven.plugins
</groupId>
<artifactId>
maven-javadoc-plugin
</artifactId>
<configuration>
<excludePackageNames>
org.apache.hadoop.hdfs.protocol.proto
</excludePackageNames>
</configuration>
</plugin>
<plugin>
<groupId>
org.apache.rat
</groupId>
<artifactId>
apache-rat-plugin
</artifactId>
<configuration>
<excludes>
<exclude>
CHANGES.txt
</exclude>
<exclude>
CHANGES.HDFS-1623.txt
</exclude>
<exclude>
.idea/**
</exclude>
<exclude>
src/main/conf/*
</exclude>
<exclude>
src/main/docs/**
</exclude>
<exclude>
dev-support/findbugsExcludeFile.xml
</exclude>
<exclude>
dev-support/checkstyle*
</exclude>
<exclude>
dev-support/jdiff/**
</exclude>
<exclude>
dev-support/*tests
</exclude>
<exclude>
src/main/native/*
</exclude>
<exclude>
src/main/native/config/*
</exclude>
<exclude>
src/main/native/m4/*
</exclude>
<exclude>
src/test/empty-file
</exclude>
<exclude>
src/test/all-tests
</exclude>
<exclude>
src/test/resources/*.tgz
</exclude>
<exclude>
src/test/resources/data*
</exclude>
<exclude>
src/test/resources/editsStored*
</exclude>
<exclude>
src/test/resources/empty-file
</exclude>
<exclude>
src/main/native/util/tree.h
</exclude>
<exclude>
src/test/aop/org/apache/hadoop/hdfs/server/datanode/DataXceiverAspects.aj
</exclude>
<exclude>
src/main/webapps/datanode/robots.txt
</exclude>
<exclude>
src/main/docs/releasenotes.html
</exclude>
<exclude>
src/contrib/**
</exclude>
<exclude>
src/site/resources/images/*
</exclude>
<exclude>
src/main/webapps/static/bootstrap-3.0.2/**
</exclude>
<exclude>
src/main/webapps/static/dust-full-2.0.0.min.js
</exclude>
<exclude>
src/main/webapps/static/dust-helpers-1.1.1.min.js
</exclude>
<exclude>
src/main/webapps/static/jquery-1.10.2.min.js
</exclude>
</excludes>
</configuration>
</plugin>
</plugins>
</build>
<profiles>
<profile>
<id>
windows
</id>
<activation>
<activeByDefault>
false
</activeByDefault>
<os>
<family>
windows
</family>
</os>
</activation>
<properties>
<windows.build>
true
</windows.build>
</properties>
</profile>
<profile>
<id>
native
</id>
<activation>
<activeByDefault>
false
</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<groupId>
org.apache.maven.plugins
</groupId>
<artifactId>
maven-antrun-plugin
</artifactId>
<executions>
<execution>
<id>
make
</id>
<phase>
compile
</phase>
<goals><goal>
run
</goal></goals>
<configuration>
<target>
<mkdir
dir=
"
${
project
.
build
.
directory
}
/native"
/>
<exec
executable=
"cmake"
dir=
"
${
project
.
build
.
directory
}
/native"
failonerror=
"true"
>
<arg
line=
"
${
basedir
}
/src/ -DGENERATED_JAVAH=
${
project
.
build
.
directory
}
/native/javah -DJVM_ARCH_DATA_MODEL=
${
sun
.
arch
.
data
.
model
}
-DREQUIRE_LIBWEBHDFS=
${
require
.
libwebhdfs
}
-DREQUIRE_FUSE=
${
require
.
fuse
}
"
/>
</exec>
<exec
executable=
"make"
dir=
"
${
project
.
build
.
directory
}
/native"
failonerror=
"true"
>
<arg
line=
"VERBOSE=1"
/>
</exec>
<!-- The second make is a workaround for HADOOP-9215. It can
be removed when version 2.6 of cmake is no longer supported . -->
<exec
executable=
"make"
dir=
"
${
project
.
build
.
directory
}
/native"
failonerror=
"true"
></exec>
</target>
</configuration>
</execution>
<execution>
<id>
native_tests
</id>
<phase>
test
</phase>
<goals><goal>
run
</goal></goals>
<configuration>
<target>
<property
name=
"compile_classpath"
refid=
"maven.compile.classpath"
/>
<property
name=
"test_classpath"
refid=
"maven.test.classpath"
/>
<exec
executable=
"sh"
failonerror=
"true"
dir=
"
${
project
.
build
.
directory
}
/native/"
>
<arg
value=
"-c"
/>
<arg
value=
"[ x
$
SKIPTESTS = xtrue ] ||
${
project
.
build
.
directory
}
/native/test_libhdfs_threaded"
/>
<env
key=
"CLASSPATH"
value=
"
${
test_classpath
}
:
${
compile_classpath
}
"
/>
<env
key=
"SKIPTESTS"
value=
"
${
skipTests
}
"
/>
</exec>
<exec
executable=
"sh"
failonerror=
"true"
dir=
"
${
project
.
build
.
directory
}
/native/"
>
<arg
value=
"-c"
/>
<arg
value=
"[ x
$
SKIPTESTS = xtrue ] ||
${
project
.
build
.
directory
}
/native/test_libhdfs_threaded"
/>
<env
key=
"CLASSPATH"
value=
"
${
test_classpath
}
:
${
compile_classpath
}
"
/>
<env
key=
"SKIPTESTS"
value=
"
${
skipTests
}
"
/>
</exec>
</target>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>
parallel-tests
</id>
<build>
<plugins>
<plugin>
<artifactId>
maven-antrun-plugin
</artifactId>
<executions>
<execution>
<id>
create-parallel-tests-dirs
</id>
<phase>
test-compile
</phase>
<configuration>
<target>
<exec
executable=
"sh"
>
<arg
value=
"-c"
/>
<arg
value=
"for i in {1..
${
testsThreadCount
}
}; do mkdir -p
${
test
.
build
.
data
}
/
$
i; mkdir -p
${
hadoop
.
tmp
.
dir
}
/
$
i; done"
/>
</exec>
</target>
</configuration>
<goals>
<goal>
run
</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>
org.apache.maven.plugins
</groupId>
<artifactId>
maven-surefire-plugin
</artifactId>
<configuration>
<forkCount>
${
testsThreadCount
}
</forkCount>
<argLine>
-Xmx1024m -XX:+HeapDumpOnOutOfMemoryError -DminiClusterDedicatedDirs=true
</argLine>
<systemPropertyVariables>
<test.build.data>
${
test
.
build
.
data
}
/
${
surefire
.
forkNumber
}
</test.build.data>
<hadoop.tmp.dir>
${
hadoop
.
tmp
.
dir
}
/
${
surefire
.
forkNumber
}
</hadoop.tmp.dir>
</systemPropertyVariables>
</configuration>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>
Event Timeline
Log In to Comment