File F102972534: pom.xml
File Metadata
  Created: Wed, Feb 26, 00:33
  Size: 9 KB
  Mime Type: text/xml
  Expires: Fri, Feb 28, 00:33 (1 d, 23 h)
  Engine: blob
  Format: Raw Data
  Handle: 24349886
  Attached To: R3704 elastic-yarn
pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<!--
  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
  You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License. See accompanying LICENSE file.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <parent>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-project</artifactId>
    <version>2.3.0</version>
    <relativePath>../hadoop-project</relativePath>
  </parent>
  <groupId>org.apache.hadoop</groupId>
  <artifactId>hadoop-mapreduce</artifactId>
  <version>2.3.0</version>
  <packaging>pom</packaging>
  <name>hadoop-mapreduce</name>
  <url>http://hadoop.apache.org/mapreduce/</url>

  <properties>
    <test.logs>true</test.logs>
    <test.timeout>600000</test.timeout>
    <fork.mode>once</fork.mode>
    <mr.basedir>${basedir}</mr.basedir>
    <hadoop.component>mapreduce</hadoop.component>
    <is.hadoop.component>true</is.hadoop.component>
  </properties>

  <modules>
    <module>hadoop-mapreduce-client</module>
    <module>hadoop-mapreduce-examples</module>
  </modules>

  <dependencies>
    <dependency>
      <groupId>com.google.protobuf</groupId>
      <artifactId>protobuf-java</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.avro</groupId>
      <artifactId>avro</artifactId>
      <exclusions>
        <exclusion>
          <groupId>org.mortbay.jetty</groupId>
          <artifactId>jetty</artifactId>
        </exclusion>
        <exclusion>
          <groupId>org.apache.ant</groupId>
          <artifactId>ant</artifactId>
        </exclusion>
        <exclusion>
          <groupId>io.netty</groupId>
          <artifactId>netty</artifactId>
        </exclusion>
        <exclusion>
          <groupId>org.apache.velocity</groupId>
          <artifactId>velocity</artifactId>
        </exclusion>
        <exclusion>
          <groupId>org.slf4j</groupId>
          <artifactId>slf4j-api</artifactId>
        </exclusion>
        <exclusion>
          <artifactId>paranamer-ant</artifactId>
          <groupId>com.thoughtworks.paranamer</groupId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <scope>provided</scope>
      <exclusions>
        <exclusion>
          <groupId>commons-el</groupId>
          <artifactId>commons-el</artifactId>
        </exclusion>
        <exclusion>
          <groupId>tomcat</groupId>
          <artifactId>jasper-runtime</artifactId>
        </exclusion>
        <exclusion>
          <groupId>tomcat</groupId>
          <artifactId>jasper-compiler</artifactId>
        </exclusion>
        <exclusion>
          <groupId>org.mortbay.jetty</groupId>
          <artifactId>jsp-2.1-jetty</artifactId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>slf4j-api</artifactId>
    </dependency>
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>slf4j-log4j12</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-annotations</artifactId>
    </dependency>
    <dependency>
      <groupId>org.mockito</groupId>
      <artifactId>mockito-all</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <type>test-jar</type>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>com.google.inject</groupId>
      <artifactId>guice</artifactId>
    </dependency>
    <dependency>
      <groupId>com.sun.jersey</groupId>
      <artifactId>jersey-server</artifactId>
    </dependency>
    <dependency>
      <groupId>com.sun.jersey.contribs</groupId>
      <artifactId>jersey-guice</artifactId>
    </dependency>
    <dependency>
      <groupId>com.google.inject.extensions</groupId>
      <artifactId>guice-servlet</artifactId>
    </dependency>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
    </dependency>
    <dependency>
      <groupId>io.netty</groupId>
      <artifactId>netty</artifactId>
    </dependency>
    <dependency>
      <groupId>commons-io</groupId>
      <artifactId>commons-io</artifactId>
    </dependency>
    <dependency>
      <groupId>org.hsqldb</groupId>
      <artifactId>hsqldb</artifactId>
      <scope>compile</scope>
    </dependency>
  </dependencies>

  <build>
    <plugins>
      <plugin>
        <artifactId>maven-antrun-plugin</artifactId>
        <executions>
          <execution>
            <id>tar</id>
            <phase>package</phase>
            <goals>
              <goal>run</goal>
            </goals>
            <configuration>
              <!-- this is identical from hadoop-project-dist, eventually they must be unified -->
              <target if="tar">
                <!-- Using Unix script to preserve symlinks -->
                <echo file="${project.build.directory}/dist-maketar.sh">
                  cd "${project.build.directory}"
                  tar cf - ${project.artifactId}-${project.version} | gzip > ${project.artifactId}-${project.version}.tar.gz
                </echo>
                <exec executable="sh" dir="${project.build.directory}" failonerror="true">
                  <arg line="./dist-maketar.sh"/>
                </exec>
              </target>
            </configuration>
          </execution>
        </executions>
      </plugin>
      <plugin>
        <groupId>org.codehaus.mojo</groupId>
        <artifactId>findbugs-maven-plugin</artifactId>
        <configuration>
          <findbugsXmlOutput>true</findbugsXmlOutput>
          <xmlOutput>true</xmlOutput>
          <excludeFilterFile>${mr.basedir}/dev-support/findbugs-exclude.xml</excludeFilterFile>
          <effort>Max</effort>
        </configuration>
      </plugin>
      <plugin>
        <groupId>org.apache.rat</groupId>
        <artifactId>apache-rat-plugin</artifactId>
        <configuration>
          <excludes>
            <exclude>.eclipse.templates/</exclude>
            <exclude>CHANGES.txt</exclude>
            <exclude>lib/jdiff/**</exclude>
          </excludes>
        </configuration>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-surefire-plugin</artifactId>
        <configuration>
          <properties>
            <property>
              <name>listener</name>
              <value>org.apache.hadoop.test.TimedOutTestsListener</value>
            </property>
          </properties>
        </configuration>
      </plugin>
    </plugins>
  </build>

  <profiles>
    <profile>
      <id>dist</id>
      <activation>
        <activeByDefault>false</activeByDefault>
      </activation>
      <build>
        <plugins>
          <plugin>
            <artifactId>maven-source-plugin</artifactId>
            <executions>
              <execution>
                <id>attach-sources</id>
                <goals>
                  <!-- avoid warning about recursion -->
                  <goal>jar-no-fork</goal>
                </goals>
              </execution>
            </executions>
          </plugin>
          <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-assembly-plugin</artifactId>
            <dependencies>
              <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-assemblies</artifactId>
                <version>${project.version}</version>
              </dependency>
            </dependencies>
            <configuration>
              <tarLongFileMode>gnu</tarLongFileMode>
              <appendAssemblyId>false</appendAssemblyId>
              <attach>false</attach>
              <finalName>${project.artifactId}-${project.version}</finalName>
              <descriptorRefs>
                <descriptorRef>hadoop-mapreduce-dist</descriptorRef>
              </descriptorRefs>
            </configuration>
            <executions>
              <execution>
                <id>package-mapreduce</id>
                <phase>prepare-package</phase>
                <goals>
                  <goal>single</goal>
                </goals>
              </execution>
            </executions>
          </plugin>
        </plugins>
      </build>
    </profile>
  </profiles>

  <reporting>
    <plugins>
      <plugin>
        <groupId>org.codehaus.mojo</groupId>
        <artifactId>findbugs-maven-plugin</artifactId>
        <!-- until we have reporting management cf. MSITE-443 -->
        <version>2.3.2</version>
        <configuration>
          <findbugsXmlOutput>true</findbugsXmlOutput>
          <xmlOutput>true</xmlOutput>
        </configuration>
      </plugin>
    </plugins>
  </reporting>
</project>
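
Build note (not part of the file above; a sketch based only on the dist profile and the antrun target's if="tar" guard defined in this POM): a distribution tarball build of this module tree would typically be driven by enabling the dist profile and setting a tar property so the guarded Ant target runs, for example:

  # hypothetical invocation; profile id "dist" and property "tar" are taken from the POM above,
  # -DskipTests is an optional convenience flag
  mvn package -Pdist -Dtar -DskipTests

With -Pdist the assembly plugin builds ${project.artifactId}-${project.version} during prepare-package, and the tar property satisfies the if="tar" condition so the generated dist-maketar.sh script is executed to produce the corresponding .tar.gz.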