Build 2.2.0-2

Robert Rati 2013-12-03 13:00:52 -05:00
parent dff591baa2
commit 25f0554dab
2 changed files with 243 additions and 202 deletions

hadoop-fedora-integration.patch

@@ -161,7 +161,7 @@ index 6059d8c..bba1a00 100644
System.out.println("Running embedded servlet container at: http://" + host + ":" + port);
}
diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml
index 89691c6..37e73b5 100644
index 89691c6..82458e7 100644
--- a/hadoop-common-project/hadoop-common/pom.xml
+++ b/hadoop-common-project/hadoop-common/pom.xml
@@ -53,7 +53,7 @@
@@ -220,7 +220,7 @@ index 89691c6..37e73b5 100644
<dependency>
<groupId>com.sun.jersey</groupId>
@@ -113,21 +136,26 @@
@@ -113,21 +136,30 @@
<artifactId>jersey-server</artifactId>
<scope>compile</scope>
</dependency>
@@ -228,42 +228,44 @@ index 89691c6..37e73b5 100644
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-servlet</artifactId>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.glassfish.web</groupId>
+ <artifactId>javax.servlet.jsp</artifactId>
+ </dependency>
<dependency>
- <groupId>tomcat</groupId>
- <artifactId>jasper-compiler</artifactId>
- <scope>runtime</scope>
+ <groupId>org.glassfish.web</groupId>
+ <artifactId>javax.servlet.jsp</artifactId>
+ <version>2.2.6</version>
+ <groupId>org.apache.tomcat</groupId>
+ <artifactId>tomcat-jasper</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
- <groupId>tomcat</groupId>
- <artifactId>jasper-runtime</artifactId>
- <scope>runtime</scope>
+ <groupId>org.apache.tomcat</groupId>
+ <artifactId>tomcat-servlet-api</artifactId>
+ <version>7.0.37</version>
+ <groupId>org.apache.tomcat</groupId>
+ <artifactId>tomcat-servlet-api</artifactId>
+ <version>7.0.37</version>
</dependency>
<dependency>
- <groupId>javax.servlet.jsp</groupId>
- <artifactId>jsp-api</artifactId>
- <scope>runtime</scope>
+ <groupId>org.apache.tomcat</groupId>
+ <artifactId>tomcat-el-api</artifactId>
+ <version>7.0.37</version>
+ <groupId>org.apache.tomcat</groupId>
+ <artifactId>tomcat-el-api</artifactId>
+ <version>7.0.37</version>
</dependency>
<dependency>
<groupId>commons-el</groupId>
@@ -213,6 +241,11 @@
@@ -213,6 +245,10 @@
<groupId>com.jcraft</groupId>
<artifactId>jsch</artifactId>
</dependency>
+ <dependency>
+ <groupId>com.google.code.findbugs</groupId>
+ <artifactId>jsr305</artifactId>
+ <version>1.3.9</version>
+ </dependency>
<dependency>
@@ -1596,7 +1598,7 @@ index 4442281..44cf67e 100644
throw new RuntimeException("It should never happen, " + ex.getMessage(), ex);
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs-nfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs-nfs/pom.xml
index 13872c3..c2ceab7 100644
index 13872c3..38d2a14 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-nfs/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs-nfs/pom.xml
@@ -85,12 +85,12 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
@@ -1615,20 +1617,30 @@ index 13872c3..c2ceab7 100644
<artifactId>jetty-util</artifactId>
<scope>compile</scope>
</dependency>
@@ -180,11 +180,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
@@ -135,8 +135,8 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<scope>compile</scope>
</dependency>
<dependency>
- <groupId>javax.servlet.jsp</groupId>
- <artifactId>jsp-api</artifactId>
+ <groupId>org.glassfish.web</groupId>
+ <artifactId>javax.servlet.jsp</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
@@ -180,8 +180,8 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<scope>compile</scope>
</dependency>
<dependency>
- <groupId>tomcat</groupId>
- <artifactId>jasper-runtime</artifactId>
- <scope>compile</scope>
- </dependency>
- <dependency>
<groupId>xmlenc</groupId>
<artifactId>xmlenc</artifactId>
+ <groupId>org.apache.tomcat</groupId>
+ <artifactId>tomcat-jasper</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
diff --git a/hadoop-hdfs-project/hadoop-hdfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
index f8f2918..fd43842 100644
index f8f2918..b62015e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
@@ -71,12 +71,12 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
@@ -1671,19 +1683,18 @@ index f8f2918..fd43842 100644
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
@@ -166,11 +156,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
@@ -166,8 +156,8 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<scope>compile</scope>
</dependency>
<dependency>
- <groupId>tomcat</groupId>
- <artifactId>jasper-runtime</artifactId>
- <scope>compile</scope>
- </dependency>
- <dependency>
<groupId>xmlenc</groupId>
<artifactId>xmlenc</artifactId>
+ <groupId>org.apache.tomcat</groupId>
+ <artifactId>tomcat-jasper</artifactId>
<scope>compile</scope>
@@ -192,101 +177,90 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
</dependency>
<dependency>
@@ -192,101 +182,77 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
</configuration>
</plugin>
<plugin>
@@ -1794,42 +1805,33 @@ index f8f2918..fd43842 100644
</execution>
</executions>
<dependencies>
+ <!-- volatile mix of glassfish 2.2 jspc impl & Tomcat's servlet 3.0 api
+ EXERCISE CAUTION when messing with these
+ -->
+ <dependency>
+ <groupId>org.glassfish.web</groupId>
+ <artifactId>javax.servlet.jsp</artifactId>
+ <version>2.2.5</version>
+ <scope>runtime</scope>
+ </dependency>
<dependency>
- <groupId>org.codehaus.mojo.jspc</groupId>
- <artifactId>jspc-compiler-tomcat5</artifactId>
- <version>2.0-alpha-3</version>
+ <groupId>org.apache.tomcat</groupId>
+ <artifactId>tomcat-servlet-api</artifactId>
+ <version>7.0.37</version>
</dependency>
<dependency>
- </dependency>
- <dependency>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-log4j12</artifactId>
- <version>1.4.1</version>
+ <groupId>org.apache.tomcat</groupId>
+ <artifactId>tomcat-el-api</artifactId>
+ <version>7.0.37</version>
</dependency>
<dependency>
- </dependency>
- <dependency>
- <groupId>org.slf4j</groupId>
- <artifactId>jcl104-over-slf4j</artifactId>
- <version>1.4.1</version>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${project.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>javax.servlet</groupId>
+ <artifactId>servlet-api</artifactId>
+ </exclusion>
+ </exclusions>
</dependency>
</dependencies>
</plugin>
@@ -329,7 +303,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
@@ -329,7 +295,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
</execution>
<execution>
<id>create-web-xmls</id>
@@ -2463,7 +2465,7 @@ index 11640d7..6290e72 100644
</exclusions>
</dependency>
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index b7a7456..abdf900 100644
index b7a7456..0bee9b1 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -63,7 +63,7 @@
@@ -2516,7 +2518,20 @@ index b7a7456..abdf900 100644
</dependency>
<dependency>
@@ -404,12 +398,23 @@
@@ -368,6 +362,12 @@
</dependency>
<dependency>
+ <groupId>org.glassfish.web</groupId>
+ <artifactId>javax.servlet.jsp</artifactId>
+ <version>2.2.6</version>
+ </dependency>
+
+ <dependency>
<groupId>org.codehaus.plexus</groupId>
<artifactId>plexus-utils</artifactId>
<version>2.0.5</version>
@@ -404,12 +404,23 @@
<artifactId>jersey-server</artifactId>
<version>${jersey.version}</version>
</dependency>
@@ -2540,54 +2555,50 @@ index b7a7456..abdf900 100644
<dependency>
<groupId>com.google.inject.extensions</groupId>
@@ -448,34 +453,22 @@
@@ -448,14 +459,9 @@
</dependency>
<dependency>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>jetty-servlet-tester</artifactId>
- <version>6.1.26</version>
- </dependency>
- <dependency>
- <groupId>tomcat</groupId>
- <artifactId>jasper-compiler</artifactId>
- <version>5.5.23</version>
+ <groupId>org.apache.tomcat</groupId>
+ <artifactId>tomcat-jasper</artifactId>
+ <version>7.0.37</version>
<exclusions>
<exclusion>
<groupId>javax.servlet</groupId>
@@ -465,17 +471,16 @@
<groupId>ant</groupId>
<artifactId>ant</artifactId>
</exclusion>
+ <exclusion>
+ <groupId>org.eclipse.jdt.core.compiler</groupId>
+ <artifactId>ecj</artifactId>
+ </exclusion>
</exclusions>
</dependency>
<dependency>
- <groupId>tomcat</groupId>
- <artifactId>jasper-runtime</artifactId>
- <version>5.5.23</version>
- </dependency>
- <dependency>
- <groupId>javax.servlet.jsp</groupId>
- <artifactId>jsp-api</artifactId>
- <version>2.1</version>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>test-jetty-servlet</artifactId>
+ <version>9.0.4.v20130625</version>
</dependency>
+ <!-- volatile mix of glassfish 2.2 jspc & Tomcat's servlet 3.0 api
+ EXERCISE CAUTION when messing with these
+ -->
<dependency>
- <groupId>tomcat</groupId>
- <artifactId>jasper-compiler</artifactId>
- <version>5.5.23</version>
- <exclusions>
- <exclusion>
- <groupId>javax.servlet</groupId>
- <artifactId>jsp-api</artifactId>
- </exclusion>
- <exclusion>
- <groupId>ant</groupId>
- <artifactId>ant</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>tomcat</groupId>
- <artifactId>jasper-runtime</artifactId>
- <version>5.5.23</version>
+ <groupId>org.glassfish.web</groupId>
+ <artifactId>javax.servlet.jsp</artifactId>
+ <version>2.2.5</version>
</dependency>
<dependency>
- <groupId>javax.servlet.jsp</groupId>
- <artifactId>jsp-api</artifactId>
- <version>2.1</version>
+ <groupId>org.apache.tomcat</groupId>
+ <artifactId>tomcat-servlet-api</artifactId>
+ <version>7.0.37</version>
</dependency>
<dependency>
<groupId>commons-el</groupId>
@@ -483,6 +476,11 @@
@@ -483,6 +488,11 @@
<version>1.0</version>
</dependency>
<dependency>
@@ -2599,7 +2610,7 @@ index b7a7456..abdf900 100644
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<version>1.1.1</version>
@@ -536,7 +534,7 @@
@@ -536,7 +546,7 @@
<dependency>
<groupId>net.java.dev.jets3t</groupId>
<artifactId>jets3t</artifactId>
@@ -2608,7 +2619,7 @@ index b7a7456..abdf900 100644
</dependency>
<dependency>
<groupId>org.apache.mina</groupId>
@@ -704,7 +702,7 @@
@@ -704,7 +714,7 @@
<dependency>
<groupId>org.apache.bookkeeper</groupId>
<artifactId>bookkeeper-server</artifactId>
@@ -2617,7 +2628,19 @@ index b7a7456..abdf900 100644
<scope>compile</scope>
</dependency>
<dependency>
@@ -797,9 +795,9 @@
@@ -712,6 +722,11 @@
<artifactId>hsqldb</artifactId>
<version>2.0.0</version>
</dependency>
+ <dependency>
+ <groupId>com.google.code.findbugs</groupId>
+ <artifactId>jsr305</artifactId>
+ <version>1.3.9</version>
+ </dependency>
</dependencies>
</dependencyManagement>
@@ -797,9 +812,9 @@
<version>${avro.version}</version>
</plugin>
<plugin>

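Taken together, the pom hunks above retire the old tomcat 5.5 jasper-compiler/jasper-runtime and javax.servlet.jsp:jsp-api artifacts in favor of the GlassFish JSP implementation plus Tomcat 7's servlet and EL API jars. As a rough orientation sketch only — consolidated from the hunks above, not copied from any single pom, with the exclusion list abridged to the one this commit adds — the managed dependencies end up along these lines:

    <!-- Sketch assembled from this commit's hunks; not an exact pom excerpt. -->
    <dependencyManagement>
      <dependencies>
        <dependency>
          <groupId>org.glassfish.web</groupId>
          <artifactId>javax.servlet.jsp</artifactId>
          <version>2.2.6</version>
        </dependency>
        <dependency>
          <groupId>org.apache.tomcat</groupId>
          <artifactId>tomcat-jasper</artifactId>
          <version>7.0.37</version>
          <exclusions>
            <!-- keep the bundled Eclipse compiler out of the build -->
            <exclusion>
              <groupId>org.eclipse.jdt.core.compiler</groupId>
              <artifactId>ecj</artifactId>
            </exclusion>
          </exclusions>
        </dependency>
        <dependency>
          <groupId>org.apache.tomcat</groupId>
          <artifactId>tomcat-servlet-api</artifactId>
          <version>7.0.37</version>
        </dependency>
        <dependency>
          <groupId>org.apache.tomcat</groupId>
          <artifactId>tomcat-el-api</artifactId>
          <version>7.0.37</version>
        </dependency>
      </dependencies>
    </dependencyManagement>

Note that two JSP versions are in play: 2.2.6 in dependencyManagement versus 2.2.5 inside the jspc-maven-plugin dependency override — the "EXERCISE CAUTION" comments in the hunks above flag exactly this glassfish/Tomcat mix.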
hadoop.spec

@@ -22,12 +22,12 @@
%global httpfs_services hadoop-httpfs.service
# Filter out undesired provides and requires
%global __requires_exclude_from ^%{_libdir}/%{name}/libhadoop.so.*$
%global __requires_exclude_from ^%{_libdir}/%{name}/libhadoop.so$
%global __provides_exclude_from ^%{_libdir}/%{name}/.*$
Name: hadoop
Version: 2.2.0
Release: 1%{?dist}
Release: 2%{?dist}
Summary: A software platform for processing vast amounts of data
# The BSD license file is missing
# https://issues.apache.org/jira/browse/HADOOP-9849
@@ -50,13 +50,16 @@ Source12: hadoop-httpfs-env.sh
Source13: hdfs-create-dirs
# This patch includes the following upstream tickets:
# https://issues.apache.org/jira/browse/HADOOP-9594
# https://issues.apache.org/jira/browse/HADOOP-9605
# https://issues.apache.org/jira/browse/HADOOP-9607
# https://issues.apache.org/jira/browse/HADOOP-9610
# https://issues.apache.org/jira/browse/MAPREDUCE-5431
# https://issues.apache.org/jira/browse/HADOOP-9611
# https://issues.apache.org/jira/browse/HADOOP-9613
# https://issues.apache.org/jira/browse/HADOOP-9623
# https://issues.apache.org/jira/browse/HADOOP-9650
# https://issues.apache.org/jira/browse/HDFS-5411
# https://issues.apache.org/jira/browse/HADOOP-10067
# https://issues.apache.org/jira/browse/HDFS-5075
# https://issues.apache.org/jira/browse/HADOOP-10068
# https://issues.apache.org/jira/browse/HADOOP-10075
# https://issues.apache.org/jira/browse/HADOOP-10076
Patch0: hadoop-fedora-integration.patch
# Fedora packaging guidelines for JNI library loading
Patch2: hadoop-jni-library-loading.patch
@@ -134,7 +137,6 @@ BuildRequires: jettison
BuildRequires: jetty
BuildRequires: jetty-jspc-maven-plugin
BuildRequires: jetty-util-ajax
BuildRequires: jline
BuildRequires: jsch
BuildRequires: json_simple
BuildRequires: jsr-305
@@ -191,13 +193,13 @@ BuildRequires: jersey-test-framework
BuildRequires: maven-surefire-provider-junit4
%description
Hadoop is a framework that allows for the distributed processing of large data
sets across clusters of computers using simple programming models. It is
designed to scale up from single servers to thousands of machines, each
Apache Hadoop is a framework that allows for the distributed processing of
large data sets across clusters of computers using simple programming models.
It is designed to scale up from single servers to thousands of machines, each
offering local computation and storage.
%package client
Summary: Libraries for Hadoop clients
Summary: Libraries for Apache Hadoop clients
Group: Applications/System
BuildArch: noarch
Requires: %{name}-common = %{version}-%{release}
@@ -206,15 +208,15 @@ Requires: %{name}-mapreduce = %{version}-%{release}
Requires: %{name}-yarn = %{version}-%{release}
%description client
Hadoop is a framework that allows for the distributed processing of large data
sets across clusters of computers using simple programming models. It is
designed to scale up from single servers to thousands of machines, each
Apache Hadoop is a framework that allows for the distributed processing of
large data sets across clusters of computers using simple programming models.
It is designed to scale up from single servers to thousands of machines, each
offering local computation and storage.
This package provides libraries for Hadoop clients.
This package provides libraries for Apache Hadoop clients.
%package common
Summary: Common files needed by Hadoop daemons
Summary: Common files needed by Apache Hadoop daemons
Group: Applications/System
BuildArch: noarch
Requires: /usr/sbin/useradd
@@ -255,38 +257,39 @@ Requires: txw2
Requires: which
%description common
Hadoop is a framework that allows for the distributed processing of large data
sets across clusters of computers using simple programming models. It is
designed to scale up from single servers to thousands of machines, each
Apache Hadoop is a framework that allows for the distributed processing of
large data sets across clusters of computers using simple programming models.
It is designed to scale up from single servers to thousands of machines, each
offering local computation and storage.
This package contains common files and utilities needed by other Hadoop modules.
This package contains common files and utilities needed by other Apache
Hadoop modules.
%package common-native
Summary: The native Hadoop library file
Summary: The native Apache Hadoop library file
Group: Applications/System
Requires: %{name}-common = %{version}-%{release}
%description common-native
Hadoop is a framework that allows for the distributed processing of large data
sets across clusters of computers using simple programming models. It is
designed to scale up from single servers to thousands of machines, each
Apache Hadoop is a framework that allows for the distributed processing of
large data sets across clusters of computers using simple programming models.
It is designed to scale up from single servers to thousands of machines, each
offering local computation and storage.
This package contains the native-hadoop library
%if %{package_libhdfs}
%package devel
Summary: Headers for Hadoop
Summary: Headers for Apache Hadoop
Group: Development/System
Requires: libhdfs%{?_isa} = %{version}-%{release}
%description devel
Header files for Hadoop's hdfs library and other utilities
Header files for Apache Hadoop's hdfs library and other utilities
%endif
%package hdfs
Summary: The Hadoop Distributed File System
Summary: The Apache Hadoop Distributed File System
Group: Applications/System
BuildArch: noarch
Requires: apache-commons-daemon-jsvc
@@ -297,17 +300,17 @@ Requires(preun): systemd
Requires(postun): systemd
%description hdfs
Hadoop is a framework that allows for the distributed processing of large data
sets across clusters of computers using simple programming models. It is
designed to scale up from single servers to thousands of machines, each
Apache Hadoop is a framework that allows for the distributed processing of
large data sets across clusters of computers using simple programming models.
It is designed to scale up from single servers to thousands of machines, each
offering local computation and storage.
The Hadoop Distributed File System (HDFS) is the primary storage system
used by Hadoop applications.
used by Apache Hadoop applications.
%if %{package_libhdfs}
%package hdfs-fuse
Summary: Allows mounting of Hadoop HDFS
Summary: Allows mounting of Apache Hadoop HDFS
Group: Development/Libraries
Requires: fuse
Requires: libhdfs%{?_isa} = %{version}-%{release}
@@ -317,9 +320,9 @@ Requires: %{name}-mapreduce = %{version}-%{release}
Requires: %{name}-yarn = %{version}-%{release}
%description hdfs-fuse
Hadoop is a framework that allows for the distributed processing of large data
sets across clusters of computers using simple programming models. It is
designed to scale up from single servers to thousands of machines, each
Apache Hadoop is a framework that allows for the distributed processing of
large data sets across clusters of computers using simple programming models.
It is designed to scale up from single servers to thousands of machines, each
offering local computation and storage.
This package provides tools that allow HDFS to be mounted as a standard
@@ -341,9 +344,9 @@ Requires(preun): systemd
Requires(postun): systemd
%description httpfs
Hadoop is a framework that allows for the distributed processing of large data
sets across clusters of computers using simple programming models. It is
designed to scale up from single servers to thousands of machines, each
Apache Hadoop is a framework that allows for the distributed processing of
large data sets across clusters of computers using simple programming models.
It is designed to scale up from single servers to thousands of machines, each
offering local computation and storage.
This package provides a server that provides HTTP REST API support for
@@ -354,32 +357,32 @@ the complete FileSystem/FileContext interface in HDFS.
# most architectures so only generate on intel 64-bit
%ifarch x86_64
%package javadoc
Summary: Javadoc for Hadoop
Summary: Javadoc for Apache Hadoop
Group: Documentation
BuildArch: noarch
%description javadoc
This package contains the API documentation for %{name}
This package contains the API documentation for %{name}.
%endif
%if %{package_libhdfs}
%package -n libhdfs
Summary: The Hadoop Filesystem Library
Summary: The Apache Hadoop Filesystem Library
Group: Development/Libraries
Requires: %{name}-hdfs = %{version}-%{release}
Requires: lzo
%description -n libhdfs
Hadoop is a framework that allows for the distributed processing of large data
sets across clusters of computers using simple programming models. It is
designed to scale up from single servers to thousands of machines, each
Apache Hadoop is a framework that allows for the distributed processing of
large data sets across clusters of computers using simple programming models.
It is designed to scale up from single servers to thousands of machines, each
offering local computation and storage.
This package provides the Hadoop Filesystem Library.
This package provides the Apache Hadoop Filesystem Library.
%endif
%package mapreduce
Summary: Hadoop MapReduce (MRv2)
Summary: Apache Hadoop MapReduce (MRv2)
Group: Applications/System
BuildArch: noarch
Requires(pre): %{name}-common = %{version}-%{release}
@@ -388,15 +391,15 @@ Requires(preun): systemd
Requires(postun): systemd
%description mapreduce
Hadoop is a framework that allows for the distributed processing of large data
sets across clusters of computers using simple programming models. It is
designed to scale up from single servers to thousands of machines, each
Apache Hadoop is a framework that allows for the distributed processing of
large data sets across clusters of computers using simple programming models.
It is designed to scale up from single servers to thousands of machines, each
offering local computation and storage.
This package provides Hadoop MapReduce (MRv2).
This package provides Apache Hadoop MapReduce (MRv2).
%package mapreduce-examples
Summary: Hadoop MapReduce (MRv2) examples
Summary: Apache Hadoop MapReduce (MRv2) examples
Group: Applications/System
BuildArch: noarch
Requires: %{name}-mapreduce = %{version}-%{release}
@@ -406,16 +409,16 @@ Requires: hsqldb
This package contains mapreduce examples.
%package maven-plugin
Summary: Hadoop maven plugin
Summary: Apache Hadoop maven plugin
Group: Development/Libraries
BuildArch: noarch
Requires: maven
%description maven-plugin
The Hadoop maven plugin
The Apache Hadoop maven plugin
%package tests
Summary: Hadoop test resources
Summary: Apache Hadoop test resources
BuildArch: noarch
Requires: %{name}-common = %{version}-%{release}
Requires: %{name}-hdfs = %{version}-%{release}
@@ -423,15 +426,15 @@ Requires: %{name}-mapreduce = %{version}-%{release}
Requires: %{name}-yarn = %{version}-%{release}
%description tests
Hadoop is a framework that allows for the distributed processing of large data
sets across clusters of computers using simple programming models. It is
designed to scale up from single servers to thousands of machines, each
Apache Hadoop is a framework that allows for the distributed processing of
large data sets across clusters of computers using simple programming models.
It is designed to scale up from single servers to thousands of machines, each
offering local computation and storage.
This package contains test related resources for Hadoop.
This package contains test related resources for Apache Hadoop.
%package yarn
Summary: Hadoop YARN
Summary: Apache Hadoop YARN
Group: Applications/System
BuildArch: noarch
Requires(pre): %{name}-common = %{version}-%{release}
@@ -443,26 +446,25 @@ Requires(preun): systemd
Requires(postun): systemd
%description yarn
Hadoop is a framework that allows for the distributed processing of large data
sets across clusters of computers using simple programming models. It is
designed to scale up from single servers to thousands of machines, each
Apache Hadoop is a framework that allows for the distributed processing of
large data sets across clusters of computers using simple programming models.
It is designed to scale up from single servers to thousands of machines, each
offering local computation and storage.
This package contains Hadoop YARN.
This package contains Apache Hadoop YARN.
%package yarn-security
Summary: The ability to run Hadoop YARN in secure mode
Summary: The ability to run Apache Hadoop YARN in secure mode
Group: Applications/System
Requires: %{name}-yarn = %{version}-%{release}
%description yarn-security
Hadoop is a framework that allows for the distributed processing of large data
sets across clusters of computers using simple programming models. It is
designed to scale up from single servers to thousands of machines, each
offering local computation and storage. YARN (Hadoop NextGen MapReduce) is
a general purpose data-computation framework.
Apache Hadoop is a framework that allows for the distributed processing of
large data sets across clusters of computers using simple programming models.
It is designed to scale up from single servers to thousands of machines, each
offering local computation and storage.
This package contains files needed to run Hadoop YARN in secure mode.
This package contains files needed to run Apache Hadoop YARN in secure mode.
%prep
%setup -qn %{name}-common-%{commit}
@@ -525,33 +527,35 @@ This package contains files needed to run Hadoop YARN in secure mode.
%mvn_package :%{name}-project-dist __noinstall
# Create separate file lists for packaging
%mvn_package :::tests: hadoop-tests
%mvn_package :%{name}-client*::{}: hadoop-client
%mvn_package :%{name}-hdfs*::{}: hadoop-hdfs
%mvn_package :%{name}-mapreduce-examples*::{}: hadoop-mapreduce-examples
%mvn_package :%{name}-mapreduce*::{}: hadoop-mapreduce
%mvn_package :%{name}-archives::{}: hadoop-mapreduce
%mvn_package :%{name}-datajoin::{}: hadoop-mapreduce
%mvn_package :%{name}-distcp::{}: hadoop-mapreduce
%mvn_package :%{name}-extras::{}: hadoop-mapreduce
%mvn_package :%{name}-gridmix::{}: hadoop-mapreduce
%mvn_package :%{name}-rumen::{}: hadoop-mapreduce
%mvn_package :%{name}-streaming::{}: hadoop-mapreduce
%mvn_package :%{name}-pipes::{}: hadoop-mapreduce
%mvn_package :%{name}-tools*::{}: hadoop-mapreduce
%mvn_package :%{name}-maven-plugins::{}: hadoop-maven-plugin
%mvn_package :%{name}-minicluster::{}: hadoop-tests
%mvn_package :%{name}-yarn*::{}: hadoop-yarn
%mvn_package :::tests: %{name}-tests
%mvn_package :%{name}-client*::{}: %{name}-client
%mvn_package :%{name}-hdfs*::{}: %{name}-hdfs
%mvn_package :%{name}-mapreduce-examples*::{}: %{name}-mapreduce-examples
%mvn_package :%{name}-mapreduce*::{}: %{name}-mapreduce
%mvn_package :%{name}-archives::{}: %{name}-mapreduce
%mvn_package :%{name}-datajoin::{}: %{name}-mapreduce
%mvn_package :%{name}-distcp::{}: %{name}-mapreduce
%mvn_package :%{name}-extras::{}: %{name}-mapreduce
%mvn_package :%{name}-gridmix::{}: %{name}-mapreduce
%mvn_package :%{name}-rumen::{}: %{name}-mapreduce
%mvn_package :%{name}-streaming::{}: %{name}-mapreduce
%mvn_package :%{name}-pipes::{}: %{name}-mapreduce
%mvn_package :%{name}-tools*::{}: %{name}-mapreduce
%mvn_package :%{name}-maven-plugins::{}: %{name}-maven-plugin
%mvn_package :%{name}-minicluster::{}: %{name}-tests
%mvn_package :%{name}-yarn*::{}: %{name}-yarn
# Jar files that need to be overridden due to installation location
%mvn_file :%{name}-common::{}: %{_jnidir}/%{name}-common %{_datadir}/%{name}/common/%{name}-common
%mvn_file :%{name}-common::tests: %{name}/%{name}-common-tests
# Workaround for bz1023116
#%%mvn_file :%{name}-common::{}: %{_jnidir}/%{name}-common %{_datadir}/%{name}/common/%{name}-common
%mvn_file :%{name}-common::{}: %{_jnidir}/%{name}-common
%mvn_file :%{name}-common::tests: %{name}/%{name}-common
%build
%ifnarch x86_64
opts="-j"
%endif
%mvn_build $opts -- -Drequire.snappy=true -Dcontainer-executor.conf.dir=%{_sysconfdir}/hadoop -Pdist,native -DskipTests -DskipTest -DskipIT
%mvn_build $opts -- -Drequire.snappy=true -Dcontainer-executor.conf.dir=%{_sysconfdir}/%{name} -Pdist,native -DskipTests -DskipTest -DskipIT
# This takes a long time to run, so comment out for now
#%%check
@@ -690,11 +694,11 @@ find %{buildroot} -name *.cmd | xargs rm -f
# Modify hadoop-env.sh to point to correct locations for JAVA_HOME
# and JSVC_HOME.
sed -i "s|\${JAVA_HOME}|/usr/lib/jvm/jre|" %{buildroot}/%{_sysconfdir}/%{name}/hadoop-env.sh
sed -i "s|\${JSVC_HOME}|/usr/bin|" %{buildroot}/%{_sysconfdir}/%{name}/hadoop-env.sh
sed -i "s|\${JAVA_HOME}|/usr/lib/jvm/jre|" %{buildroot}/%{_sysconfdir}/%{name}/%{name}-env.sh
sed -i "s|\${JSVC_HOME}|/usr/bin|" %{buildroot}/%{_sysconfdir}/%{name}/%{name}-env.sh
# Ensure the java provided DocumentBuilderFactory is used
sed -i "s|\(HADOOP_OPTS.*=.*\)\$HADOOP_CLIENT_OPTS|\1 -Djavax.xml.parsers.DocumentBuilderFactory=com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl \$HADOOP_CLIENT_OPTS|" %{buildroot}/%{_sysconfdir}/%{name}/hadoop-env.sh
sed -i "s|\(HADOOP_OPTS.*=.*\)\$HADOOP_CLIENT_OPTS|\1 -Djavax.xml.parsers.DocumentBuilderFactory=com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl \$HADOOP_CLIENT_OPTS|" %{buildroot}/%{_sysconfdir}/%{name}/%{name}-env.sh
echo "export YARN_OPTS=\"\$YARN_OPTS -Djavax.xml.parsers.DocumentBuilderFactory=com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl\"" >> %{buildroot}/%{_sysconfdir}/%{name}/yarn-env.sh
# Workaround for bz1012059
@@ -702,6 +706,9 @@ install -pm 644 %{name}-project-dist/target/%{name}-project-dist-%{hadoop_versio
install -pm 644 hadoop-project-dist/pom.xml %{buildroot}/%{_mavenpomdir}/JPP.%{name}-%{name}-project-dist.pom
%add_maven_depmap JPP.%{name}-%{name}-project-dist.pom %{name}/%{name}-project-dist.jar
# Workaround for bz1023116
%{__ln_s} %{_jnidir}/%{name}-common.jar %{buildroot}/%{_datadir}/%{name}/common
# client jar dependencies
copy_dep_jars %{name}-client/target/%{name}-client-%{hadoop_version}/share/%{name}/client/lib %{buildroot}/%{_datadir}/%{name}/client/lib
%{_bindir}/xmvn-subst %{buildroot}/%{_datadir}/%{name}/client/lib
@@ -746,7 +753,7 @@ cat > %{buildroot}/%{_sharedstatedir}/%{name}-httpfs/webapps/webhdfs/META-INF/co
EOF
# Remove the jars included in the webapp and create symlinks
rm -f %{buildroot}%{_sharedstatedir}/%{name}-httpfs/webapps/webhdfs/WEB-INF/lib/hadoop-common*.jar
rm -f %{buildroot}%{_sharedstatedir}/%{name}-httpfs/webapps/webhdfs/WEB-INF/lib/%{name}-common*.jar
rm -f %{buildroot}%{_sharedstatedir}/%{name}-httpfs/webapps/webhdfs/WEB-INF/lib/tools*.jar
%{_bindir}/xmvn-subst %{buildroot}/%{_sharedstatedir}/%{name}-httpfs/webapps/webhdfs/WEB-INF/lib
%{__ln_s} %{_jnidir}/%{name}-common.jar %{buildroot}%{_sharedstatedir}/%{name}-httpfs/webapps/webhdfs/WEB-INF/lib
@@ -810,7 +817,7 @@ if [ "$lib" = "%_libdir" ]; then
echo "_libdir is not located in /usr. Lib location is wrong"
exit 1
fi
sed -e "s|HADOOP_COMMON_LIB_NATIVE_DIR\s*=.*|HADOOP_COMMON_LIB_NATIVE_DIR=$lib/%{name}|" %{SOURCE1} > %{buildroot}/%{_libexecdir}/hadoop-layout.sh
sed -e "s|HADOOP_COMMON_LIB_NATIVE_DIR\s*=.*|HADOOP_COMMON_LIB_NATIVE_DIR=$lib/%{name}|" %{SOURCE1} > %{buildroot}/%{_libexecdir}/%{name}-layout.sh
# Default config
cp -f %{SOURCE8} %{buildroot}/%{_sysconfdir}/%{name}/core-site.xml
@@ -853,11 +860,11 @@ do
done
# Ensure /var/run directories are recreated on boot
echo "d %{_var}/run/%{name}-yarn 0775 yarn hadoop -" > %{buildroot}/%{_tmpfilesdir}/hadoop-yarn.conf
echo "d %{_var}/run/%{name}-hdfs 0775 hdfs hadoop -" > %{buildroot}/%{_tmpfilesdir}/hadoop-hdfs.conf
echo "d %{_var}/run/%{name}-mapreduce 0775 mapred hadoop -" > %{buildroot}/%{_tmpfilesdir}/hadoop-mapreduce.conf
echo "d %{_var}/run/%{name}-yarn 0775 yarn hadoop -" > %{buildroot}/%{_tmpfilesdir}/%{name}-yarn.conf
echo "d %{_var}/run/%{name}-hdfs 0775 hdfs hadoop -" > %{buildroot}/%{_tmpfilesdir}/%{name}-hdfs.conf
echo "d %{_var}/run/%{name}-mapreduce 0775 mapred hadoop -" > %{buildroot}/%{_tmpfilesdir}/%{name}-mapreduce.conf
%if %{package_httpfs}
echo "d %{_var}/run/%{name}-httpfs 0775 httpfs hadoop -" > %{buildroot}/%{_tmpfilesdir}/hadoop-httpfs.conf
echo "d %{_var}/run/%{name}-httpfs 0775 httpfs hadoop -" > %{buildroot}/%{_tmpfilesdir}/%{name}-httpfs.conf
%endif
# logrotate config
@@ -889,21 +896,21 @@ getent group hadoop >/dev/null || groupadd -r hadoop
%pre hdfs
getent group hdfs >/dev/null || groupadd -r hdfs
getent passwd hdfs >/dev/null || /usr/sbin/useradd --comment "Hadoop HDFS" --shell /sbin/nologin -M -r -g hdfs -G hadoop --home %{_var}/cache/%{name}-hdfs hdfs
getent passwd hdfs >/dev/null || /usr/sbin/useradd --comment "Apache Hadoop HDFS" --shell /sbin/nologin -M -r -g hdfs -G hadoop --home %{_var}/cache/%{name}-hdfs hdfs
%if %{package_httpfs}
%pre httpfs
getent group httpfs >/dev/null || groupadd -r httpfs
getent passwd httpfs >/dev/null || /usr/sbin/useradd --comment "Hadoop HTTPFS" --shell /sbin/nologin -M -r -g httpfs -G httpfs --home %{_var}/run/%{name}-httpfs httpfs
getent passwd httpfs >/dev/null || /usr/sbin/useradd --comment "Apache Hadoop HTTPFS" --shell /sbin/nologin -M -r -g httpfs -G httpfs --home %{_var}/run/%{name}-httpfs httpfs
%endif
%pre mapreduce
getent group mapred >/dev/null || groupadd -r mapred
getent passwd mapred >/dev/null || /usr/sbin/useradd --comment "Hadoop MapReduce" --shell /sbin/nologin -M -r -g mapred -G hadoop --home %{_var}/cache/%{name}-mapreduce mapred
getent passwd mapred >/dev/null || /usr/sbin/useradd --comment "Apache Hadoop MapReduce" --shell /sbin/nologin -M -r -g mapred -G hadoop --home %{_var}/cache/%{name}-mapreduce mapred
%pre yarn
getent group yarn >/dev/null || groupadd -r yarn
getent passwd yarn >/dev/null || /usr/sbin/useradd --comment "Hadoop Yarn" --shell /sbin/nologin -M -r -g yarn -G hadoop --home %{_var}/cache/%{name}-yarn yarn
getent passwd yarn >/dev/null || /usr/sbin/useradd --comment "Apache Hadoop Yarn" --shell /sbin/nologin -M -r -g yarn -G hadoop --home %{_var}/cache/%{name}-yarn yarn
%preun hdfs
%systemd_preun %{hdfs_services}
@@ -919,7 +926,7 @@ getent passwd yarn >/dev/null || /usr/sbin/useradd --comment "Hadoop Yarn" --she
%preun yarn
%systemd_preun %{yarn_services}
%post common -p /sbin/ldconfig
%post common-native -p /sbin/ldconfig
%post hdfs
%systemd_post %{hdfs_services}
@@ -939,7 +946,7 @@ getent passwd yarn >/dev/null || /usr/sbin/useradd --comment "Hadoop Yarn" --she
%post yarn
%systemd_post %{yarn_services}
%postun common -p /sbin/ldconfig
%postun common-native -p /sbin/ldconfig
%postun hdfs
%systemd_postun_with_restart %{hdfs_services}
@@ -959,7 +966,7 @@ getent passwd yarn >/dev/null || /usr/sbin/useradd --comment "Hadoop Yarn" --she
%postun yarn
%systemd_postun_with_restart %{yarn_services}
%files -f .mfiles-hadoop-client client
%files -f .mfiles-%{name}-client client
%{_datadir}/%{name}/client
%files -f .mfiles common
@@ -976,6 +983,10 @@ getent passwd yarn >/dev/null || /usr/sbin/useradd --comment "Hadoop Yarn" --she
%config(noreplace) %{_sysconfdir}/%{name}/ssl-server.xml.example
%dir %{_datadir}/%{name}
%dir %{_datadir}/%{name}/common
# Workaround for bz1023116
%{_datadir}/%{name}/common/%{name}-common.jar
%{_datadir}/%{name}/common/lib
%{_libexecdir}/%{name}-config.sh
%{_libexecdir}/%{name}-layout.sh
@@ -1001,7 +1012,7 @@ getent passwd yarn >/dev/null || /usr/sbin/useradd --comment "Hadoop Yarn" --she
%{_libdir}/libhdfs.so
%endif
%files -f .mfiles-hadoop-hdfs hdfs
%files -f .mfiles-%{name}-hdfs hdfs
%exclude %{_datadir}/%{name}/client
%config(noreplace) %{_sysconfdir}/%{name}/hdfs-site.xml
%config(noreplace) %{_sysconfdir}/security/limits.d/hdfs.conf
@@ -1066,7 +1077,7 @@ getent passwd yarn >/dev/null || /usr/sbin/useradd --comment "Hadoop Yarn" --she
%{_libdir}/libhdfs.so.*
%endif
%files -f .mfiles-hadoop-mapreduce mapreduce
%files -f .mfiles-%{name}-mapreduce mapreduce
%exclude %{_datadir}/%{name}/client
%config(noreplace) %{_sysconfdir}/%{name}/mapred-env.sh
%config(noreplace) %{_sysconfdir}/%{name}/mapred-queues.xml.template
@@ -1084,14 +1095,14 @@ getent passwd yarn >/dev/null || /usr/sbin/useradd --comment "Hadoop Yarn" --she
%attr(0755,mapred,hadoop) %dir %{_var}/log/%{name}-mapreduce
%attr(0755,mapred,hadoop) %dir %{_var}/cache/%{name}-mapreduce
%files -f .mfiles-hadoop-mapreduce-examples mapreduce-examples
%files -f .mfiles-%{name}-mapreduce-examples mapreduce-examples
%files -f .mfiles-hadoop-maven-plugin maven-plugin
%files -f .mfiles-%{name}-maven-plugin maven-plugin
%doc hadoop-dist/target/hadoop-%{hadoop_version}/share/doc/hadoop/common/LICENSE.txt
%files -f .mfiles-hadoop-tests tests
%files -f .mfiles-%{name}-tests tests
%files -f .mfiles-hadoop-yarn yarn
%files -f .mfiles-%{name}-yarn yarn
%exclude %{_datadir}/%{name}/client
%config(noreplace) %{_sysconfdir}/%{name}/capacity-scheduler.xml
%config(noreplace) %{_sysconfdir}/%{name}/yarn-env.sh
@@ -1119,6 +1130,13 @@ getent passwd yarn >/dev/null || /usr/sbin/useradd --comment "Hadoop Yarn" --she
%attr(6050,root,yarn) %{_bindir}/container-executor
%changelog
* Tue Dec 3 2013 Robert Rati <rrati@redhat> - 2.2.0-2
- Changed provides filter to just filter the .so
- Corrected naming of hadoop-common test jar
- Removed jline BuildRequires
- Moved pre/post install invocation of ldconfig to common-native
- Added workaround for bz1023116
* Wed Oct 23 2013 Robert Rati <rrati@redhat> - 2.2.0-1
- Update to upstream 2.2.0
- New patch to open libjvm with dlopen