Update to 2.7.3

Mike Miller 2016-12-21 15:02:57 -05:00 committed by Mike Miller
parent 13f360ee07
commit fe5fa11d25
15 changed files with 858 additions and 1714 deletions


@@ -1,50 +0,0 @@
--- a/hadoop-project/pom.xml 2016-10-22 19:21:40.448895211 -0400
+++ b/hadoop-project/pom.xml 2016-10-22 19:32:08.923535480 -0400
@@ -919,6 +919,20 @@
</goals>
<phase>pre-site</phase>
</execution>
+ <execution>
+ <id>depcheck</id>
+ <configuration>
+ <rules>
+ <DependencyConvergence>
+ <uniqueVersions>true</uniqueVersions>
+ </DependencyConvergence>
+ </rules>
+ </configuration>
+ <goals>
+ <goal>enforce</goal>
+ </goals>
+ <phase>verify</phase>
+ </execution>
</executions>
</plugin>
<plugin>
@@ -969,26 +983,6 @@
<includeReports>false</includeReports>
</configuration>
</plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-enforcer-plugin</artifactId>
- <executions>
- <execution>
- <id>depcheck</id>
- <configuration>
- <rules>
- <DependencyConvergence>
- <uniqueVersions>true</uniqueVersions>
- </DependencyConvergence>
- </rules>
- </configuration>
- <goals>
- <goal>enforce</goal>
- </goals>
- <phase>verify</phase>
- </execution>
- </executions>
- </plugin>
</plugins>
</build>


@@ -1,284 +0,0 @@
diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-common-project/hadoop-common/pom.xml hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-common-project/hadoop-common/pom.xml
--- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-common-project/hadoop-common/pom.xml 2015-09-10 04:13:59.016972031 +0200
+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-common-project/hadoop-common/pom.xml 2015-09-10 03:53:51.902302395 +0200
@@ -112,22 +112,26 @@
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
+ <version>${jersey.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<!-- Used, even though 'mvn dependency:analyze' doesn't find it -->
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
+ <version>${jersey.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-server</artifactId>
+ <version>${jersey.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-servlet</artifactId>
+ <version>${jersey.version}</version>
<scope>compile</scope>
</dependency>
diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-hdfs-project/hadoop-hdfs/pom.xml hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-hdfs-project/hadoop-hdfs/pom.xml
--- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-hdfs-project/hadoop-hdfs/pom.xml 2015-09-10 04:13:56.945073866 +0200
+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-hdfs-project/hadoop-hdfs/pom.xml 2015-09-10 03:55:29.757492758 +0200
@@ -83,11 +83,13 @@
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
+ <version>${jersey.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-server</artifactId>
+ <version>${jersey.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
--- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml 2015-09-10 04:13:59.019971884 +0200
+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml 2015-09-10 03:56:00.339989611 +0200
@@ -67,11 +67,13 @@
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
+ <version>${jersey.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-server</artifactId>
+ <version>${jersey.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-hdfs-project/hadoop-hdfs-nfs/pom.xml hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-hdfs-project/hadoop-hdfs-nfs/pom.xml
--- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-hdfs-project/hadoop-hdfs-nfs/pom.xml 2015-09-10 04:13:56.945073866 +0200
+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-hdfs-project/hadoop-hdfs-nfs/pom.xml 2015-09-10 03:56:32.350416281 +0200
@@ -97,11 +97,13 @@
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
+ <version>${jersey.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-server</artifactId>
+ <version>${jersey.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-mapreduce-project/pom.xml hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-mapreduce-project/pom.xml
--- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-mapreduce-project/pom.xml 2015-09-10 04:13:56.999071212 +0200
+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-mapreduce-project/pom.xml 2015-09-10 03:52:35.657049893 +0200
@@ -128,6 +128,7 @@
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-server</artifactId>
+ <version>${jersey.version}</version>
<exclusions>
<exclusion>
<groupId>asm</groupId>
@@ -138,10 +139,12 @@
<dependency>
<groupId>com.sun.jersey.contribs</groupId>
<artifactId>jersey-guice</artifactId>
+ <version>${jersey.version}</version>
</dependency>
<dependency>
<groupId>com.google.inject.extensions</groupId>
<artifactId>guice-servlet</artifactId>
+ <version>${jersey.version}</version>
</dependency>
<dependency>
<groupId>junit</groupId>
diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-project/pom.xml hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-project/pom.xml
--- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-project/pom.xml 2015-09-10 04:13:59.038970950 +0200
+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-project/pom.xml 2015-09-10 03:46:03.557321815 +0200
@@ -59,7 +59,7 @@
<avro.version>1.7.4</avro.version>
<!-- jersey version -->
- <jersey.version>1.17.1</jersey.version>
+ <jersey.version>1</jersey.version>
<!-- ProtocolBuffer version, used to verify the protoc version and -->
<!-- define the protobuf JAR version -->
diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/pom.xml hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/pom.xml
--- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/pom.xml 2015-09-10 04:13:57.003071015 +0200
+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/pom.xml 2015-09-10 03:47:14.870816716 +0200
@@ -78,6 +78,7 @@
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-client</artifactId>
+ <version>${jersey.version}</version>
</dependency>
<!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml
--- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml 2015-09-10 04:13:57.013070524 +0200
+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml 2015-09-10 03:46:50.182030184 +0200
@@ -83,6 +83,7 @@
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
+ <version>${jersey.version}</version>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
@@ -147,6 +148,7 @@
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-server</artifactId>
+ <version>${jersey.version}</version>
<exclusions>
<exclusion>
<groupId>asm</groupId>
@@ -157,10 +159,12 @@
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
+ <version>${jersey.version}</version>
</dependency>
<dependency>
<groupId>com.sun.jersey.contribs</groupId>
<artifactId>jersey-guice</artifactId>
+ <version>${jersey.version}</version>
</dependency>
<dependency>
<groupId>log4j</groupId>
diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml
--- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml 2015-09-10 04:13:57.013070524 +0200
+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml 2015-09-10 03:48:28.283208456 +0200
@@ -99,15 +99,18 @@
<dependency>
<groupId>com.sun.jersey.jersey-test-framework</groupId>
<artifactId>jersey-test-framework-core</artifactId>
+ <version>${jersey.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
+ <version>${jersey.version}</version>
</dependency>
<dependency>
<groupId>com.sun.jersey.contribs</groupId>
<artifactId>jersey-guice</artifactId>
+ <version>${jersey.version}</version>
</dependency>
<!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
<dependency>
@@ -137,10 +140,12 @@
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
+ <version>${jersey.version}</version>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-client</artifactId>
+ <version>${jersey.version}</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
--- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml 2015-09-10 04:13:57.013070524 +0200
+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml 2015-09-10 03:49:21.079613483 +0200
@@ -89,10 +89,12 @@
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
+ <version>${jersey.version}</version>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-client</artifactId>
+ <version>${jersey.version}</version>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
@@ -148,15 +150,18 @@
<dependency>
<groupId>com.sun.jersey.jersey-test-framework</groupId>
<artifactId>jersey-test-framework-grizzly2</artifactId>
+ <version>${jersey.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
+ <version>${jersey.version}</version>
</dependency>
<dependency>
<groupId>com.sun.jersey.contribs</groupId>
<artifactId>jersey-guice</artifactId>
+ <version>${jersey.version}</version>
</dependency>
<!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
<dependency>
diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/pom.xml hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/pom.xml
--- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/pom.xml 2015-09-10 04:13:57.022070082 +0200
+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/pom.xml 2015-09-10 03:50:18.954768886 +0200
@@ -109,15 +109,18 @@
<dependency>
<groupId>com.sun.jersey.jersey-test-framework</groupId>
<artifactId>jersey-test-framework-core</artifactId>
+ <version>${jersey.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
+ <version>${jersey.version}</version>
</dependency>
<dependency>
<groupId>com.sun.jersey.contribs</groupId>
<artifactId>jersey-guice</artifactId>
+ <version>${jersey.version}</version>
</dependency>
<!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
<dependency>
@@ -151,10 +154,12 @@
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
+ <version>${jersey.version}</version>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-client</artifactId>
+ <version>${jersey.version}</version>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
@@ -210,6 +215,7 @@
<dependency>
<groupId>com.sun.jersey.jersey-test-framework</groupId>
<artifactId>jersey-test-framework-grizzly2</artifactId>
+ <version>${jersey.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml
--- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml 2015-09-10 04:13:57.026069885 +0200
+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml 2015-09-10 03:51:11.787172144 +0200
@@ -119,6 +119,7 @@
<dependency>
<groupId>com.sun.jersey.jersey-test-framework</groupId>
<artifactId>jersey-test-framework-grizzly2</artifactId>
+ <version>${jersey.version}</version>
<scope>test</scope>
</dependency>


@@ -1,81 +0,0 @@
diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jets3t/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
--- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java 2014-06-30 09:04:57.000000000 +0200
+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jets3t/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java 2015-03-14 15:37:19.582587031 +0100
@@ -91,17 +91,17 @@
S3Credentials s3Credentials = new S3Credentials();
s3Credentials.initialize(uri, conf);
- try {
+ //try {
AWSCredentials awsCredentials =
new AWSCredentials(s3Credentials.getAccessKey(),
s3Credentials.getSecretAccessKey());
this.s3Service = new RestS3Service(awsCredentials);
- } catch (S3ServiceException e) {
- if (e.getCause() instanceof IOException) {
- throw (IOException) e.getCause();
- }
- throw new S3Exception(e);
- }
+ // } catch (S3ServiceException e) {
+ // if (e.getCause() instanceof IOException) {
+ // throw (IOException) e.getCause();
+ // }
+ // throw new S3Exception(e);
+ // }
bucket = new S3Bucket(uri.getHost());
this.bufferSize = conf.getInt(
diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jets3t/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java
--- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java 2014-06-30 09:04:57.000000000 +0200
+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jets3t/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java 2015-03-14 15:50:35.036095902 +0100
@@ -117,7 +117,7 @@
- try {
+ //try {
String accessKey = null;
String secretAccessKey = null;
String userInfo = uri.getUserInfo();
@@ -158,12 +158,12 @@
AWSCredentials awsCredentials =
new AWSCredentials(accessKey, secretAccessKey);
this.s3Service = new RestS3Service(awsCredentials);
- } catch (S3ServiceException e) {
- if (e.getCause() instanceof IOException) {
- throw (IOException) e.getCause();
- }
- throw new S3Exception(e);
- }
+ //} catch (S3ServiceException e) {
+ // if (e.getCause() instanceof IOException) {
+ // throw (IOException) e.getCause();
+ // }
+ // throw new S3Exception(e);
+ //}
bucket = new S3Bucket(uri.getHost());
}
diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jets3t/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java
--- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java 2014-06-30 09:04:57.000000000 +0200
+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jets3t/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java 2015-03-14 15:24:05.397371065 +0100
@@ -71,14 +71,14 @@
public void initialize(URI uri, Configuration conf) throws IOException {
S3Credentials s3Credentials = new S3Credentials();
s3Credentials.initialize(uri, conf);
- try {
+ //try {
AWSCredentials awsCredentials =
new AWSCredentials(s3Credentials.getAccessKey(),
s3Credentials.getSecretAccessKey());
this.s3Service = new RestS3Service(awsCredentials);
- } catch (S3ServiceException e) {
- handleS3ServiceException(e);
- }
+ //} catch (S3ServiceException e) {
+ // handleS3ServiceException(e);
+ //}
multipartEnabled =
conf.getBoolean("fs.s3n.multipart.uploads.enabled", false);
multipartBlockSize = Math.min(
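
The patch above only comments the dead handlers out; the new hadoop-aws.patch further down removes them outright. As a minimal sketch of what the store initialization reduces to — assuming jets3t 0.9.x, where the RestS3Service constructor stopped declaring S3ServiceException, and using a simplified stand-in class rather than the actual Hadoop sources:

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.s3.S3Credentials;
import org.jets3t.service.impl.rest.httpclient.RestS3Service;
import org.jets3t.service.model.S3Bucket;
import org.jets3t.service.security.AWSCredentials;

class S3StoreInitSketch {
  private RestS3Service s3Service;
  private S3Bucket bucket;

  // Once jets3t stopped declaring S3ServiceException on the constructor,
  // the old try/catch became a compile error ("exception ... is never
  // thrown in body of corresponding try statement"), hence its removal.
  void initialize(URI uri, Configuration conf) throws IOException {
    S3Credentials s3Credentials = new S3Credentials();
    s3Credentials.initialize(uri, conf);
    AWSCredentials awsCredentials =
        new AWSCredentials(s3Credentials.getAccessKey(),
            s3Credentials.getSecretAccessKey());
    this.s3Service = new RestS3Service(awsCredentials);
    bucket = new S3Bucket(uri.getHost());
  }
}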


@@ -1,13 +0,0 @@
diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/src/main/java/org/apache/hadoop/contrib/bkjournal/BookKeeperJournalManager.java hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.bookkeeper/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/src/main/java/org/apache/hadoop/contrib/bkjournal/BookKeeperJournalManager.java
--- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/src/main/java/org/apache/hadoop/contrib/bkjournal/BookKeeperJournalManager.java 2014-06-30 09:04:57.000000000 +0200
+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.bookkeeper/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/src/main/java/org/apache/hadoop/contrib/bkjournal/BookKeeperJournalManager.java 2016-01-09 13:43:26.831773352 +0100
@@ -237,7 +237,7 @@
zkPathLatch.countDown();
}
};
- ZkUtils.createFullPathOptimistic(zkc, zkAvailablePath, new byte[0],
+ ZkUtils.asyncCreateFullPathOptimistic(zkc, zkAvailablePath, new byte[0],
Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT, callback, null);
try {
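
A minimal sketch of the renamed BookKeeper call, assuming the 4.x org.apache.bookkeeper.util.ZkUtils API; the latch-based callback mirrors the patched BookKeeperJournalManager code, while the wrapper class and method are illustrative only:

import java.util.concurrent.CountDownLatch;

import org.apache.bookkeeper.util.ZkUtils;
import org.apache.zookeeper.AsyncCallback;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.ZooDefs.Ids;
import org.apache.zookeeper.ZooKeeper;

class ZkAvailablePathSketch {
  void ensureAvailablePath(ZooKeeper zkc, String zkAvailablePath)
      throws InterruptedException {
    final CountDownLatch zkPathLatch = new CountDownLatch(1);
    AsyncCallback.StringCallback callback = new AsyncCallback.StringCallback() {
      @Override
      public void processResult(int rc, String path, Object ctx, String name) {
        zkPathLatch.countDown(); // completion signal, success or failure
      }
    };
    // Only the method name changed in newer BookKeeper; the create still
    // reports completion through the ZooKeeper StringCallback.
    ZkUtils.asyncCreateFullPathOptimistic(zkc, zkAvailablePath, new byte[0],
        Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT, callback, null);
    zkPathLatch.await();
  }
}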


@@ -1,18 +0,0 @@
diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.servlet/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java
--- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java 2015-03-20 04:45:08.415241957 +0100
+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.servlet/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java 2015-03-14 16:33:12.627551779 +0100
@@ -308,5 +308,14 @@
public void write(int b) throws IOException {
buffer.append((char) b);
}
+
+ public void setWriteListener(javax.servlet.WriteListener listener) {
+ throw new UnsupportedOperationException("Not implemented yet.");
+ }
+
+ public boolean isReady() {
+ return false;
+ }
+
}
}
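
This hunk exists because Servlet 3.1 made isReady() and setWriteListener() abstract on ServletOutputStream, so every concrete subclass must now implement them. A minimal sketch of the required shape (an illustrative buffer-backed stream, not the Hadoop test class):

import java.io.IOException;

import javax.servlet.ServletOutputStream;
import javax.servlet.WriteListener;

class BufferingServletOutputStream extends ServletOutputStream {
  private final StringBuilder buffer = new StringBuilder();

  @Override
  public void write(int b) throws IOException {
    buffer.append((char) b);
  }

  @Override
  public boolean isReady() {
    return false; // test stub: no non-blocking writes
  }

  @Override
  public void setWriteListener(WriteListener listener) {
    throw new UnsupportedOperationException("Not implemented yet.");
  }
}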

hadoop-aws.patch (new file)

@@ -0,0 +1,60 @@
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
index 901f89b..3a44a01 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
@@ -91,17 +91,10 @@ class Jets3tFileSystemStore implements FileSystemStore {
S3Credentials s3Credentials = new S3Credentials();
s3Credentials.initialize(uri, conf);
- try {
AWSCredentials awsCredentials =
new AWSCredentials(s3Credentials.getAccessKey(),
s3Credentials.getSecretAccessKey());
this.s3Service = new RestS3Service(awsCredentials);
- } catch (S3ServiceException e) {
- if (e.getCause() instanceof IOException) {
- throw (IOException) e.getCause();
- }
- throw new S3Exception(e);
- }
bucket = new S3Bucket(uri.getHost());
this.bufferSize = conf.getInt(
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java
index 429c272..411bd53 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java
@@ -117,7 +117,6 @@ public class MigrationTool extends Configured implements Tool {
- try {
String accessKey = null;
String secretAccessKey = null;
String userInfo = uri.getUserInfo();
@@ -158,12 +157,6 @@ public class MigrationTool extends Configured implements Tool {
AWSCredentials awsCredentials =
new AWSCredentials(accessKey, secretAccessKey);
this.s3Service = new RestS3Service(awsCredentials);
- } catch (S3ServiceException e) {
- if (e.getCause() instanceof IOException) {
- throw (IOException) e.getCause();
- }
- throw new S3Exception(e);
- }
bucket = new S3Bucket(uri.getHost());
}
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java
index a10d6f2..3e2fa38 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java
@@ -83,7 +83,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
new AWSCredentials(s3Credentials.getAccessKey(),
s3Credentials.getSecretAccessKey());
this.s3Service = new RestS3Service(awsCredentials);
- } catch (S3ServiceException e) {
+ } catch (Exception e) {
handleException(e);
}
multipartEnabled =


@@ -14,13 +14,13 @@ diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/CMakeLists.txt b/hadoop-hdfs-pr
index 82d1a32..2151bb8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/CMakeLists.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/CMakeLists.txt
@@ -99,7 +99,6 @@ if (NEED_LINK_DL)
@@ -147,7 +147,6 @@ if (NEED_LINK_DL)
endif(NEED_LINK_DL)
target_link_dual_libraries(hdfs
- ${JAVA_JVM_LIBRARY}
${LIB_DL}
pthread
${OS_LINK_LIBRARIES}
)
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/fuse-dfs/CMakeLists.txt b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/fuse-dfs/CMakeLists.txt
index dd3f1e6..68ba422 100644
@@ -34,29 +34,52 @@ index dd3f1e6..68ba422 100644
hdfs
m
pthread
@@ -77,16 +77,6 @@ IF(FUSE_FOUND)
pthread
rt
)
- add_executable(test_fuse_dfs
- test/test_fuse_dfs.c
- test/fuse_workload.c
- )
- target_link_libraries(test_fuse_dfs
- ${FUSE_LIBRARIES}
- native_mini_dfs
- posix_util
- pthread
- )
ELSE(FUSE_FOUND)
IF(REQUIRE_FUSE)
MESSAGE(FATAL_ERROR "Required component fuse_dfs could not be built.")
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/jni_helper.c b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/jni_helper.c
index 878289f..62686b3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/jni_helper.c
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/jni_helper.c
@@ -20,6 +20,7 @@
#include "exception.h"
#include "jni_helper.h"
@@ -24,6 +24,7 @@
#include "os/mutexes.h"
#include "os/thread_local_storage.h"
+#include <dlfcn.h>
#include <stdio.h>
#include <string.h>
@@ -442,10 +443,82 @@ static JNIEnv* getGlobalJNIEnv(void)
@@ -373,6 +374,12 @@ static JNIEnv* getGlobalJNIEnv(void)
jint rv = 0;
jint noVMs = 0;
jthrowable jthr;
+ void *jvmHandle = NULL;
+ jint JNICALL (*getCreatedJavaJVMsPtr)(JavaVM **, jsize, jsize *);
+ getCreatedJavaJVMsPtr = NULL;
+ jint JNICALL (*createJavaVMPtr)(JavaVM **, void **, void *);
+ createJavaVMPtr = NULL;
+ char *dlsym_error = NULL;
+
char *hadoopClassPath;
const char *hadoopClassPathVMArg = "-Djava.class.path=";
size_t optHadoopClassPathLen;
@@ -384,10 +391,75 @@ static JNIEnv* getGlobalJNIEnv(void)
JavaVMInitArgs vm_args;
JavaVM *vm;
JavaVMOption *options;
+ //Get JAVA_HOME to use appropriate libjvm
+ char *javaHome = getenv("JAVA_HOME");
+ if (javaHome == NULL) {
@@ -114,7 +137,7 @@ index 878289f..62686b3 100644
+ return NULL;
+ }
- rv = JNI_GetCreatedJavaVMs(&(vmBuf[0]), vmBufLength, &noVMs);
- rv = JNI_GetCreatedJavaVMs(&(vmBuf[0]), VM_BUF_LENGTH, &noVMs);
+ //Load the JNI_GetCreatedJavaVMs function from the libjvm library
+ getCreatedJavaJVMsPtr = (jint JNICALL (*)(JavaVM **, jsize, jsize *)) dlsym(jvmHandle, "JNI_GetCreatedJavaVMs");
+ dlsym_error = dlerror();
@@ -123,21 +146,21 @@ index 878289f..62686b3 100644
+ dlclose(jvmHandle);
+ return NULL;
+ }
+ rv = (*getCreatedJavaJVMsPtr)(&(vmBuf[0]), vmBufLength, &noVMs);
+ rv = (*getCreatedJavaJVMsPtr)(&(vmBuf[0]), VM_BUF_LENGTH, &noVMs);
if (rv != 0) {
fprintf(stderr, "JNI_GetCreatedJavaVMs failed with error: %d\n", rv);
+ dlclose(jvmHandle);
return NULL;
}
@@ -454,6 +527,7 @@ static JNIEnv* getGlobalJNIEnv(void)
char *hadoopClassPath = getenv("CLASSPATH");
@@ -396,6 +468,7 @@ static JNIEnv* getGlobalJNIEnv(void)
hadoopClassPath = getenv("CLASSPATH");
if (hadoopClassPath == NULL) {
fprintf(stderr, "Environment variable CLASSPATH not set!\n");
+ dlclose(jvmHandle);
return NULL;
}
char *hadoopClassPathVMArg = "-Djava.class.path=";
optHadoopClassPathLen = strlen(hadoopClassPath) +
@@ -502,7 +576,15 @@ static JNIEnv* getGlobalJNIEnv(void)
vm_args.nOptions = noArgs;
vm_args.ignoreUnrecognized = 1;
@@ -163,3 +186,57 @@ index 878289f..62686b3 100644
return NULL;
}
jthr = invokeMethod(env, NULL, STATIC, NULL,
@@ -470,6 +552,7 @@ static JNIEnv* getGlobalJNIEnv(void)
if (rv != 0) {
fprintf(stderr, "Call to AttachCurrentThread "
"failed with error: %d\n", rv);
+ dlclose(jvmHandle);
return NULL;
}
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/CMakeLists.txt b/hadoop-hdfs-project/hadoop-hdfs/src/CMakeLists.txt
index 74f20dd..255d4ab 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/CMakeLists.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/CMakeLists.txt
@@ -161,7 +161,6 @@ add_executable(test_libhdfs_ops
)
target_link_libraries(test_libhdfs_ops
hdfs_static
- ${JAVA_JVM_LIBRARY}
)
add_executable(test_libhdfs_read
@@ -169,7 +168,6 @@ add_executable(test_libhdfs_read
)
target_link_libraries(test_libhdfs_read
hdfs_static
- ${JAVA_JVM_LIBRARY}
)
add_executable(test_libhdfs_write
@@ -177,7 +175,6 @@ add_executable(test_libhdfs_write
)
target_link_libraries(test_libhdfs_write
hdfs_static
- ${JAVA_JVM_LIBRARY}
)
add_library(native_mini_dfs
@@ -189,17 +186,9 @@ add_library(native_mini_dfs
${OS_DIR}/thread_local_storage.c
)
target_link_libraries(native_mini_dfs
- ${JAVA_JVM_LIBRARY}
${OS_LINK_LIBRARIES}
)
-add_executable(test_native_mini_dfs
- main/native/libhdfs/test_native_mini_dfs.c
-)
-target_link_libraries(test_native_mini_dfs
- native_mini_dfs
-)
-
add_executable(test_libhdfs_threaded
main/native/libhdfs/expect.c
main/native/libhdfs/test_libhdfs_threaded.c

File diff suppressed because it is too large.


@@ -1,411 +1,26 @@
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java
index f7932a6..ec3d9cf 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java
@@ -22,6 +22,7 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
+import java.util.concurrent.TimeUnit;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -153,7 +154,7 @@ public String toString() {
private class Monitor implements Runnable {
@Override
public void run() {
- Stopwatch sw = new Stopwatch();
+ Stopwatch sw = Stopwatch.createUnstarted();
Map<String, GcTimes> gcTimesBeforeSleep = getGcTimes();
while (shouldRun) {
sw.reset().start();
@@ -162,7 +163,7 @@ public void run() {
} catch (InterruptedException ie) {
return;
}
- long extraSleepTime = sw.elapsedMillis() - SLEEP_INTERVAL_MS;
+ long extraSleepTime = sw.elapsed(TimeUnit.MILLISECONDS) - SLEEP_INTERVAL_MS;
Map<String, GcTimes> gcTimesAfterSleep = getGcTimes();
if (extraSleepTime > warnThresholdMs) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/IPCLoggerChannel.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/IPCLoggerChannel.java
index 8588de5..cb0dbae 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/IPCLoggerChannel.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/IPCLoggerChannel.java
@@ -133,7 +133,7 @@
/**
* Stopwatch which starts counting on each heartbeat that is sent
*/
- private final Stopwatch lastHeartbeatStopwatch = new Stopwatch();
+ private final Stopwatch lastHeartbeatStopwatch = Stopwatch.createUnstarted();
private static final long HEARTBEAT_INTERVAL_MILLIS = 1000;
@@ -435,7 +435,7 @@ private void throwIfOutOfSync()
* written.
*/
private void heartbeatIfNecessary() throws IOException {
- if (lastHeartbeatStopwatch.elapsedMillis() > HEARTBEAT_INTERVAL_MILLIS ||
+ if (lastHeartbeatStopwatch.elapsed(TimeUnit.MILLISECONDS) > HEARTBEAT_INTERVAL_MILLIS ||
!lastHeartbeatStopwatch.isRunning()) {
try {
getProxy().heartbeat(createReqInfo());
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java
index c117ee8..82f01da 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java
@@ -68,7 +68,6 @@
import com.google.common.base.Stopwatch;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Range;
-import com.google.common.collect.Ranges;
import com.google.protobuf.TextFormat;
/**
@@ -374,15 +373,15 @@ synchronized void journal(RequestInfo reqInfo,
curSegment.writeRaw(records, 0, records.length);
curSegment.setReadyToFlush();
- Stopwatch sw = new Stopwatch();
+ Stopwatch sw = Stopwatch.createUnstarted();
sw.start();
curSegment.flush(shouldFsync);
sw.stop();
- metrics.addSync(sw.elapsedTime(TimeUnit.MICROSECONDS));
- if (sw.elapsedTime(TimeUnit.MILLISECONDS) > WARN_SYNC_MILLIS_THRESHOLD) {
+ metrics.addSync(sw.elapsed(TimeUnit.MICROSECONDS));
+ if (sw.elapsed(TimeUnit.MILLISECONDS) > WARN_SYNC_MILLIS_THRESHOLD) {
LOG.warn("Sync of transaction range " + firstTxnId + "-" + lastTxnId +
- " took " + sw.elapsedTime(TimeUnit.MILLISECONDS) + "ms");
+ " took " + sw.elapsed(TimeUnit.MILLISECONDS) + "ms");
}
if (isLagging) {
@@ -853,7 +852,7 @@ public synchronized void acceptRecovery(RequestInfo reqInfo,
private Range<Long> txnRange(SegmentStateProto seg) {
Preconditions.checkArgument(seg.hasEndTxId(),
"invalid segment: %s", seg);
- return Ranges.closed(seg.getStartTxId(), seg.getEndTxId());
+ return Range.closed(seg.getStartTxId(), seg.getEndTxId());
}
/**
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java
index 5075da9..0d868d4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java
@@ -62,7 +62,7 @@
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
-import com.google.common.io.LimitInputStream;
+import com.google.common.io.ByteStreams;
import com.google.protobuf.CodedOutputStream;
/**
@@ -215,7 +215,7 @@ public int compare(FileSummary.Section s1, FileSummary.Section s2) {
for (FileSummary.Section s : sections) {
channel.position(s.getOffset());
- InputStream in = new BufferedInputStream(new LimitInputStream(fin,
+ InputStream in = new BufferedInputStream(ByteStreams.limit(fin,
s.getLength()));
in = FSImageUtil.wrapInputStreamForCompression(conf,
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FileDistributionCalculator.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FileDistributionCalculator.java
index c8033dd..b312bfe 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FileDistributionCalculator.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FileDistributionCalculator.java
@@ -33,7 +33,7 @@
import org.apache.hadoop.io.IOUtils;
import com.google.common.base.Preconditions;
-import com.google.common.io.LimitInputStream;
+import com.google.common.io.ByteStreams;
/**
* This is the tool for analyzing file sizes in the namespace image. In order to
@@ -106,7 +106,7 @@ void visit(RandomAccessFile file) throws IOException {
in.getChannel().position(s.getOffset());
InputStream is = FSImageUtil.wrapInputStreamForCompression(conf,
- summary.getCodec(), new BufferedInputStream(new LimitInputStream(
+ summary.getCodec(), new BufferedInputStream(ByteStreams.limit(
in, s.getLength())));
run(is);
output();
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsrPBImage.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsrPBImage.java
index d80fcf1..e025f82 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsrPBImage.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsrPBImage.java
@@ -50,7 +50,7 @@
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
-import com.google.common.io.LimitInputStream;
+import com.google.common.io.ByteStreams;
/**
* LsrPBImage displays the blocks of the namespace in a format very similar
@@ -110,7 +110,7 @@ public int compare(FileSummary.Section s1, FileSummary.Section s2) {
for (FileSummary.Section s : sections) {
fin.getChannel().position(s.getOffset());
InputStream is = FSImageUtil.wrapInputStreamForCompression(conf,
- summary.getCodec(), new BufferedInputStream(new LimitInputStream(
+ summary.getCodec(), new BufferedInputStream(ByteStreams.limit(
fin, s.getLength())));
switch (SectionName.fromString(s.getName())) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
index 99617b8..c613591 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
@@ -52,7 +52,7 @@
import org.apache.hadoop.io.IOUtils;
import com.google.common.collect.Lists;
-import com.google.common.io.LimitInputStream;
+import com.google.common.io.ByteStreams;
/**
* PBImageXmlWriter walks over an fsimage structure and writes out
@@ -100,7 +100,7 @@ public int compare(FileSummary.Section s1, FileSummary.Section s2) {
for (FileSummary.Section s : sections) {
fin.getChannel().position(s.getOffset());
InputStream is = FSImageUtil.wrapInputStreamForCompression(conf,
- summary.getCodec(), new BufferedInputStream(new LimitInputStream(
+ summary.getCodec(), new BufferedInputStream(ByteStreams.limit(
fin, s.getLength())));
switch (SectionName.fromString(s.getName())) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDataTransferKeepalive.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDataTransferKeepalive.java
index 132218c..09d42e1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDataTransferKeepalive.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDataTransferKeepalive.java
@@ -47,7 +47,7 @@
import org.junit.Before;
import org.junit.Test;
-import com.google.common.io.NullOutputStream;
+import com.google.common.io.ByteStreams;
public class TestDataTransferKeepalive {
final Configuration conf = new HdfsConfiguration();
@@ -224,7 +224,7 @@ public void testManyClosedSocketsInCache() throws Exception {
stms[i] = fs.open(TEST_FILE);
}
for (InputStream stm : stms) {
- IOUtils.copyBytes(stm, new NullOutputStream(), 1024);
+ IOUtils.copyBytes(stm, ByteStreams.nullOutputStream(), 1024);
}
} finally {
IOUtils.cleanup(null, stms);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMultiThreadedHflush.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMultiThreadedHflush.java
index 92c7672..aa5c351 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMultiThreadedHflush.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMultiThreadedHflush.java
@@ -100,10 +100,10 @@ public void run() {
}
private void doAWrite() throws IOException {
- Stopwatch sw = new Stopwatch().start();
+ Stopwatch sw = Stopwatch.createStarted();
stm.write(toWrite);
stm.hflush();
- long micros = sw.elapsedTime(TimeUnit.MICROSECONDS);
+ long micros = sw.elapsed(TimeUnit.MICROSECONDS);
quantiles.insert(micros);
}
}
@@ -276,12 +276,12 @@ public int run(String args[]) throws Exception {
int replication = conf.getInt(DFSConfigKeys.DFS_REPLICATION_KEY,
DFSConfigKeys.DFS_REPLICATION_DEFAULT);
- Stopwatch sw = new Stopwatch().start();
+ Stopwatch sw = Stopwatch.createStarted();
test.doMultithreadedWrites(conf, p, numThreads, writeSize, numWrites,
replication);
sw.stop();
- System.out.println("Finished in " + sw.elapsedMillis() + "ms");
+ System.out.println("Finished in " + sw.elapsed(TimeUnit.MILLISECONDS) + "ms");
System.out.println("Latency quantiles (in microseconds):\n" +
test.quantiles);
return 0;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNode.java
index 10b6b79..9fbcf82 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNode.java
@@ -27,6 +27,7 @@
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
@@ -325,11 +326,11 @@ private void doPerfTest(int editsSize, int numEdits) throws Exception {
ch.setEpoch(1);
ch.startLogSegment(1, NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION).get();
- Stopwatch sw = new Stopwatch().start();
+ Stopwatch sw = Stopwatch.createStarted();
for (int i = 1; i < numEdits; i++) {
ch.sendEdits(1L, i, 1, data).get();
}
- long time = sw.elapsedMillis();
+ long time = sw.elapsed(TimeUnit.MILLISECONDS);
System.err.println("Wrote " + numEdits + " batches of " + editsSize +
" bytes in " + time + "ms");
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestChunkedArrayList.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestChunkedArrayList.java
index a1e49cc..44751b0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestChunkedArrayList.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestChunkedArrayList.java
@@ -20,6 +20,7 @@
import static org.junit.Assert.*;
import java.util.ArrayList;
+import java.util.concurrent.TimeUnit;
import org.junit.Test;
@@ -69,24 +70,22 @@ public void testPerformance() {
System.gc();
{
ArrayList<String> arrayList = new ArrayList<String>();
- Stopwatch sw = new Stopwatch();
- sw.start();
+ Stopwatch sw = Stopwatch.createStarted();
for (int i = 0; i < numElems; i++) {
arrayList.add(obj);
}
- System.out.println(" ArrayList " + sw.elapsedMillis());
+ System.out.println(" ArrayList " + sw.elapsed(TimeUnit.MILLISECONDS));
}
// test ChunkedArrayList
System.gc();
{
ChunkedArrayList<String> chunkedList = new ChunkedArrayList<String>();
- Stopwatch sw = new Stopwatch();
- sw.start();
+ Stopwatch sw = Stopwatch.createStarted();
for (int i = 0; i < numElems; i++) {
chunkedList.add(obj);
}
- System.out.println("ChunkedArrayList " + sw.elapsedMillis());
+ System.out.println("ChunkedArrayList " + sw.elapsed(TimeUnit.MILLISECONDS));
}
}
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java
index 9863427..07854a1 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java
@@ -28,6 +28,7 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
+import java.util.concurrent.TimeUnit;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -223,7 +224,7 @@ protected void addInputPathRecursively(List<FileStatus> result,
org.apache.hadoop.mapreduce.lib.input.FileInputFormat.LIST_STATUS_NUM_THREADS,
org.apache.hadoop.mapreduce.lib.input.FileInputFormat.DEFAULT_LIST_STATUS_NUM_THREADS);
- Stopwatch sw = new Stopwatch().start();
+ Stopwatch sw = Stopwatch.createStarted();
if (numThreads == 1) {
List<FileStatus> locatedFiles = singleThreadedListStatus(job, dirs, inputFilter, recursive);
result = locatedFiles.toArray(new FileStatus[locatedFiles.size()]);
@@ -242,7 +243,7 @@ protected void addInputPathRecursively(List<FileStatus> result,
sw.stop();
if (LOG.isDebugEnabled()) {
- LOG.debug("Time taken to get FileStatuses: " + sw.elapsedMillis());
+ LOG.debug("Time taken to get FileStatuses: " + sw.elapsed(TimeUnit.MILLISECONDS));
}
LOG.info("Total input paths to process : " + result.length);
return result;
@@ -300,7 +301,7 @@ protected FileSplit makeSplit(Path file, long start, long length,
* they're too big.*/
public InputSplit[] getSplits(JobConf job, int numSplits)
throws IOException {
- Stopwatch sw = new Stopwatch().start();
+ Stopwatch sw = Stopwatch.createStarted();
FileStatus[] files = listStatus(job);
// Save the number of input files for metrics/loadgen
@@ -362,7 +363,7 @@ protected FileSplit makeSplit(Path file, long start, long length,
sw.stop();
if (LOG.isDebugEnabled()) {
LOG.debug("Total # of splits generated by getSplits: " + splits.size()
- + ", TimeTaken: " + sw.elapsedMillis());
+ + ", TimeTaken: " + sw.elapsed(TimeUnit.MILLISECONDS));
}
return splits.toArray(new FileSplit[splits.size()]);
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java
index 5f32f11..a4f293c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java
@@ -21,6 +21,7 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
+import java.util.concurrent.TimeUnit;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -258,7 +259,7 @@ public static PathFilter getInputPathFilter(JobContext context) {
int numThreads = job.getConfiguration().getInt(LIST_STATUS_NUM_THREADS,
DEFAULT_LIST_STATUS_NUM_THREADS);
- Stopwatch sw = new Stopwatch().start();
+ Stopwatch sw = Stopwatch.createStarted();
if (numThreads == 1) {
result = singleThreadedListStatus(job, dirs, inputFilter, recursive);
} else {
@@ -275,7 +276,7 @@ public static PathFilter getInputPathFilter(JobContext context) {
sw.stop();
if (LOG.isDebugEnabled()) {
- LOG.debug("Time taken to get FileStatuses: " + sw.elapsedMillis());
+ LOG.debug("Time taken to get FileStatuses: " + sw.elapsed(TimeUnit.MILLISECONDS));
}
LOG.info("Total input paths to process : " + result.size());
return result;
@@ -366,7 +367,7 @@ protected FileSplit makeSplit(Path file, long start, long length,
* @throws IOException
*/
public List<InputSplit> getSplits(JobContext job) throws IOException {
- Stopwatch sw = new Stopwatch().start();
+ Stopwatch sw = Stopwatch.createStarted();
long minSize = Math.max(getFormatMinSplitSize(), getMinSplitSize(job));
long maxSize = getMaxSplitSize(job);
@@ -414,7 +415,7 @@ protected FileSplit makeSplit(Path file, long start, long length,
sw.stop();
if (LOG.isDebugEnabled()) {
LOG.debug("Total # of splits generated by getSplits: " + splits.size()
- + ", TimeTaken: " + sw.elapsedMillis());
+ + ", TimeTaken: " + sw.elapsed(TimeUnit.MILLISECONDS));
}
return splits;
}
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index b315e2b..9ad8bcd 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -310,7 +310,7 @@
@@ -400,7 +400,7 @@
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
- <version>11.0.2</version>
+ <version>17.0</version>
+ <version>18.0</version>
</dependency>
<dependency>
<groupId>commons-cli</groupId>
<groupId>com.google.code.gson</groupId>
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java
index d55c80b..4505aa9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java
@@ -67,7 +67,7 @@ class XAttrCommands extends FsCommand {
"0x and 0s, respectively.\n" +
"<path>: The file or directory.\n";
private final static Function<String, XAttrCodec> enValueOfFunc =
- Enums.valueOfFunction(XAttrCodec.class);
+ Enums.stringConverter(XAttrCodec.class);
private String name = null;
private boolean dump = false;
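
For reference, a small sketch of the Stopwatch idiom this patch converts everything to, assuming Guava 18.0 as pinned in hadoop-project/pom.xml above; the timed work is a stand-in:

import java.util.concurrent.TimeUnit;

import com.google.common.base.Stopwatch;

class StopwatchSketch {
  static long timeSomething() throws InterruptedException {
    // Guava 11 spelled this: new Stopwatch().start() ... sw.elapsedMillis().
    // The public constructor and the elapsedMillis()/elapsedTime() shortcuts
    // are gone in 18.0, replaced by factories and elapsed(TimeUnit).
    Stopwatch sw = Stopwatch.createStarted();
    Thread.sleep(10); // stand-in for the work being timed
    sw.stop();
    return sw.elapsed(TimeUnit.MILLISECONDS);
  }
}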

hadoop-jersey1.patch (new file)

@@ -0,0 +1,13 @@
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index b646304..97ac7c2 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -64,7 +64,7 @@
<avro.version>1.7.4</avro.version>
<!-- jersey version -->
- <jersey.version>1.9</jersey.version>
+ <jersey.version>1</jersey.version>
<!-- jackson versions -->
<jackson.version>1.9.13</jackson.version>


@@ -1,44 +0,0 @@
diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml
index 7cf67a3..c090916 100644
--- a/hadoop-common-project/hadoop-common/pom.xml
+++ b/hadoop-common-project/hadoop-common/pom.xml
@@ -364,16 +364,6 @@
</executions>
</plugin>
<plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-surefire-plugin</artifactId>
- <configuration>
- <systemPropertyVariables>
- <startKdc>${startKdc}</startKdc>
- <kdc.resource.dir>${kdc.resource.dir}</kdc.resource.dir>
- </systemPropertyVariables>
- </configuration>
- </plugin>
- <plugin>
<groupId>org.apache.avro</groupId>
<artifactId>avro-maven-plugin</artifactId>
<executions>
@@ -480,6 +470,10 @@
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
+ <systemPropertyVariables>
+ <startKdc>${startKdc}</startKdc>
+ <kdc.resource.dir>${kdc.resource.dir}</kdc.resource.dir>
+ </systemPropertyVariables>
<properties>
<property>
<name>listener</name>
diff --git a/pom.xml b/pom.xml
index 13dbf49..ad84034 100644
--- a/pom.xml
+++ b/pom.xml
@@ -387,6 +387,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xs
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
+ <version>2.8.1</version>
<inherited>false</inherited>
<executions>
<execution>


@@ -1,21 +1,3 @@
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml
index 9b267fe..0ce916d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml
@@ -38,12 +38,10 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<dependencyManagement>
<dependencies>
- <!-- This is a really old version of netty, that gets privatized
- via shading and hence it is not managed via a parent pom -->
<dependency>
<groupId>org.jboss.netty</groupId>
<artifactId>netty</artifactId>
- <version>3.2.4.Final</version>
+ <version>3.9.3.Final</version>
</dependency>
</dependencies>
</dependencyManagement>
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index b315e2b..a9da3aa 100644
--- a/hadoop-project/pom.xml
@@ -25,7 +7,7 @@ index b315e2b..a9da3aa 100644
<groupId>io.netty</groupId>
<artifactId>netty</artifactId>
- <version>3.6.2.Final</version>
+ <version>3.9.3.Final</version>
+ <version>3.10.6.Final</version>
</dependency>
<dependency>

hadoop-openssl.patch (new file)

@@ -0,0 +1,37 @@
diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/OpensslCipher.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/OpensslCipher.c
index 5cb5bba..5294ec7 100644
--- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/OpensslCipher.c
+++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/OpensslCipher.c
@@ -253,14 +253,15 @@ JNIEXPORT jlong JNICALL Java_org_apache_hadoop_crypto_OpensslCipher_init
static int check_update_max_output_len(EVP_CIPHER_CTX *context, int input_len,
int max_output_len)
{
- if (context->flags & EVP_CIPH_NO_PADDING) {
+ unsigned long flags = EVP_CIPHER_flags(EVP_CIPHER_CTX_cipher(context));
+ if (flags & EVP_CIPH_NO_PADDING) {
if (max_output_len >= input_len) {
return 1;
}
return 0;
} else {
- int b = context->cipher->block_size;
- if (context->encrypt) {
+ int b = EVP_CIPHER_CTX_block_size(context);
+ if (EVP_CIPHER_CTX_encrypting(context)) {
if (max_output_len >= input_len + b - 1) {
return 1;
}
@@ -307,10 +308,11 @@ JNIEXPORT jint JNICALL Java_org_apache_hadoop_crypto_OpensslCipher_update
static int check_doFinal_max_output_len(EVP_CIPHER_CTX *context,
int max_output_len)
{
- if (context->flags & EVP_CIPH_NO_PADDING) {
+ unsigned long flags = EVP_CIPHER_flags(EVP_CIPHER_CTX_cipher(context));
+ if (flags & EVP_CIPH_NO_PADDING) {
return 1;
} else {
- int b = context->cipher->block_size;
+ int b = EVP_CIPHER_CTX_block_size(context);
if (max_output_len >= b) {
return 1;
}


@@ -2,21 +2,7 @@ diff --git a/hadoop-common-project/hadoop-annotations/pom.xml b/hadoop-common-pr
index c3e1aa1..9042f73 100644
--- a/hadoop-common-project/hadoop-annotations/pom.xml
+++ b/hadoop-common-project/hadoop-annotations/pom.xml
@@ -48,11 +48,8 @@
</activation>
<dependencies>
<dependency>
- <groupId>jdk.tools</groupId>
- <artifactId>jdk.tools</artifactId>
- <version>1.6</version>
- <scope>system</scope>
- <systemPath>${java.home}/../lib/tools.jar</systemPath>
+ <groupId>com.sun</groupId>
+ <artifactId>tools</artifactId>
</dependency>
</dependencies>
</profile>
@@ -63,11 +60,8 @@
@@ -46,11 +46,8 @@
</activation>
<dependencies>
<dependency>
@@ -30,3 +16,17 @@ index c3e1aa1..9042f73 100644
</dependency>
</dependencies>
</profile>
@@ -61,11 +58,8 @@
</activation>
<dependencies>
<dependency>
- <groupId>jdk.tools</groupId>
- <artifactId>jdk.tools</artifactId>
- <version>1.8</version>
- <scope>system</scope>
- <systemPath>${java.home}/../lib/tools.jar</systemPath>
+ <groupId>com.sun</groupId>
+ <artifactId>tools</artifactId>
</dependency>
</dependencies>
</profile>


@@ -1,8 +1,5 @@
%global _hardened_build 1
%global commit 9e2ef43a240fb0f603d8c384e501daec11524510
%global shortcommit %(c=%{commit}; echo ${c:0:7})
%global hadoop_version %{version}
%global hdfs_services hadoop-zkfc.service hadoop-datanode.service hadoop-secondarynamenode.service hadoop-namenode.service hadoop-journalnode.service
%global mapreduce_services hadoop-historyserver.service
@@ -13,14 +10,14 @@
%global __provides_exclude_from ^%{_libdir}/%{name}/.*$
Name: hadoop
Version: 2.4.1
Release: 26%{?dist}
Version: 2.7.3
Release: 1%{?dist}
Summary: A software platform for processing vast amounts of data
# The BSD license file is missing
# https://issues.apache.org/jira/browse/HADOOP-9849
License: ASL 2.0 and BSD
URL: https://%{name}.apache.org
Source0: https://github.com/apache/hadoop-common/archive/%{commit}/%{name}-%{version}-%{shortcommit}.tar.gz
Source0: https://www.apache.org/dist/%{name}/core/%{name}-%{version}/%{name}-%{version}-src.tar.gz
Source1: %{name}-layout.sh
Source2: %{name}-hdfs.service.template
Source3: %{name}-mapreduce.service.template
@@ -42,13 +39,11 @@ Source14: %{name}-tomcat-users.xml
Patch0: %{name}-fedora-integration.patch
# Fedora packaging guidelines for JNI library loading
Patch2: %{name}-jni-library-loading.patch
# Clean up warnings with maven 3.0.5
Patch3: %{name}-maven.patch
# Don't download tomcat
Patch4: %{name}-no-download-tomcat.patch
# Use dlopen to find libjvm.so
Patch5: %{name}-dlopen-libjvm.patch
# Update to Guava 17.0
# Update to Guava 18.0
Patch7: %{name}-guava.patch
# Update to Netty 3.6.6-Final
Patch8: %{name}-netty-3-Final.patch
@@ -60,21 +55,17 @@ Patch10: %{name}-build.patch
Patch12: %{name}-armhfp.patch
# fix Jersey1 support
Patch13: hadoop-2.4.1-jersey1.patch
Patch13: hadoop-jersey1.patch
# fix java8 doclint
Patch14: hadoop-2.4.1-disable-doclint.patch
# fix exception org.jets3t.service.S3ServiceException is never thrown in body of corresponding try statement
Patch15: hadoop-2.4.1-jets3t0.9.3.patch
# add some servlet3.1 missing methods
Patch16: hadoop-2.4.1-servlet-3.1-api.patch
# Adapt to the new BookKeeper ZkUtils API
Patch17: hadoop-2.4.1-new-bookkeeper.patch
# Fix POM warnings which become errors in newest Maven
Patch18: fix-pom-errors.patch
%if 0%{?fedora} > 25
# Fix Protobuf compiler errors after updating to 3.1.0
Patch19: protobuf3.patch
%endif
+# Patch openssl 1.0.2 to use 1.1.0
+Patch21: %{name}-openssl.patch
+# fix exception no longer thrown in aws
+Patch22: %{name}-aws.patch
# This is not a real BR, but is here because of rawhide shift to eclipse
# aether packages which caused a dependency of a dependency to not get
@@ -97,16 +88,19 @@ BuildRequires: apache-commons-logging
BuildRequires: apache-commons-math
BuildRequires: apache-commons-net
BuildRequires: apache-rat-plugin
BuildRequires: apacheds-kerberos
BuildRequires: atinject
BuildRequires: avalon-framework
BuildRequires: avalon-logkit
BuildRequires: avro
BuildRequires: avro-maven-plugin
+BuildRequires: aws-sdk-java
BuildRequires: bookkeeper-java
BuildRequires: cglib
BuildRequires: checkstyle
BuildRequires: chrpath
BuildRequires: cmake
+BuildRequires: curator
BuildRequires: ecj >= 1:4.2.1-6
BuildRequires: fuse-devel
BuildRequires: fusesource-pom
@@ -121,6 +115,7 @@ BuildRequires: guice-servlet
BuildRequires: hamcrest
BuildRequires: hawtjni
BuildRequires: hsqldb
+BuildRequires: htrace
BuildRequires: httpcomponents-client
BuildRequires: httpcomponents-core
BuildRequires: istack-commons
@@ -137,11 +132,13 @@ BuildRequires: jersey1-contribs
BuildRequires: jets3t
BuildRequires: jettison
BuildRequires: jetty8
+BuildRequires: jetty-util-ajax
BuildRequires: jsch
BuildRequires: json_simple
BuildRequires: jspc
BuildRequires: jsr-305
BuildRequires: jsr-311
+BuildRequires: jul-to-slf4j
BuildRequires: junit
BuildRequires: jzlib
BuildRequires: leveldbjni
@@ -166,6 +163,7 @@ BuildRequires: metrics
BuildRequires: mockito
BuildRequires: native-maven-plugin
BuildRequires: netty3
+BuildRequires: netty
BuildRequires: objectweb-asm
BuildRequires: objenesis >= 1.2-16
BuildRequires: openssl-devel
@@ -184,7 +182,6 @@ BuildRequires: tomcat-log4j
BuildRequires: tomcat-servlet-3.1-api
BuildRequires: txw2
BuildRequires: xmlenc
-BuildRequires: znerd-oss-parent
BuildRequires: zookeeper-java > 3.4.5-15
# For tests
BuildRequires: jersey1-test-framework
@@ -428,10 +425,10 @@ offering local computation and storage.
This package contains files needed to run Apache Hadoop YARN in secure mode.
%prep
-%autosetup -p1 -n %{name}-common-%{commit}
+%autosetup -p1 -n %{name}-%{version}-src
%if 0%{?fedora} > 25
%pom_xpath_set "pom:properties/pom:protobuf.version" 3.1.0 hadoop-project
%pom_xpath_set "pom:properties/pom:protobuf.version" 3.2.0 hadoop-project
%else
%pom_xpath_set "pom:properties/pom:protobuf.version" 2.6.1 hadoop-project
%endif
@@ -467,14 +464,82 @@ This package contains files needed to run Apache Hadoop YARN in secure mode.
# Disable the hadoop-minikdc module due to missing deps
%pom_disable_module hadoop-minikdc hadoop-common-project
%pom_remove_dep :hadoop-minikdc hadoop-common-project/hadoop-common
%pom_remove_dep :hadoop-minikdc hadoop-common-project/hadoop-auth
%pom_remove_dep :hadoop-minikdc hadoop-project
%pom_remove_dep :hadoop-minikdc hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests
%pom_remove_dep :hadoop-minikdc hadoop-common-project/hadoop-kms
%pom_remove_dep :hadoop-minikdc hadoop-hdfs-project/hadoop-hdfs
%pom_remove_dep :hadoop-minikdc hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry
%pom_remove_dep :hadoop-minikdc hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager
%pom_remove_dep :hadoop-minikdc hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice
rm -f hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/TestKerberosAuthenticator.java
rm -f hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java
rm -f hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAltKerberosAuthenticationHandler.java
rm -f hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java
rm -f hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java
rm -f hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferTestCase.java
rm -f hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZonesWithKMS.java
rm -f hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/TestSecureNNWithQJM.java
# Remove other deps only needed for testing
%pom_remove_dep :tomcat-embed-core hadoop-project
%pom_remove_dep :tomcat-embed-logging-juli hadoop-project
%pom_remove_dep :tomcat-embed-core hadoop-common-project/hadoop-auth
%pom_remove_dep :tomcat-embed-logging-juli hadoop-common-project/hadoop-auth
rm -f hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/AuthenticatorTestCase.java
rm -f hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/TestPseudoAuthenticator.java
%pom_xpath_remove "pom:project/pom:dependencyManagement/pom:dependencies/pom:dependency[pom:artifactId='hadoop-auth' and pom:type='test-jar']" hadoop-project
%pom_xpath_remove "pom:project/pom:dependencies/pom:dependency[pom:artifactId='hadoop-auth' and pom:type='test-jar']" hadoop-hdfs-project/hadoop-hdfs-httpfs
%pom_xpath_remove "pom:project/pom:dependencies/pom:dependency[pom:artifactId='hadoop-auth' and pom:type='test-jar']" hadoop-common-project/hadoop-common
%pom_xpath_remove "pom:project/pom:dependencies/pom:dependency[pom:artifactId='hadoop-auth' and pom:type='test-jar']" hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager
%pom_xpath_remove "pom:project/pom:dependencies/pom:dependency[pom:artifactId='hadoop-auth' and pom:type='test-jar']" hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice
# Remove tests with errors - Tests are not needed for packaging so don't bother
rm -f hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
rm -f hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUGILoginFromKeytab.java
rm -f hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/web/TestWebDelegationToken.java
rm -f hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/curator/TestChildReaper.java
rm -f hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java
rm -f hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestZKDelegationTokenSecretManager.java
rm -f hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpCookieFlag.java
rm -f hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMSWithZK.java
rm -f hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/MiniKMS.java
rm -f hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
rm -f hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestDelegationTokensWithHA.java
rm -f hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/TestDelegationTokenRemoteFetcher.java
rm -f hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java
rm -f hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestDelegationTokensWithHA.java
rm -f hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/TestSaslDataTransfer.java
rm -f hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancerWithSaslDataTransfer.java
rm -rf hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test
rm -rf hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test
rm -rf hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test
rm -rf hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager
rm -f hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/test/YarnTestDriver.java
rm -rf hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server
rm -rf hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client
rm -rf hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test
rm -rf hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/src/test
rm -rf hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test
rm -f hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java
rm -rf hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test
rm -rf hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test
rm -rf hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test
rm -rf hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test
rm -rf hadoop-tools/hadoop-streaming/src/test
rm -rf hadoop-tools/hadoop-gridmix/src/test/java
rm -rf hadoop-tools/hadoop-extras/src/test
# Remove dist plugin. It's not needed and has issues
%pom_remove_plugin :maven-antrun-plugin hadoop-common-project/hadoop-kms
%pom_remove_plugin :maven-antrun-plugin hadoop-dist
# remove plugin causing to build the same jar twice
%pom_remove_plugin :maven-jar-plugin hadoop-common-project/hadoop-auth
# modify version of apacheds-kerberos-codec to 2.0.0-M21
%pom_xpath_set "pom:project/pom:dependencyManagement/pom:dependencies/pom:dependency[pom:artifactId='apacheds-kerberos-codec']/pom:version" 2.0.0-M21 hadoop-project
%if 0%{?fedora} > 25
# Disable hadoop-pipes, because it needs upstream patching for Openssl 1.1.0
@@ -493,6 +558,23 @@ rm -f hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-test
%pom_xpath_set "pom:project/pom:dependencyManagement/pom:dependencies/pom:dependency[pom:artifactId='asm']/pom:version" 5.0.2 hadoop-project
%pom_xpath_set "pom:project/pom:dependencyManagement/pom:dependencies/pom:dependency[pom:artifactId='asm']/pom:groupId" org.ow2.asm hadoop-project
# Add missing deps
%pom_add_dep org.iq80.leveldb:leveldb hadoop-hdfs-project/hadoop-hdfs
%pom_add_dep org.iq80.leveldb:leveldb hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common
%pom_add_dep org.eclipse.jetty:jetty-util-ajax hadoop-hdfs-project/hadoop-hdfs
%pom_add_dep org.eclipse.jetty:jetty-util-ajax hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager
# remove plugins that are not needed
%pom_remove_plugin :maven-jar-plugin hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy
%pom_remove_plugin :maven-antrun-plugin hadoop-tools/hadoop-streaming
# disable microsoft azure because the package is not available
%pom_disable_module hadoop-azure hadoop-tools
%pom_remove_dep :hadoop-azure hadoop-tools/hadoop-tools-dist
# disable kms war because it breaks bundling policy
%pom_disable_module hadoop-kms hadoop-common-project
%pom_remove_dep :hadoop-kms hadoop-hdfs-project/hadoop-hdfs
# War files we don't want
%mvn_package :%{name}-auth-examples __noinstall
@@ -536,7 +618,7 @@ rm -f hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-test
%ifarch s390x ppc64le
export MAVEN_OPTS="-Xms2048M -Xmx4096M"
%endif
-%mvn_build -j -- -Drequire.snappy=true -Dcontainer-executor.conf.dir=%{_sysconfdir}/%{name} -Pdist,native -DskipTests -DskipTest -DskipIT
+%mvn_build -j -- -Drequire.snappy=true -Dcontainer-executor.conf.dir=%{_sysconfdir}/%{name} -Pdist,native -DskipTests -DskipTest -DskipIT -Dmaven.javadoc.skip=true
# This takes a long time to run, so comment out for now
#%%check
@@ -602,11 +684,19 @@ install -d -m 0755 %{buildroot}/%{_var}/run/%{name}-yarn
install -d -m 0755 %{buildroot}/%{_var}/run/%{name}-hdfs
install -d -m 0755 %{buildroot}/%{_var}/run/%{name}-mapreduce
-basedir='%{name}-dist/target/%{name}-%{hadoop_version}'
+basedir='%{name}-common-project/%{name}-common/target/%{name}-common-%{hadoop_version}'
+hdfsdir='%{name}-hdfs-project/%{name}-hdfs/target/%{name}-hdfs-%{hadoop_version}'
+httpfsdir='%{name}-hdfs-project/%{name}-hdfs-httpfs/target/%{name}-hdfs-httpfs-%{hadoop_version}'
+mapreddir='%{name}-mapreduce-project/target/%{name}-mapreduce-%{hadoop_version}'
+yarndir='%{name}-yarn-project/target/%{name}-yarn-project-%{hadoop_version}'
# copy script folders
for dir in bin libexec sbin
do
cp -arf $basedir/$dir %{buildroot}/%{_prefix}
+cp -arf $hdfsdir/$dir %{buildroot}/%{_prefix}
+cp -arf $mapreddir/$dir %{buildroot}/%{_prefix}
+cp -arf $yarndir/$dir %{buildroot}/%{_prefix}
done
# This binary is obsoleted and causes a conflict with qt-devel
@@ -618,11 +708,17 @@ rm -f %{buildroot}/%{_bindir}/test-container-executor
# Duplicate files
rm -f %{buildroot}/%{_sbindir}/hdfs-config.sh
# copy config files
cp -arf $basedir/etc/* %{buildroot}/%{_sysconfdir}
+cp -arf $httpfsdir/etc/* %{buildroot}/%{_sysconfdir}
+cp -arf $mapreddir/etc/* %{buildroot}/%{_sysconfdir}
+cp -arf $yarndir/etc/* %{buildroot}/%{_sysconfdir}
# copy binaries
cp -arf $basedir/lib/native/libhadoop.so* %{buildroot}/%{_libdir}/%{name}
chrpath --delete %{buildroot}/%{_libdir}/%{name}/*
-cp -arf $basedir/include/hdfs.h %{buildroot}/%{_includedir}/%{name}
-cp -arf $basedir/lib/native/libhdfs.so* %{buildroot}/%{_libdir}
+cp -arf $hdfsdir/include/hdfs.h %{buildroot}/%{_includedir}/%{name}
+cp -arf $hdfsdir/lib/native/libhdfs.so* %{buildroot}/%{_libdir}
chrpath --delete %{buildroot}/%{_libdir}/libhdfs*
cp -af hadoop-hdfs-project/hadoop-hdfs/target/native/main/native/fuse-dfs/fuse_dfs %{buildroot}/%{_bindir}
chrpath --delete %{buildroot}/%{_bindir}/fuse_dfs
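
Editorial note: with hdfs.h installed under %{_includedir}/%{name} and the versioned libhdfs.so.* under %{_libdir}, native clients can build against the packaged library. A minimal consumer sketch for illustration (not from this commit; the file path is a placeholder), compiled with -I/usr/include/hadoop -lhdfs and run with a JVM available, since libhdfs starts one over JNI:

#include <hdfs.h>    /* installed by this package as /usr/include/hadoop/hdfs.h */
#include <stdio.h>
#include <string.h>

int main(void)
{
    /* "default" resolves fs.defaultFS from the Hadoop configuration */
    hdfsFS fs = hdfsConnect("default", 0);
    if (fs == NULL) {
        fprintf(stderr, "hdfsConnect failed\n");
        return 1;
    }
    /* placeholder path; O_WRONLY/O_CREAT come via fcntl.h, pulled in by hdfs.h */
    hdfsFile f = hdfsOpenFile(fs, "/tmp/libhdfs-demo.txt", O_WRONLY | O_CREAT, 0, 0, 0);
    if (f != NULL) {
        const char *msg = "written via libhdfs\n";
        hdfsWrite(fs, f, msg, (tSize)strlen(msg));
        hdfsCloseFile(fs, f);
    }
    hdfsDisconnect(fs);
    return 0;
}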
@@ -675,10 +771,10 @@ pushd $basedir/share/%{name}/common/lib
popd
# hdfs jar dependencies
-copy_dep_jars $basedir/share/%{name}/hdfs/lib %{buildroot}/%{_datadir}/%{name}/hdfs/lib
+copy_dep_jars $hdfsdir/share/%{name}/hdfs/lib %{buildroot}/%{_datadir}/%{name}/hdfs/lib
%{_bindir}/xmvn-subst %{buildroot}/%{_datadir}/%{name}/hdfs/lib
%{__ln_s} %{_jnidir}/%{name}/%{name}-hdfs-bkjournal.jar %{buildroot}/%{_datadir}/%{name}/hdfs/lib
-pushd $basedir/share/%{name}/hdfs
+pushd $hdfsdir/share/%{name}/hdfs
link_hadoop_jars %{buildroot}/%{_datadir}/%{name}/hdfs
popd
@@ -738,23 +834,25 @@ pushd %{buildroot}/%{_datadir}/%{name}/httpfs/tomcat
popd
# mapreduce jar dependencies
-copy_dep_jars $basedir/share/%{name}/mapreduce/lib %{buildroot}/%{_datadir}/%{name}/mapreduce/lib
+mrdir='%{name}-mapreduce-project/target/%{name}-mapreduce-%{hadoop_version}'
+copy_dep_jars $mrdir/share/%{name}/mapreduce/lib %{buildroot}/%{_datadir}/%{name}/mapreduce/lib
%{_bindir}/xmvn-subst %{buildroot}/%{_datadir}/%{name}/mapreduce/lib
%{__ln_s} %{_javadir}/%{name}/%{name}-annotations.jar %{buildroot}/%{_datadir}/%{name}/mapreduce/lib
-pushd $basedir/share/%{name}/mapreduce
+pushd $mrdir/share/%{name}/mapreduce
link_hadoop_jars %{buildroot}/%{_datadir}/%{name}/mapreduce
popd
# yarn jar dependencies
-copy_dep_jars $basedir/share/%{name}/yarn/lib %{buildroot}/%{_datadir}/%{name}/yarn/lib
+yarndir='%{name}-yarn-project/target/%{name}-yarn-project-%{hadoop_version}'
+copy_dep_jars $yarndir/share/%{name}/yarn/lib %{buildroot}/%{_datadir}/%{name}/yarn/lib
%{_bindir}/xmvn-subst %{buildroot}/%{_datadir}/%{name}/yarn/lib
%{__ln_s} %{_javadir}/%{name}/%{name}-annotations.jar %{buildroot}/%{_datadir}/%{name}/yarn/lib
-pushd $basedir/share/%{name}/yarn
+pushd $yarndir/share/%{name}/yarn
link_hadoop_jars %{buildroot}/%{_datadir}/%{name}/yarn
popd
# Install hdfs webapp bits
-cp -arf $basedir/share/hadoop/hdfs/webapps/* %{buildroot}/%{_datadir}/%{name}/hdfs/webapps
+cp -arf $hdfsdir/share/hadoop/hdfs/webapps/* %{buildroot}/%{_datadir}/%{name}/hdfs/webapps
# hadoop layout. Convert to appropriate lib location for 32 and 64 bit archs
lib=$(echo %{?_libdir} | sed -e 's:/usr/\(.*\):\1:')
@@ -903,17 +1001,20 @@ fi
%{_datadir}/%{name}/client
%files -f .mfiles common
-%doc hadoop-dist/target/hadoop-%{hadoop_version}/share/doc/hadoop/common/*
-%config(noreplace) %{_sysconfdir}/%{name}/configuration.xsl
+%doc LICENSE.txt
+%doc NOTICE.txt
+%doc README.txt
%config(noreplace) %{_sysconfdir}/%{name}/core-site.xml
%config(noreplace) %{_sysconfdir}/%{name}/%{name}-env.sh
%config(noreplace) %{_sysconfdir}/%{name}/%{name}-metrics.properties
%config(noreplace) %{_sysconfdir}/%{name}/%{name}-metrics2.properties
%config(noreplace) %{_sysconfdir}/%{name}/%{name}-policy.xml
%config(noreplace) %{_sysconfdir}/%{name}/log4j.properties
-%config(noreplace) %{_sysconfdir}/%{name}/slaves
%config(noreplace) %{_sysconfdir}/%{name}/ssl-client.xml.example
%config(noreplace) %{_sysconfdir}/%{name}/ssl-server.xml.example
+%config(noreplace) %{_sysconfdir}/%{name}/slaves
+%config(noreplace) %{_sysconfdir}/%{name}/configuration.xsl
%dir %{_datadir}/%{name}
%dir %{_datadir}/%{name}/common
%{_datadir}/%{name}/common/lib
@@ -984,7 +1085,6 @@ fi
%attr(0775,root,tomcat) %dir %{_var}/cache/%{name}-httpfs/work
%files -n libhdfs
-%doc hadoop-dist/target/hadoop-%{hadoop_version}/share/doc/hadoop/hdfs/LICENSE.txt
%{_libdir}/libhdfs.so.*
%files -f .mfiles-%{name}-mapreduce mapreduce
@@ -1006,7 +1106,6 @@ fi
%files -f .mfiles-%{name}-mapreduce-examples mapreduce-examples
%files -f .mfiles-%{name}-maven-plugin maven-plugin
-%doc hadoop-dist/target/hadoop-%{hadoop_version}/share/doc/hadoop/common/LICENSE.txt
%files -f .mfiles-%{name}-tests tests
@@ -1037,6 +1136,9 @@ fi
%attr(6050,root,yarn) %{_bindir}/container-executor
%changelog
+* Thu Mar 02 2017 Mike Miller <mmiller@apache.org> - 2.7.3-1
+- Update to 2.7.3
* Fri Feb 10 2017 Fedora Release Engineering <releng@fedoraproject.org> - 2.4.1-26
- Rebuilt for https://fedoraproject.org/wiki/Fedora_26_Mass_Rebuild