From fe5fa11d25d9b66e338b4928ff490d35f1a7dde8 Mon Sep 17 00:00:00 2001 From: Mike Miller Date: Wed, 21 Dec 2016 15:02:57 -0500 Subject: [PATCH] Update to 2.7.3 --- fix-pom-errors.patch | 50 -- hadoop-2.4.1-jersey1.patch | 284 ------- hadoop-2.4.1-jets3t0.9.3.patch | 81 -- hadoop-2.4.1-new-bookkeeper.patch | 13 - hadoop-2.4.1-servlet-3.1-api.patch | 18 - hadoop-aws.patch | 60 ++ hadoop-dlopen-libjvm.patch | 103 ++- hadoop-fedora-integration.patch | 1226 +++++++++++----------------- hadoop-guava.patch | 417 +--------- hadoop-jersey1.patch | 13 + hadoop-maven.patch | 44 - hadoop-netty-3-Final.patch | 20 +- hadoop-openssl.patch | 37 + hadoop-tools.jar.patch | 30 +- hadoop.spec | 176 +++- 15 files changed, 858 insertions(+), 1714 deletions(-) delete mode 100644 fix-pom-errors.patch delete mode 100644 hadoop-2.4.1-jersey1.patch delete mode 100644 hadoop-2.4.1-jets3t0.9.3.patch delete mode 100644 hadoop-2.4.1-new-bookkeeper.patch delete mode 100644 hadoop-2.4.1-servlet-3.1-api.patch create mode 100644 hadoop-aws.patch create mode 100644 hadoop-jersey1.patch delete mode 100644 hadoop-maven.patch create mode 100644 hadoop-openssl.patch diff --git a/fix-pom-errors.patch b/fix-pom-errors.patch deleted file mode 100644 index 8153bd7..0000000 --- a/fix-pom-errors.patch +++ /dev/null @@ -1,50 +0,0 @@ ---- a/hadoop-project/pom.xml 2016-10-22 19:21:40.448895211 -0400 -+++ b/hadoop-project/pom.xml 2016-10-22 19:32:08.923535480 -0400 -@@ -919,6 +919,20 @@ - - pre-site - -+ -+ depcheck -+ -+ -+ -+ true -+ -+ -+ -+ -+ enforce -+ -+ verify -+ - - - -@@ -969,26 +983,6 @@ - false - - -- -- org.apache.maven.plugins -- maven-enforcer-plugin -- -- -- depcheck -- -- -- -- true -- -- -- -- -- enforce -- -- verify -- -- -- - - - diff --git a/hadoop-2.4.1-jersey1.patch b/hadoop-2.4.1-jersey1.patch deleted file mode 100644 index 40b23b3..0000000 --- a/hadoop-2.4.1-jersey1.patch +++ /dev/null @@ -1,284 +0,0 @@ -diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-common-project/hadoop-common/pom.xml hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-common-project/hadoop-common/pom.xml ---- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-common-project/hadoop-common/pom.xml 2015-09-10 04:13:59.016972031 +0200 -+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-common-project/hadoop-common/pom.xml 2015-09-10 03:53:51.902302395 +0200 -@@ -112,22 +112,26 @@ - - com.sun.jersey - jersey-core -+ ${jersey.version} - compile - - - - com.sun.jersey - jersey-json -+ ${jersey.version} - compile - - - com.sun.jersey - jersey-server -+ ${jersey.version} - compile - - - com.sun.jersey - jersey-servlet -+ ${jersey.version} - compile - - -diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-hdfs-project/hadoop-hdfs/pom.xml hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-hdfs-project/hadoop-hdfs/pom.xml ---- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-hdfs-project/hadoop-hdfs/pom.xml 2015-09-10 04:13:56.945073866 +0200 -+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-hdfs-project/hadoop-hdfs/pom.xml 2015-09-10 03:55:29.757492758 +0200 -@@ -83,11 +83,13 @@ - - com.sun.jersey - jersey-core -+ ${jersey.version} - compile - - - com.sun.jersey - jersey-server -+ ${jersey.version} - compile - - -diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml 
hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml ---- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml 2015-09-10 04:13:59.019971884 +0200 -+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml 2015-09-10 03:56:00.339989611 +0200 -@@ -67,11 +67,13 @@ - - com.sun.jersey - jersey-core -+ ${jersey.version} - compile - - - com.sun.jersey - jersey-server -+ ${jersey.version} - compile - - -diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-hdfs-project/hadoop-hdfs-nfs/pom.xml hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-hdfs-project/hadoop-hdfs-nfs/pom.xml ---- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-hdfs-project/hadoop-hdfs-nfs/pom.xml 2015-09-10 04:13:56.945073866 +0200 -+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-hdfs-project/hadoop-hdfs-nfs/pom.xml 2015-09-10 03:56:32.350416281 +0200 -@@ -97,11 +97,13 @@ - - com.sun.jersey - jersey-core -+ ${jersey.version} - compile - - - com.sun.jersey - jersey-server -+ ${jersey.version} - compile - - -diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-mapreduce-project/pom.xml hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-mapreduce-project/pom.xml ---- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-mapreduce-project/pom.xml 2015-09-10 04:13:56.999071212 +0200 -+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-mapreduce-project/pom.xml 2015-09-10 03:52:35.657049893 +0200 -@@ -128,6 +128,7 @@ - - com.sun.jersey - jersey-server -+ ${jersey.version} - - - asm -@@ -138,10 +139,12 @@ - - com.sun.jersey.contribs - jersey-guice -+ ${jersey.version} - - - com.google.inject.extensions - guice-servlet -+ ${jersey.version} - - - junit -diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-project/pom.xml hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-project/pom.xml ---- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-project/pom.xml 2015-09-10 04:13:59.038970950 +0200 -+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-project/pom.xml 2015-09-10 03:46:03.557321815 +0200 -@@ -59,7 +59,7 @@ - 1.7.4 - - -- 1.17.1 -+ 1 - - - -diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/pom.xml hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/pom.xml ---- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/pom.xml 2015-09-10 04:13:57.003071015 +0200 -+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/pom.xml 2015-09-10 03:47:14.870816716 +0200 -@@ -78,6 +78,7 @@ - - com.sun.jersey - jersey-client -+ ${jersey.version} - - - -diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml ---- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml 2015-09-10 04:13:57.013070524 +0200 -+++ 
hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml 2015-09-10 03:46:50.182030184 +0200 -@@ -83,6 +83,7 @@ - - com.sun.jersey - jersey-core -+ ${jersey.version} - - - org.codehaus.jackson -@@ -147,6 +148,7 @@ - - com.sun.jersey - jersey-server -+ ${jersey.version} - - - asm -@@ -157,10 +159,12 @@ - - com.sun.jersey - jersey-json -+ ${jersey.version} - - - com.sun.jersey.contribs - jersey-guice -+ ${jersey.version} - - - log4j -diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml ---- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml 2015-09-10 04:13:57.013070524 +0200 -+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml 2015-09-10 03:48:28.283208456 +0200 -@@ -99,15 +99,18 @@ - - com.sun.jersey.jersey-test-framework - jersey-test-framework-core -+ ${jersey.version} - test - - - com.sun.jersey - jersey-json -+ ${jersey.version} - - - com.sun.jersey.contribs - jersey-guice -+ ${jersey.version} - - - -@@ -137,10 +140,12 @@ - - com.sun.jersey - jersey-core -+ ${jersey.version} - - - com.sun.jersey - jersey-client -+ ${jersey.version} - - - com.google.guava -diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml ---- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml 2015-09-10 04:13:57.013070524 +0200 -+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml 2015-09-10 03:49:21.079613483 +0200 -@@ -89,10 +89,12 @@ - - com.sun.jersey - jersey-core -+ ${jersey.version} - - - com.sun.jersey - jersey-client -+ ${jersey.version} - - - org.eclipse.jetty -@@ -148,15 +150,18 @@ - - com.sun.jersey.jersey-test-framework - jersey-test-framework-grizzly2 -+ ${jersey.version} - test - - - com.sun.jersey - jersey-json -+ ${jersey.version} - - - com.sun.jersey.contribs - jersey-guice -+ ${jersey.version} - - - -diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/pom.xml hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/pom.xml ---- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/pom.xml 2015-09-10 04:13:57.022070082 +0200 -+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/pom.xml 2015-09-10 03:50:18.954768886 +0200 -@@ -109,15 +109,18 @@ - - com.sun.jersey.jersey-test-framework - jersey-test-framework-core -+ 
${jersey.version} - test - - - com.sun.jersey - jersey-json -+ ${jersey.version} - - - com.sun.jersey.contribs - jersey-guice -+ ${jersey.version} - - - -@@ -151,10 +154,12 @@ - - com.sun.jersey - jersey-core -+ ${jersey.version} - - - com.sun.jersey - jersey-client -+ ${jersey.version} - - - org.eclipse.jetty -@@ -210,6 +215,7 @@ - - com.sun.jersey.jersey-test-framework - jersey-test-framework-grizzly2 -+ ${jersey.version} - test - - -diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml ---- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml 2015-09-10 04:13:57.026069885 +0200 -+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jersey/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml 2015-09-10 03:51:11.787172144 +0200 -@@ -119,6 +119,7 @@ - - com.sun.jersey.jersey-test-framework - jersey-test-framework-grizzly2 -+ ${jersey.version} - test - - diff --git a/hadoop-2.4.1-jets3t0.9.3.patch b/hadoop-2.4.1-jets3t0.9.3.patch deleted file mode 100644 index 2d6d98b..0000000 --- a/hadoop-2.4.1-jets3t0.9.3.patch +++ /dev/null @@ -1,81 +0,0 @@ -diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jets3t/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java ---- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java 2014-06-30 09:04:57.000000000 +0200 -+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jets3t/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java 2015-03-14 15:37:19.582587031 +0100 -@@ -91,17 +91,17 @@ - - S3Credentials s3Credentials = new S3Credentials(); - s3Credentials.initialize(uri, conf); -- try { -+ //try { - AWSCredentials awsCredentials = - new AWSCredentials(s3Credentials.getAccessKey(), - s3Credentials.getSecretAccessKey()); - this.s3Service = new RestS3Service(awsCredentials); -- } catch (S3ServiceException e) { -- if (e.getCause() instanceof IOException) { -- throw (IOException) e.getCause(); -- } -- throw new S3Exception(e); -- } -+ // } catch (S3ServiceException e) { -+ // if (e.getCause() instanceof IOException) { -+ // throw (IOException) e.getCause(); -+ // } -+ // throw new S3Exception(e); -+ // } - bucket = new S3Bucket(uri.getHost()); - - this.bufferSize = conf.getInt( -diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jets3t/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java ---- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java 2014-06-30 09:04:57.000000000 +0200 -+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jets3t/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java 
2015-03-14 15:50:35.036095902 +0100 -@@ -117,7 +117,7 @@ - - - -- try { -+ //try { - String accessKey = null; - String secretAccessKey = null; - String userInfo = uri.getUserInfo(); -@@ -158,12 +158,12 @@ - AWSCredentials awsCredentials = - new AWSCredentials(accessKey, secretAccessKey); - this.s3Service = new RestS3Service(awsCredentials); -- } catch (S3ServiceException e) { -- if (e.getCause() instanceof IOException) { -- throw (IOException) e.getCause(); -- } -- throw new S3Exception(e); -- } -+ //} catch (S3ServiceException e) { -+ // if (e.getCause() instanceof IOException) { -+ // throw (IOException) e.getCause(); -+ // } -+ // throw new S3Exception(e); -+ //} - bucket = new S3Bucket(uri.getHost()); - } - -diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jets3t/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java ---- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java 2014-06-30 09:04:57.000000000 +0200 -+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.jets3t/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java 2015-03-14 15:24:05.397371065 +0100 -@@ -71,14 +71,14 @@ - public void initialize(URI uri, Configuration conf) throws IOException { - S3Credentials s3Credentials = new S3Credentials(); - s3Credentials.initialize(uri, conf); -- try { -+ //try { - AWSCredentials awsCredentials = - new AWSCredentials(s3Credentials.getAccessKey(), - s3Credentials.getSecretAccessKey()); - this.s3Service = new RestS3Service(awsCredentials); -- } catch (S3ServiceException e) { -- handleS3ServiceException(e); -- } -+ //} catch (S3ServiceException e) { -+ // handleS3ServiceException(e); -+ //} - multipartEnabled = - conf.getBoolean("fs.s3n.multipart.uploads.enabled", false); - multipartBlockSize = Math.min( diff --git a/hadoop-2.4.1-new-bookkeeper.patch b/hadoop-2.4.1-new-bookkeeper.patch deleted file mode 100644 index 2ee1fff..0000000 --- a/hadoop-2.4.1-new-bookkeeper.patch +++ /dev/null @@ -1,13 +0,0 @@ -diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/src/main/java/org/apache/hadoop/contrib/bkjournal/BookKeeperJournalManager.java hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.bookkeeper/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/src/main/java/org/apache/hadoop/contrib/bkjournal/BookKeeperJournalManager.java ---- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/src/main/java/org/apache/hadoop/contrib/bkjournal/BookKeeperJournalManager.java 2014-06-30 09:04:57.000000000 +0200 -+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.bookkeeper/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/src/main/java/org/apache/hadoop/contrib/bkjournal/BookKeeperJournalManager.java 2016-01-09 13:43:26.831773352 +0100 -@@ -237,7 +237,7 @@ - zkPathLatch.countDown(); - } - }; -- ZkUtils.createFullPathOptimistic(zkc, zkAvailablePath, new byte[0], -+ ZkUtils.asyncCreateFullPathOptimistic(zkc, zkAvailablePath, new byte[0], - Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT, callback, null); - - try { - diff --git a/hadoop-2.4.1-servlet-3.1-api.patch 
b/hadoop-2.4.1-servlet-3.1-api.patch deleted file mode 100644 index 5b92a3b..0000000 --- a/hadoop-2.4.1-servlet-3.1-api.patch +++ /dev/null @@ -1,18 +0,0 @@ -diff -Nru hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.servlet/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java ---- hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java 2015-03-20 04:45:08.415241957 +0100 -+++ hadoop-common-9e2ef43a240fb0f603d8c384e501daec11524510.servlet/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java 2015-03-14 16:33:12.627551779 +0100 -@@ -308,5 +308,14 @@ - public void write(int b) throws IOException { - buffer.append((char) b); - } -+ -+ public void setWriteListener(javax.servlet.WriteListener listener) { -+ throw new UnsupportedOperationException("Not implemented yet."); -+ } -+ -+ public boolean isReady() { -+ return false; -+ } -+ - } - } diff --git a/hadoop-aws.patch b/hadoop-aws.patch new file mode 100644 index 0000000..e7d985f --- /dev/null +++ b/hadoop-aws.patch @@ -0,0 +1,60 @@ +diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java +index 901f89b..3a44a01 100644 +--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java ++++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java +@@ -91,17 +91,10 @@ class Jets3tFileSystemStore implements FileSystemStore { + + S3Credentials s3Credentials = new S3Credentials(); + s3Credentials.initialize(uri, conf); +- try { + AWSCredentials awsCredentials = + new AWSCredentials(s3Credentials.getAccessKey(), + s3Credentials.getSecretAccessKey()); + this.s3Service = new RestS3Service(awsCredentials); +- } catch (S3ServiceException e) { +- if (e.getCause() instanceof IOException) { +- throw (IOException) e.getCause(); +- } +- throw new S3Exception(e); +- } + bucket = new S3Bucket(uri.getHost()); + + this.bufferSize = conf.getInt( +diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java +index 429c272..411bd53 100644 +--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java ++++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java +@@ -117,7 +117,6 @@ public class MigrationTool extends Configured implements Tool { + + + +- try { + String accessKey = null; + String secretAccessKey = null; + String userInfo = uri.getUserInfo(); +@@ -158,12 +157,6 @@ public class MigrationTool extends Configured implements Tool { + AWSCredentials awsCredentials = + new AWSCredentials(accessKey, secretAccessKey); + this.s3Service = new RestS3Service(awsCredentials); +- } catch (S3ServiceException e) { +- if (e.getCause() instanceof IOException) { +- throw (IOException) e.getCause(); +- } +- throw new S3Exception(e); +- } + bucket = new S3Bucket(uri.getHost()); + } + +diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java 
b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java +index a10d6f2..3e2fa38 100644 +--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java ++++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java +@@ -83,7 +83,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore { + new AWSCredentials(s3Credentials.getAccessKey(), + s3Credentials.getSecretAccessKey()); + this.s3Service = new RestS3Service(awsCredentials); +- } catch (S3ServiceException e) { ++ } catch (Exception e) { + handleException(e); + } + multipartEnabled = diff --git a/hadoop-dlopen-libjvm.patch b/hadoop-dlopen-libjvm.patch index 5695be4..6871e0b 100644 --- a/hadoop-dlopen-libjvm.patch +++ b/hadoop-dlopen-libjvm.patch @@ -14,13 +14,13 @@ diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/CMakeLists.txt b/hadoop-hdfs-pr index 82d1a32..2151bb8 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/CMakeLists.txt +++ b/hadoop-hdfs-project/hadoop-hdfs/src/CMakeLists.txt -@@ -99,7 +99,6 @@ if (NEED_LINK_DL) +@@ -147,7 +147,6 @@ if (NEED_LINK_DL) endif(NEED_LINK_DL) target_link_dual_libraries(hdfs - ${JAVA_JVM_LIBRARY} ${LIB_DL} - pthread + ${OS_LINK_LIBRARIES} ) diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/fuse-dfs/CMakeLists.txt b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/fuse-dfs/CMakeLists.txt index dd3f1e6..68ba422 100644 @@ -34,29 +34,52 @@ index dd3f1e6..68ba422 100644 hdfs m pthread +@@ -77,16 +77,6 @@ IF(FUSE_FOUND) + pthread + rt + ) +- add_executable(test_fuse_dfs +- test/test_fuse_dfs.c +- test/fuse_workload.c +- ) +- target_link_libraries(test_fuse_dfs +- ${FUSE_LIBRARIES} +- native_mini_dfs +- posix_util +- pthread +- ) + ELSE(FUSE_FOUND) + IF(REQUIRE_FUSE) + MESSAGE(FATAL_ERROR "Required component fuse_dfs could not be built.") diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/jni_helper.c b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/jni_helper.c index 878289f..62686b3 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/jni_helper.c +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/jni_helper.c -@@ -20,6 +20,7 @@ - #include "exception.h" - #include "jni_helper.h" +@@ -24,6 +24,7 @@ + #include "os/mutexes.h" + #include "os/thread_local_storage.h" +#include #include #include -@@ -442,10 +443,82 @@ static JNIEnv* getGlobalJNIEnv(void) +@@ -373,6 +374,12 @@ static JNIEnv* getGlobalJNIEnv(void) jint rv = 0; jint noVMs = 0; jthrowable jthr; + void *jvmHandle = NULL; + jint JNICALL (*getCreatedJavaJVMsPtr)(JavaVM **, jsize, jsize *); -+ getCreatedJavaJVMsPtr = NULL; ++ getCreatedJavaJVMsPtr = NULL; + jint JNICALL (*createJavaVMPtr)(JavaVM **, void **, void *); + createJavaVMPtr = NULL; + char *dlsym_error = NULL; -+ + char *hadoopClassPath; + const char *hadoopClassPathVMArg = "-Djava.class.path="; + size_t optHadoopClassPathLen; +@@ -384,10 +391,75 @@ static JNIEnv* getGlobalJNIEnv(void) + JavaVMInitArgs vm_args; + JavaVM *vm; + JavaVMOption *options; + //Get JAVA_HOME to use appropriate libjvm + char *javaHome = getenv("JAVA_HOME"); + if (javaHome == NULL) { @@ -114,7 +137,7 @@ index 878289f..62686b3 100644 + return NULL; + } -- rv = JNI_GetCreatedJavaVMs(&(vmBuf[0]), vmBufLength, &noVMs); +- rv = JNI_GetCreatedJavaVMs(&(vmBuf[0]), VM_BUF_LENGTH, &noVMs); + //Load the JNI_GetCreatedJavaVMs function from the libjvm library + getCreatedJavaJVMsPtr = (jint JNICALL (*)(JavaVM **, jsize, 
jsize *)) dlsym(jvmHandle, "JNI_GetCreatedJavaVMs"); + dlsym_error = dlerror(); @@ -123,21 +146,21 @@ index 878289f..62686b3 100644 + dlclose(jvmHandle); + return NULL; + } -+ rv = (*getCreatedJavaJVMsPtr)(&(vmBuf[0]), vmBufLength, &noVMs); ++ rv = (*getCreatedJavaJVMsPtr)(&(vmBuf[0]), VM_BUF_LENGTH, &noVMs); if (rv != 0) { fprintf(stderr, "JNI_GetCreatedJavaVMs failed with error: %d\n", rv); + dlclose(jvmHandle); return NULL; } -@@ -454,6 +527,7 @@ static JNIEnv* getGlobalJNIEnv(void) - char *hadoopClassPath = getenv("CLASSPATH"); +@@ -396,6 +468,7 @@ static JNIEnv* getGlobalJNIEnv(void) + hadoopClassPath = getenv("CLASSPATH"); if (hadoopClassPath == NULL) { fprintf(stderr, "Environment variable CLASSPATH not set!\n"); + dlclose(jvmHandle); return NULL; } - char *hadoopClassPathVMArg = "-Djava.class.path="; + optHadoopClassPathLen = strlen(hadoopClassPath) + @@ -502,7 +576,15 @@ static JNIEnv* getGlobalJNIEnv(void) vm_args.nOptions = noArgs; vm_args.ignoreUnrecognized = 1; @@ -163,3 +186,57 @@ index 878289f..62686b3 100644 return NULL; } jthr = invokeMethod(env, NULL, STATIC, NULL, +@@ -470,6 +552,7 @@ static JNIEnv* getGlobalJNIEnv(void) + if (rv != 0) { + fprintf(stderr, "Call to AttachCurrentThread " + "failed with error: %d\n", rv); ++ dlclose(jvmHandle); + return NULL; + } + } +diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/CMakeLists.txt b/hadoop-hdfs-project/hadoop-hdfs/src/CMakeLists.txt +index 74f20dd..255d4ab 100644 +--- a/hadoop-hdfs-project/hadoop-hdfs/src/CMakeLists.txt ++++ b/hadoop-hdfs-project/hadoop-hdfs/src/CMakeLists.txt +@@ -161,7 +161,6 @@ add_executable(test_libhdfs_ops + ) + target_link_libraries(test_libhdfs_ops + hdfs_static +- ${JAVA_JVM_LIBRARY} + ) + + add_executable(test_libhdfs_read +@@ -169,7 +168,6 @@ add_executable(test_libhdfs_read + ) + target_link_libraries(test_libhdfs_read + hdfs_static +- ${JAVA_JVM_LIBRARY} + ) + + add_executable(test_libhdfs_write +@@ -177,7 +175,6 @@ add_executable(test_libhdfs_write + ) + target_link_libraries(test_libhdfs_write + hdfs_static +- ${JAVA_JVM_LIBRARY} + ) + + add_library(native_mini_dfs +@@ -189,17 +186,9 @@ add_library(native_mini_dfs + ${OS_DIR}/thread_local_storage.c + ) + target_link_libraries(native_mini_dfs +- ${JAVA_JVM_LIBRARY} + ${OS_LINK_LIBRARIES} + ) + +-add_executable(test_native_mini_dfs +- main/native/libhdfs/test_native_mini_dfs.c +-) +-target_link_libraries(test_native_mini_dfs +- native_mini_dfs +-) +- + add_executable(test_libhdfs_threaded + main/native/libhdfs/expect.c + main/native/libhdfs/test_libhdfs_threaded.c diff --git a/hadoop-fedora-integration.patch b/hadoop-fedora-integration.patch index dc1129b..13a7166 100644 --- a/hadoop-fedora-integration.patch +++ b/hadoop-fedora-integration.patch @@ -2,22 +2,7 @@ diff --git a/hadoop-client/pom.xml b/hadoop-client/pom.xml index c6f6c1b..7a3e0d4 100644 --- a/hadoop-client/pom.xml +++ b/hadoop-client/pom.xml -@@ -40,12 +40,8 @@ - compile - - -- tomcat -- jasper-compiler -- -- -- tomcat -- jasper-runtime -+ org.apache.tomcat -+ tomcat-jasper - - - javax.servlet -@@ -60,24 +56,20 @@ +@@ -60,20 +56,16 @@ commons-logging-api @@ -37,12 +22,6 @@ index c6f6c1b..7a3e0d4 100644 - org.mortbay.jetty -- jsp-api-2.1 -+ org.eclipse.jetty -+ jetty-servlet - - -- org.mortbay.jetty - servlet-api-2.5 + org.eclipse.jetty + jetty-webapp @@ -64,7 +43,7 @@ diff --git a/hadoop-common-project/hadoop-auth/pom.xml b/hadoop-common-project/h index b9d6c60..9330a1a 100644 --- a/hadoop-common-project/hadoop-auth/pom.xml +++ b/hadoop-common-project/hadoop-auth/pom.xml -@@ -53,18 
+53,9 @@ +@@ -53,13 +53,9 @@ test @@ -74,26 +53,21 @@ index b9d6c60..9330a1a 100644 - - - org.mortbay.jetty -- jetty-util -- test -- -- -- org.mortbay.jetty - jetty + org.eclipse.jetty + jetty-servlet + 8.1.14.v20131031 test - + diff --git a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/AuthenticatorTestCase.java b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/AuthenticatorTestCase.java index 4e4ecc4..3429931 100644 --- a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/AuthenticatorTestCase.java +++ b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/AuthenticatorTestCase.java -@@ -14,11 +14,12 @@ - package org.apache.hadoop.security.authentication.client; - - import org.apache.hadoop.security.authentication.server.AuthenticationFilter; +@@ -29,11 +29,12 @@ import org.apache.http.entity.InputStreamEntity; + import org.apache.http.impl.auth.SPNegoSchemeFactory; + import org.apache.http.impl.client.SystemDefaultHttpClient; + import org.apache.http.util.EntityUtils; -import org.mortbay.jetty.Server; -import org.mortbay.jetty.servlet.Context; -import org.mortbay.jetty.servlet.FilterHolder; @@ -107,25 +81,26 @@ index 4e4ecc4..3429931 100644 import javax.servlet.FilterConfig; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; -@@ -35,13 +36,14 @@ +@@ -52,6 +53,7 @@ import java.net.HttpURLConnection; import java.net.ServerSocket; import java.net.URL; - import java.util.Properties; + import java.security.Principal; +import java.util.EnumSet; + import java.util.Properties; import org.junit.Assert; - public class AuthenticatorTestCase { - private Server server; - private String host = null; +@@ -61,7 +63,7 @@ public class AuthenticatorTestCase { private int port = -1; + private boolean useTomcat = false; + private Tomcat tomcat = null; - Context context; + ServletContextHandler context; private static Properties authenticatorConfig; -@@ -82,10 +84,10 @@ protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws S +@@ -120,10 +122,10 @@ public class AuthenticatorTestCase { - protected void start() throws Exception { + protected void startJetty() throws Exception { server = new Server(0); - context = new Context(); + context = new ServletContextHandler(); @@ -135,12 +110,12 @@ index 4e4ecc4..3429931 100644 + context.addFilter(new FilterHolder(TestFilter.class), "/*", EnumSet.of(DispatcherType.REQUEST)); context.addServlet(new ServletHolder(TestServlet.class), "/bar"); host = "localhost"; - ServerSocket ss = new ServerSocket(0); + port = getLocalPort(); diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml index 7cf67a3..ef2733f 100644 --- a/hadoop-common-project/hadoop-common/pom.xml +++ b/hadoop-common-project/hadoop-common/pom.xml -@@ -87,18 +87,25 @@ +@@ -89,25 +89,34 @@ compile @@ -148,6 +123,7 @@ index 7cf67a3..ef2733f 100644 - servlet-api + org.eclipse.jetty + jetty-server ++ 8.1 compile @@ -155,6 +131,7 @@ index 7cf67a3..ef2733f 100644 - jetty + org.eclipse.jetty + jetty-util ++ 8.1 compile @@ -162,17 +139,27 @@ index 7cf67a3..ef2733f 100644 - jetty-util + org.eclipse.jetty + jetty-servlet -+ 8.1.14.v20131031 ++ 8.1 + compile + + + org.eclipse.jetty + jetty-webapp -+ 8.1.14.v20131031 ++ 8.1 compile -@@ -118,21 +125,26 @@ + +- javax.servlet.jsp +- jsp-api +- runtime ++ org.apache.tomcat ++ tomcat-el-api ++ 
7.0.37 + + + com.sun.jersey +@@ -125,6 +134,11 @@ jersey-server compile @@ -183,31 +170,7 @@ index 7cf67a3..ef2733f 100644 + -- tomcat -- jasper-compiler -- runtime -+ org.apache.tomcat -+ tomcat-servlet-api -+ 7.0.37 - - -- tomcat -- jasper-runtime -- runtime -+ org.glassfish.web -+ javax.servlet.jsp -+ 2.2.6 - - -- javax.servlet.jsp -- jsp-api -- runtime -+ org.apache.tomcat -+ tomcat-el-api -+ 7.0.37 - - - commons-el + commons-logging diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/AdminAuthorizedServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/AdminAuthorizedServlet.java index ef562b4..a4b05a1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/AdminAuthorizedServlet.java @@ -522,7 +485,7 @@ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop index 2f28d08..3ac7086 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java -@@ -39,6 +39,7 @@ +@@ -40,6 +40,7 @@ import javax.servlet.ServletException; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; @@ -530,7 +493,7 @@ index 2f28d08..3ac7086 100644 import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequestWrapper; -@@ -61,29 +62,30 @@ +@@ -69,29 +70,30 @@ import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.Shell; @@ -584,22 +547,23 @@ index 2f28d08..3ac7086 100644 import com.google.common.base.Preconditions; import com.google.common.collect.Lists; -@@ -138,8 +140,8 @@ private ListenerInfo(boolean isManaged, Connector listener) { +@@ -135,8 +137,8 @@ private ListenerInfo(boolean isManaged, Connector listener) { protected final WebAppContext webAppContext; protected final boolean findPort; - protected final Map defaultContexts = -- new HashMap(); +- new HashMap<>(); + protected final Map defaultContexts = + new HashMap(); - protected final List filterNames = new ArrayList(); + protected final List filterNames = new ArrayList<>(); static final String STATE_DESCRIPTION_ALIVE = " - alive"; static final String STATE_DESCRIPTION_NOT_LIVE = " - not live"; -@@ -305,21 +307,23 @@ public HttpServer2 build() throws IOException { +@@ -299,22 +301,23 @@ public HttpServer2 build() throws IOException { if ("http".equals(scheme)) { listener = HttpServer2.createDefaultChannelConnector(); } else if ("https".equals(scheme)) { -- SslSocketConnector c = new SslSocketConnector(); +- SslSocketConnector c = new SslSocketConnectorSecure(); +- c.setHeaderBufferSize(1024*64); - c.setNeedClientAuth(needsClientAuth); - c.setKeyPassword(keyPassword); + // Jetty 8+ moved JKS config to SslContextFactory @@ -628,7 +592,7 @@ index 2f28d08..3ac7086 100644 listener = c; } else { -@@ -362,7 +366,8 @@ private void initializeWebServer(String name, String hostName, +@@ -369,7 +373,8 @@ private void initializeWebServer(String name, String hostName, if (sm instanceof AbstractSessionManager) { AbstractSessionManager asm = (AbstractSessionManager)sm; asm.setHttpOnly(true); @@ -638,7 +602,7 @@ index 2f28d08..3ac7086 100644 } ContextHandlerCollection contexts = new ContextHandlerCollection(); -@@ -380,11 +385,14 @@ private void initializeWebServer(String name, String hostName, +@@ -387,11 +392,14 @@ private void 
initializeWebServer(String name, String hostName, final String appDir = getWebAppsPath(name); @@ -655,7 +619,7 @@ index 2f28d08..3ac7086 100644 final FilterInitializer[] initializers = getFilterInitializers(conf); if (initializers != null) { conf = new Configuration(conf); -@@ -452,7 +460,8 @@ public static Connector createDefaultChannelConnector() { +@@ -508,7 +516,8 @@ public static Connector createDefaultChannelConnector() { // the same port with indeterminate routing of incoming requests to them ret.setReuseAddress(false); } @@ -665,7 +629,7 @@ index 2f28d08..3ac7086 100644 return ret; } -@@ -485,7 +494,7 @@ protected void addDefaultApps(ContextHandlerCollection parent, +@@ -541,7 +550,7 @@ protected void addDefaultApps(ContextHandlerCollection parent, // set up the context for "/logs/" if "hadoop.log.dir" property is defined. String logDir = System.getProperty("hadoop.log.dir"); if (logDir != null) { @@ -674,7 +638,7 @@ index 2f28d08..3ac7086 100644 logContext.setResourceBase(logDir); logContext.addServlet(AdminAuthorizedServlet.class, "/*"); if (conf.getBoolean( -@@ -494,7 +503,7 @@ protected void addDefaultApps(ContextHandlerCollection parent, +@@ -550,7 +559,7 @@ protected void addDefaultApps(ContextHandlerCollection parent, @SuppressWarnings("unchecked") Map params = logContext.getInitParams(); params.put( @@ -683,7 +647,7 @@ index 2f28d08..3ac7086 100644 } logContext.setDisplayName("logs"); setContextAttributes(logContext, conf); -@@ -502,7 +511,7 @@ protected void addDefaultApps(ContextHandlerCollection parent, +@@ -558,7 +567,7 @@ protected void addDefaultApps(ContextHandlerCollection parent, defaultContexts.put(logContext, true); } // set up the context for "/static/*" @@ -692,7 +656,7 @@ index 2f28d08..3ac7086 100644 staticContext.setResourceBase(appDir + "/static"); staticContext.addServlet(DefaultServlet.class, "/*"); staticContext.setDisplayName("static"); -@@ -510,7 +519,7 @@ protected void addDefaultApps(ContextHandlerCollection parent, +@@ -566,7 +575,7 @@ protected void addDefaultApps(ContextHandlerCollection parent, defaultContexts.put(staticContext, true); } @@ -701,13 +665,12 @@ index 2f28d08..3ac7086 100644 context.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf); context.getServletContext().setAttribute(ADMINS_ACL, adminsAcl); } -@@ -527,9 +536,12 @@ protected void addDefaultServlets() { +@@ -583,8 +592,11 @@ protected void addDefaultServlets() { addServlet("conf", "/conf", ConfServlet.class); } -- public void addContext(Context ctxt, boolean isFiltered) -+ public void addContext(ServletContextHandler ctxt, boolean isFiltered) - throws IOException { +- public void addContext(Context ctxt, boolean isFiltered) { ++ public void addContext(ServletContextHandler ctxt, boolean isFiltered) { - webServer.addHandler(ctxt); + ContextHandlerCollection handlers = new ContextHandlerCollection(); + handlers.setHandlers(webServer.getHandlers()); @@ -716,7 +679,7 @@ index 2f28d08..3ac7086 100644 addNoCacheFilter(webAppContext); defaultContexts.put(ctxt, isFiltered); } -@@ -631,7 +643,7 @@ public void addInternalServlet(String name, String pathSpec, +@@ -669,7 +681,7 @@ public void addInternalServlet(String name, String pathSpec, FilterMapping fmap = new FilterMapping(); fmap.setPathSpec(pathSpec); fmap.setFilterName(SPNEGO_FILTER); @@ -725,46 +688,55 @@ index 2f28d08..3ac7086 100644 handler.addFilterMapping(fmap); } } -@@ -645,9 +657,9 @@ public void addFilter(String name, String classname, - LOG.info("Added filter " + name + " (class=" + classname - + ") to context 
" + webAppContext.getDisplayName()); +@@ -686,9 +686,9 @@ public final class HttpServer2 implements FilterContainer { + "Added filter " + name + " (class=" + classname + ") to context " + webAppContext.getDisplayName()); final String[] ALL_URLS = { "/*" }; + fmap = getFilterMapping(name, ALL_URLS); - for (Map.Entry e : defaultContexts.entrySet()) { + for (Map.Entry e : defaultContexts.entrySet()) { if (e.getValue()) { - Context ctx = e.getKey(); + ServletContextHandler ctx = e.getKey(); - defineFilter(ctx, name, classname, parameters, ALL_URLS); + defineFilter(ctx, filterHolder, fmap); LOG.info("Added filter " + name + " (class=" + classname + ") to context " + ctx.getDisplayName()); -@@ -661,7 +673,7 @@ public void addGlobalFilter(String name, String classname, - Map parameters) { - final String[] ALL_URLS = { "/*" }; - defineFilter(webAppContext, name, classname, parameters, ALL_URLS); +@@ -704,7 +704,7 @@ public final class HttpServer2 implements FilterContainer { + FilterHolder filterHolder = getFilterHolder(name, classname, parameters); + FilterMapping fmap = getFilterMapping(name, ALL_URLS); + defineFilter(webAppContext, filterHolder, fmap); - for (Context ctx : defaultContexts.keySet()) { + for (ServletContextHandler ctx : defaultContexts.keySet()) { - defineFilter(ctx, name, classname, parameters, ALL_URLS); + defineFilter(ctx, filterHolder, fmap); } LOG.info("Added global filter '" + name + "' (class=" + classname + ")"); -@@ -670,7 +682,7 @@ public void addGlobalFilter(String name, String classname, +@@ -713,7 +725,7 @@ /** * Define a filter for a context and set up default url mappings. */ - public static void defineFilter(Context ctx, String name, + public static void defineFilter(ServletContextHandler ctx, String name, String classname, Map parameters, String[] urls) { - - FilterHolder holder = new FilterHolder(); -@@ -679,7 +691,7 @@ public static void defineFilter(Context ctx, String name, - holder.setInitParameters(parameters); + FilterHolder filterHolder = getFilterHolder(name, classname, parameters); + FilterMapping fmap = getFilterMapping(name, urls); +@@ -723,7 +734,7 @@ public final class HttpServer2 implements FilterContainer { + /** + * Define a filter for a context and set up default url mappings. 
+ */ +- private static void defineFilter(Context ctx, FilterHolder holder, ++ private static void defineFilter(ServletContextHandler ctx, FilterHolder holder, + FilterMapping fmap) { + ServletHandler handler = ctx.getServletHandler(); + handler.addFilter(holder, fmap); +@@ -732,7 +732,7 @@ + private static FilterMapping getFilterMapping(String name, String[] urls) { FilterMapping fmap = new FilterMapping(); fmap.setPathSpecs(urls); - fmap.setDispatches(Handler.ALL); + fmap.setDispatches(FilterMapping.ALL); fmap.setFilterName(name); - ServletHandler handler = ctx.getServletHandler(); - handler.addFilter(holder, fmap); -@@ -691,13 +703,13 @@ public static void defineFilter(Context ctx, String name, + return fmap; + } +@@ -752,13 +752,13 @@ * @param webAppCtx The WebApplicationContext to add to */ protected void addFilterPathMapping(String pathSpec, @@ -780,7 +752,7 @@ index 2f28d08..3ac7086 100644 handler.addFilterMapping(fmap); } } -@@ -751,7 +763,8 @@ public InetSocketAddress getConnectorAddress(int index) { +@@ -812,7 +823,8 @@ public InetSocketAddress getConnectorAddress(int index) { return null; Connector c = webServer.getConnectors()[index]; @@ -790,16 +762,16 @@ index 2f28d08..3ac7086 100644 // The connector is not bounded return null; } -@@ -841,7 +854,7 @@ private void loadListeners() { +@@ -901,7 +901,7 @@ public final class HttpServer2 implements FilterContainer { + */ void openListeners() throws Exception { - for (ListenerInfo li : listeners) { - Connector listener = li.listener; -- if (!li.isManaged || li.listener.getLocalPort() != -1) { -+ if (!li.isManaged || (li.listener.getLocalPort() != -1 && li.listener.getLocalPort() != -2)) { + for (Connector listener : listeners) { +- if (listener.getLocalPort() != -1) { ++ if (listener.getLocalPort() != -1 && listener.getLocalPort() != -2) { // This listener is either started externally or has been bound continue; } -@@ -1198,8 +1211,8 @@ public void doFilter(ServletRequest request, +@@ -1249,8 +1262,8 @@ public void doFilter(ServletRequest request, */ private String inferMimeType(ServletRequest request) { String path = ((HttpServletRequest)request).getRequestURI(); @@ -909,14 +881,14 @@ diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop index c0aaf64..a29e275 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpCookieFlag.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpCookieFlag.java -@@ -36,6 +36,7 @@ - import java.net.URI; +@@ -37,6 +37,7 @@ import java.net.URI; import java.net.URL; import java.security.GeneralSecurityException; + import java.net.HttpCookie; +import java.util.HashMap; + import java.util.List; public class TestHttpCookieFlag { - private static final String BASEDIR = System.getProperty("test.build.dir", @@ -70,7 +71,7 @@ public void destroy() { @Override public void initFilter(FilterContainer container, Configuration conf) { @@ -941,71 +913,6 @@ index 23e0d3e..24be3fe 100644 import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; -diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java -index cb86275..2c1c7bd 100644 ---- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java -+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java -@@ -61,10 
+61,11 @@ - import org.junit.AfterClass; - import org.junit.BeforeClass; - import org.junit.Test; -+import static org.junit.matchers.JUnitMatchers.*; - import org.mockito.Mockito; - import org.mockito.internal.util.reflection.Whitebox; --import org.mortbay.jetty.Connector; --import org.mortbay.util.ajax.JSON; -+import org.eclipse.jetty.server.Connector; -+import org.eclipse.jetty.util.ajax.JSON; - - import static org.mockito.Mockito.*; - -@@ -243,7 +244,7 @@ public void run() { - conn = (HttpURLConnection)servletUrl.openConnection(); - conn.connect(); - assertEquals(200, conn.getResponseCode()); -- assertEquals("text/plain; charset=utf-8", conn.getContentType()); -+ assertThat(conn.getContentType().toLowerCase(),both(containsString("text/plain")).and(containsString("charset=utf-8"))); - - // We should ignore parameters for mime types - ie a parameter - // ending in .css should not change mime type -@@ -251,21 +252,21 @@ public void run() { - conn = (HttpURLConnection)servletUrl.openConnection(); - conn.connect(); - assertEquals(200, conn.getResponseCode()); -- assertEquals("text/plain; charset=utf-8", conn.getContentType()); -+ assertThat(conn.getContentType().toLowerCase(),both(containsString("text/plain")).and(containsString("charset=utf-8"))); - - // Servlets that specify text/html should get that content type - servletUrl = new URL(baseUrl, "/htmlcontent"); - conn = (HttpURLConnection)servletUrl.openConnection(); - conn.connect(); - assertEquals(200, conn.getResponseCode()); -- assertEquals("text/html; charset=utf-8", conn.getContentType()); -+ assertThat(conn.getContentType().toLowerCase(),both(containsString("text/html")).and(containsString("charset=utf-8"))); - - // JSPs should default to text/html with utf8 -- servletUrl = new URL(baseUrl, "/testjsp.jsp"); -- conn = (HttpURLConnection)servletUrl.openConnection(); -- conn.connect(); -- assertEquals(200, conn.getResponseCode()); -- assertEquals("text/html; charset=utf-8", conn.getContentType()); -+// servletUrl = new URL(baseUrl, "/testjsp.jsp"); -+// conn = (HttpURLConnection)servletUrl.openConnection(); -+// conn.connect(); -+// assertEquals(200, conn.getResponseCode()); -+// assertThat(conn.getContentType().toLowerCase(),both(containsString("text/html")).and(containsString("charset=utf-8"))); - } - - /** -@@ -306,7 +307,7 @@ public DummyFilterInitializer() { - - @Override - public void initFilter(FilterContainer container, Configuration conf) { -- container.addFilter("DummyFilter", DummyServletFilter.class.getName(), null); -+ container.addFilter("DummyFilter", DummyServletFilter.class.getName(), new HashMap(0)); - } - } - diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java index 09f31df..be80795 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java @@ -1123,15 +1030,6 @@ diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml b/hadoop-hdfs-projec index d01a32f..d85405b 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml -@@ -34,7 +34,7 @@ - Apache Hadoop HttpFS - - -- 6.0.36 -+ 7.0.37 - REPO NOT AVAIL - REPO NOT AVAIL - REVISION NOT AVAIL @@ -45,7 +45,7 @@ LOCALHOST @@ -1152,22 +1050,7 @@ index d01a32f..d85405b 100644 test -@@ -108,12 +108,8 @@ - commons-httpclient - - -- tomcat -- jasper-compiler -- -- -- tomcat -- jasper-runtime -+ org.apache.tomcat -+ tomcat-jasper 
- - - javax.servlet -@@ -128,20 +124,20 @@ +@@ -128,16 +124,16 @@ jsp-api @@ -1183,35 +1066,14 @@ index d01a32f..d85405b 100644 - org.mortbay.jetty -- jsp-api-2.1 -+ org.eclipse.jetty -+ jetty-servlet - - -- org.mortbay.jetty - servlet-api-2.5 + org.eclipse.jetty + jetty-webapp net.java.dev.jets3t -@@ -171,12 +167,8 @@ - commons-httpclient - - -- tomcat -- jasper-compiler -- -- -- tomcat -- jasper-runtime -+ org.apache.tomcat -+ tomcat-jasper - - - javax.servlet -@@ -191,20 +183,20 @@ - jsp-api +@@ -158,16 +150,16 @@ + servlet-api - org.mortbay.jetty @@ -1226,12 +1088,6 @@ index d01a32f..d85405b 100644 - org.mortbay.jetty -- jsp-api-2.1 -+ org.eclipse.jetty -+ jetty-servlet - - -- org.mortbay.jetty - servlet-api-2.5 + org.eclipse.jetty + jetty-webapp @@ -1368,7 +1224,7 @@ diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/had index d512897..b277973 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java -@@ -42,8 +42,8 @@ +@@ -46,8 +46,8 @@ import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; @@ -1377,8 +1233,8 @@ index d512897..b277973 100644 +import org.eclipse.jetty.server.Server; +import org.eclipse.jetty.webapp.WebAppContext; - import java.io.File; - import java.io.FileOutputStream; + import com.google.common.collect.Lists; + @@ -108,7 +108,7 @@ private void createHttpFSServer() throws Exception { URL url = cl.getResource("webapp"); WebAppContext context = new WebAppContext(url.getPath(), "/webhdfs"); @@ -1388,26 +1244,11 @@ index d512897..b277973 100644 server.start(); } -diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSCustomUserName.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSCustomUserName.java -index e8407fc..7805633 100644 ---- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSCustomUserName.java -+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSCustomUserName.java -@@ -41,8 +41,8 @@ - import org.json.simple.parser.JSONParser; - import org.junit.Assert; - import org.junit.Test; --import org.mortbay.jetty.Server; --import org.mortbay.jetty.webapp.WebAppContext; -+import org.eclipse.jetty.server.Server; -+import org.eclipse.jetty.webapp.WebAppContext; - - import java.io.BufferedReader; - import java.io.File; diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java index 48cca42..f893127 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java -@@ -56,8 +56,8 @@ +@@ -64,8 +64,8 @@ import org.apache.hadoop.test.TestJettyHelper; import org.json.simple.JSONObject; import org.json.simple.parser.JSONParser; import org.junit.Test; @@ -1416,8 +1257,8 @@ index 48cca42..f893127 100644 +import org.eclipse.jetty.server.Server; +import org.eclipse.jetty.webapp.WebAppContext; - public class TestHttpFSServer extends HFSTestCase { - + import 
com.google.common.collect.Maps; + import java.util.Properties; @@ -157,7 +157,7 @@ private void createHttpFSServer(boolean addDelegationTokenAuthHandler) URL url = cl.getResource("webapp"); WebAppContext context = new WebAppContext(url.getPath(), "/webhdfs"); @@ -1546,29 +1387,6 @@ index 5ee5841..8b56730 100644 jetty-util compile -@@ -135,8 +135,8 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> - compile - - -- javax.servlet.jsp -- jsp-api -+ org.glassfish.web -+ javax.servlet.jsp - compile - - -@@ -180,11 +180,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> - compile - - -- tomcat -- jasper-runtime -- compile -- -- - xmlenc - xmlenc - compile diff --git a/hadoop-hdfs-project/hadoop-hdfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs/pom.xml index 420e5d2..c134d71 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/pom.xml @@ -1589,18 +1407,6 @@ index 420e5d2..c134d71 100644 jetty-util compile -@@ -121,11 +121,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> - compile - - -- javax.servlet.jsp -- jsp-api -- compile -- -- - log4j - log4j - compile @@ -136,11 +131,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> compile @@ -1613,86 +1419,6 @@ index 420e5d2..c134d71 100644 junit junit test -@@ -166,11 +156,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> - compile - - -- tomcat -- jasper-runtime -- compile -- -- - xmlenc - xmlenc - compile -@@ -278,20 +263,40 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> - - - -+ -+ org.codehaus.mojo.jspc -+ jspc-compiler-tomcat6 -+ 2.0-alpha-3 -+ -+ -+ org.apache.tomcat -+ * -+ -+ -+ -+ -+ -+ org.apache.tomcat -+ tomcat-servlet-api -+ 7.0.37 -+ - -- org.codehaus.mojo.jspc -- jspc-compiler-tomcat5 -- 2.0-alpha-3 -+ org.apache.tomcat -+ tomcat-el-api -+ 7.0.37 - - -- org.slf4j -- slf4j-log4j12 -- 1.4.1 -+ org.glassfish.web -+ javax.servlet.jsp -+ 2.2.5 -+ runtime - - -- org.slf4j -- jcl104-over-slf4j -- 1.4.1 -+ org.codehaus.groovy -+ groovy -+ 1.8.9 - - - -diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/src/test/java/org/apache/hadoop/contrib/bkjournal/BKJMUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/src/test/java/org/apache/hadoop/contrib/bkjournal/BKJMUtil.java -index 32b0583..4930816 100644 ---- a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/src/test/java/org/apache/hadoop/contrib/bkjournal/BKJMUtil.java -+++ b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/src/test/java/org/apache/hadoop/contrib/bkjournal/BKJMUtil.java -@@ -156,6 +156,13 @@ int checkBookiesUp(int count, int timeout) throws Exception { - List children = zkc.getChildren("/ledgers/available", - false); - mostRecentSize = children.size(); -+ // TODO: Bookkeeper 4.2.0 introduced "readonly" bookies -+ // which mess with test bookie counts; -+ // unclear why setReadOnlyModeEnabled(false) doesn't have -+ // backward-compat effect hoped for -+ if (children.contains("readonly")) { -+ mostRecentSize = children.size()-1; -+ } - if (LOG.isDebugEnabled()) { - LOG.debug("Found " + mostRecentSize + " bookies up, " - + "waiting for " + count); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNode.java index 50b44f8..d5a91d3 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNode.java @@ -1710,40 +1436,23 @@ diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdf index fc85a5e..1610c8c 100644 --- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java -@@ -88,7 +88,7 @@ - import org.apache.hadoop.util.*; - import org.apache.hadoop.util.DiskChecker.DiskErrorException; - import org.apache.hadoop.util.DiskChecker.DiskOutOfSpaceException; +@@ -202,7 +202,7 @@ import org.apache.hadoop.util.VersionInfo; + import org.apache.hadoop.tracing.SpanReceiverHost; + import org.apache.hadoop.tracing.SpanReceiverInfo; + import org.apache.hadoop.tracing.TraceAdminProtocol; -import org.mortbay.util.ajax.JSON; +import org.eclipse.jetty.util.ajax.JSON; - import javax.management.ObjectName; - -diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java -index 477b7f6..8a22654 100644 ---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java -+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java -@@ -30,10 +30,11 @@ - import org.apache.hadoop.http.HttpConfig; - import org.apache.hadoop.http.HttpServer2; - import org.apache.hadoop.security.UserGroupInformation; --import org.mortbay.jetty.Connector; -+import org.eclipse.jetty.server.Connector; - import com.google.common.annotations.VisibleForTesting; - -+ - /** - * Utility class to start a datanode in a secure cluster, first obtaining - * privileged resources before main startup and handing them to the datanode. + import com.google.common.base.Joiner; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java index 4232e00..3386dff 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java -@@ -264,7 +264,7 @@ - import org.apache.log4j.Appender; +@@ -289,7 +289,7 @@ import org.apache.log4j.Appender; import org.apache.log4j.AsyncAppender; import org.apache.log4j.Logger; + import org.codehaus.jackson.map.ObjectMapper; -import org.mortbay.util.ajax.JSON; +import org.eclipse.jetty.util.ajax.JSON; @@ -1762,32 +1471,6 @@ index aa4ba5d..5b945ba 100644 @InterfaceAudience.Private public class StreamFile extends DfsServlet { -diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java -index 50a7f21..1d96e15 100644 ---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java -+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java -@@ -32,7 +32,7 @@ - import org.apache.hadoop.security.token.TokenIdentifier; - import org.apache.hadoop.util.DataChecksum; - import org.apache.hadoop.util.StringUtils; --import org.mortbay.util.ajax.JSON; -+import org.eclipse.jetty.util.ajax.JSON; - - import java.io.ByteArrayInputStream; - import java.io.DataInputStream; -diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java -index 6aa935c..dfc1e39 100644 ---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java -+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java -@@ -98,7 +98,7 @@ - import org.apache.hadoop.security.token.Token; - import org.apache.hadoop.security.token.TokenIdentifier; - import org.apache.hadoop.util.Progressable; --import org.mortbay.util.ajax.JSON; -+import org.eclipse.jetty.util.ajax.JSON; - - import com.google.common.annotations.VisibleForTesting; - import com.google.common.base.Charsets; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNodeMXBean.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNodeMXBean.java index 3471848..b4e0202 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNodeMXBean.java @@ -1818,15 +1501,15 @@ diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdf index d459d30..6327a83 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java -@@ -37,7 +37,7 @@ - import org.apache.hadoop.io.nativeio.NativeIO.POSIX.NoMlockCacheManipulator; +@@ -32,7 +32,7 @@ import org.apache.hadoop.io.nativeio.NativeIO.POSIX.NoMlockCacheManipulator; import org.apache.hadoop.util.VersionInfo; + import org.codehaus.jackson.map.ObjectMapper; import org.junit.Test; -import org.mortbay.util.ajax.JSON; +import org.eclipse.jetty.util.ajax.JSON; - /** - * Class for testing {@link NameNodeMXBean} implementation + import javax.management.MBeanServer; + import javax.management.ObjectName; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStartupProgressServlet.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStartupProgressServlet.java index 0f22e9a..bff549a 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStartupProgressServlet.java @@ -1853,19 +1536,6 @@ index f24b801..28d05b4 100644 /* * Mock input stream class that always outputs the current position of the stream. 
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java -index 2bce30f..eaf836d 100644 ---- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java -+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java -@@ -38,7 +38,7 @@ - import org.apache.hadoop.util.Time; - import org.junit.Assert; - import org.junit.Test; --import org.mortbay.util.ajax.JSON; -+import org.eclipse.jetty.util.ajax.JSON; - - import com.google.common.collect.Lists; - diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/MiniDFSClusterManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/MiniDFSClusterManager.java index 7029f42..c7023c9 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/MiniDFSClusterManager.java @@ -1883,15 +1553,102 @@ diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-c index 981e6ff..7864756 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/JobEndNotifier.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/JobEndNotifier.java -@@ -30,7 +30,7 @@ +@@ -30,7 +30,8 @@ import org.apache.hadoop.mapred.JobContext; import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hadoop.mapreduce.v2.api.records.JobReport; -import org.mortbay.log.Log; +import org.eclipse.jetty.util.log.Log; ++import org.eclipse.jetty.util.log.Logger; /** *

This class handles job end notification. Submitters of jobs can choose to +@@ -48,6 +49,7 @@ import org.eclipse.jetty.util.log.Log; + public class JobEndNotifier implements Configurable { + private static final String JOB_ID = "$jobId"; + private static final String JOB_STATUS = "$jobStatus"; ++ private static Logger log = Log.getLogger(JobEndNotifier.class); + + private Configuration conf; + protected String userUrl; +@@ -101,10 +103,10 @@ public class JobEndNotifier implements Configurable { + int port = Integer.parseInt(portConf); + proxyToUse = new Proxy(proxyType, + new InetSocketAddress(hostname, port)); +- Log.info("Job end notification using proxy type \"" + proxyType + ++ log.info("Job end notification using proxy type \"" + proxyType + + "\" hostname \"" + hostname + "\" and port \"" + port + "\""); + } catch(NumberFormatException nfe) { +- Log.warn("Job end notification couldn't parse configured proxy's port " ++ log.warn("Job end notification couldn't parse configured proxy's port " + + portConf + ". Not going to use a proxy"); + } + } +@@ -121,23 +123,23 @@ public class JobEndNotifier implements Configurable { + protected boolean notifyURLOnce() { + boolean success = false; + try { +- Log.info("Job end notification trying " + urlToNotify); ++ log.info("Job end notification trying " + urlToNotify); + HttpURLConnection conn = + (HttpURLConnection) urlToNotify.openConnection(proxyToUse); + conn.setConnectTimeout(timeout); + conn.setReadTimeout(timeout); + conn.setAllowUserInteraction(false); + if(conn.getResponseCode() != HttpURLConnection.HTTP_OK) { +- Log.warn("Job end notification to " + urlToNotify +" failed with code: " ++ log.warn("Job end notification to " + urlToNotify +" failed with code: " + + conn.getResponseCode() + " and message \"" + conn.getResponseMessage() + +"\""); + } + else { + success = true; +- Log.info("Job end notification to " + urlToNotify + " succeeded"); ++ log.info("Job end notification to " + urlToNotify + " succeeded"); + } + } catch(IOException ioe) { +- Log.warn("Job end notification to " + urlToNotify + " failed", ioe); ++ log.warn("Job end notification to " + urlToNotify + " failed", ioe); + } + return success; + } +@@ -152,7 +154,7 @@ public class JobEndNotifier implements Configurable { + throws InterruptedException { + // Do we need job-end notification? 
+ if (userUrl == null) { +- Log.info("Job end notification URL not set, skipping."); ++ log.info("Job end notification URL not set, skipping."); + return; + } + +@@ -168,23 +170,23 @@ public class JobEndNotifier implements Configurable { + try { + urlToNotify = new URL(userUrl); + } catch (MalformedURLException mue) { +- Log.warn("Job end notification couldn't parse " + userUrl, mue); ++ log.warn("Job end notification couldn't parse " + userUrl, mue); + return; + } + + // Send notification + boolean success = false; + while (numTries-- > 0 && !success) { +- Log.info("Job end notification attempts left " + numTries); ++ log.info("Job end notification attempts left " + numTries); + success = notifyURLOnce(); + if (!success) { + Thread.sleep(waitInterval); + } + } + if (!success) { +- Log.warn("Job end notification failed to notify : " + urlToNotify); ++ log.warn("Job end notification failed to notify : " + urlToNotify); + } else { +- Log.info("Job end notification succeeded for " + jobReport.getJobId()); ++ log.info("Job end notification succeeded for " + jobReport.getJobId()); + } + } + } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java index 8891ec7..1dd369a 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java @@ -1984,25 +1741,99 @@ diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-c index c803a7f..393d385 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java -@@ -111,7 +111,7 @@ +@@ -118,6 +118,7 @@ import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory; + import org.jboss.netty.handler.codec.frame.TooLongFrameException; + import org.jboss.netty.handler.codec.http.DefaultHttpResponse; + import org.jboss.netty.handler.codec.http.HttpChunkAggregator; ++import org.jboss.netty.handler.codec.http.HttpHeaders; + import org.jboss.netty.handler.codec.http.HttpRequest; + import org.jboss.netty.handler.codec.http.HttpRequestDecoder; + import org.jboss.netty.handler.codec.http.HttpResponse; +@@ -127,7 +127,6 @@ import org.jboss.netty.handler.codec.http.QueryStringDecoder; import org.jboss.netty.handler.ssl.SslHandler; import org.jboss.netty.handler.stream.ChunkedWriteHandler; import org.jboss.netty.util.CharsetUtil; -import org.mortbay.jetty.HttpHeaders; -+import org.eclipse.jetty.http.HttpHeaders; + import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Charsets; - import com.google.common.util.concurrent.ThreadFactoryBuilder; +@@ -830,9 +830,9 @@ public class ShuffleHandler extends AuxiliaryService { + } + // Check whether the shuffle version is compatible + if (!ShuffleHeader.DEFAULT_HTTP_HEADER_NAME.equals( +- request.getHeader(ShuffleHeader.HTTP_HEADER_NAME)) ++ request.headers().get(ShuffleHeader.HTTP_HEADER_NAME)) + || !ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION.equals( +- request.getHeader(ShuffleHeader.HTTP_HEADER_VERSION))) { ++ 
request.headers().get(ShuffleHeader.HTTP_HEADER_VERSION))) { + sendError(ctx, "Incompatible shuffle request version", BAD_REQUEST); + } + final Map> q = +@@ -1044,12 +1044,12 @@ public class ShuffleHandler extends AuxiliaryService { + boolean keepAliveParam, long contentLength) { + if (!connectionKeepAliveEnabled && !keepAliveParam) { + LOG.info("Setting connection close header..."); +- response.setHeader(HttpHeaders.CONNECTION, CONNECTION_CLOSE); ++ response.headers().set(HttpHeaders.Names.CONNECTION, CONNECTION_CLOSE); + } else { +- response.setHeader(HttpHeaders.CONTENT_LENGTH, ++ response.headers().set(HttpHeaders.Names.CONTENT_LENGTH, + String.valueOf(contentLength)); +- response.setHeader(HttpHeaders.CONNECTION, HttpHeaders.KEEP_ALIVE); +- response.setHeader(HttpHeaders.KEEP_ALIVE, "timeout=" ++ response.headers().set(HttpHeaders.Names.CONNECTION, HttpHeaders.Values.KEEP_ALIVE); ++ response.headers().set(HttpHeaders.Values.KEEP_ALIVE, "timeout=" + + connectionKeepAliveTimeOut); + LOG.info("Content Length in shuffle : " + contentLength); + } +@@ -1077,7 +1077,7 @@ public class ShuffleHandler extends AuxiliaryService { + String enc_str = SecureShuffleUtils.buildMsgFrom(requestUri); + // hash from the fetcher + String urlHashStr = +- request.getHeader(SecureShuffleUtils.HTTP_HEADER_URL_HASH); ++ request.headers().get(SecureShuffleUtils.HTTP_HEADER_URL_HASH); + if (urlHashStr == null) { + LOG.info("Missing header hash for " + appid); + throw new IOException("fetcher cannot be authenticated"); +@@ -1093,11 +1093,11 @@ public class ShuffleHandler extends AuxiliaryService { + String reply = + SecureShuffleUtils.generateHash(urlHashStr.getBytes(Charsets.UTF_8), + tokenSecret); +- response.setHeader(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH, reply); ++ response.headers().set(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH, reply); + // Put shuffle version into http header +- response.setHeader(ShuffleHeader.HTTP_HEADER_NAME, ++ response.headers().set(ShuffleHeader.HTTP_HEADER_NAME, + ShuffleHeader.DEFAULT_HTTP_HEADER_NAME); +- response.setHeader(ShuffleHeader.HTTP_HEADER_VERSION, ++ response.headers().set(ShuffleHeader.HTTP_HEADER_VERSION, + ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION); + if (LOG.isDebugEnabled()) { + int len = reply.length(); +@@ -1163,11 +1163,11 @@ public class ShuffleHandler extends AuxiliaryService { + protected void sendError(ChannelHandlerContext ctx, String message, + HttpResponseStatus status) { + HttpResponse response = new DefaultHttpResponse(HTTP_1_1, status); +- response.setHeader(CONTENT_TYPE, "text/plain; charset=UTF-8"); ++ response.headers().set(CONTENT_TYPE, "text/plain; charset=UTF-8"); + // Put shuffle version into http header +- response.setHeader(ShuffleHeader.HTTP_HEADER_NAME, ++ response.headers().set(ShuffleHeader.HTTP_HEADER_NAME, + ShuffleHeader.DEFAULT_HTTP_HEADER_NAME); +- response.setHeader(ShuffleHeader.HTTP_HEADER_VERSION, ++ response.headers().set(ShuffleHeader.HTTP_HEADER_VERSION, + ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION); + response.setContent( + ChannelBuffers.copiedBuffer(message, CharsetUtil.UTF_8)); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java index 420c428..3a3257e 100644 --- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java -@@ -78,7 +78,7 @@ - import org.jboss.netty.handler.codec.http.HttpResponseStatus; - import org.junit.Assert; - import org.junit.Test; +@@ -94,7 +94,6 @@ import org.junit.Test; + import org.mockito.invocation.InvocationOnMock; + import org.mockito.stubbing.Answer; + import org.mockito.Mockito; -import org.mortbay.jetty.HttpHeaders; -+import org.eclipse.jetty.http.HttpHeaders; public class TestShuffleHandler { static final long MiB = 1024 * 1024; @@ -2021,25 +1852,6 @@ index 8ae5809..b7da2bc 100644 org.apache.ant -@@ -78,16 +78,8 @@ - commons-el - - -- tomcat -- jasper-runtime -- -- -- tomcat -- jasper-compiler -- -- -- org.mortbay.jetty -- jsp-2.1-jetty -+ org.apache.tomcat -+ tomcat-jasper - - - diff --git a/hadoop-mapreduce-project/pom.xml b/hadoop-mapreduce-project/pom.xml index 8f1d2b0..465c2df 100644 --- a/hadoop-mapreduce-project/pom.xml @@ -2055,25 +1867,6 @@ index 8f1d2b0..465c2df 100644 org.apache.ant -@@ -87,16 +87,8 @@ - commons-el - - -- tomcat -- jasper-runtime -- -- -- tomcat -- jasper-compiler -- -- -- org.mortbay.jetty -- jsp-2.1-jetty -+ org.apache.tomcat -+ tomcat-jasper - - - @@ -136,6 +128,12 @@ com.sun.jersey @@ -2091,21 +1884,21 @@ diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index b315e2b..e9b072d 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml -@@ -59,7 +59,7 @@ - 1.7.4 +@@ -79,7 +79,7 @@ + 2.7.1 + 3.0.0 - -- 1.9 -+ 1.17.1 +- 6.0.44 ++ 7.0.37 - - -@@ -360,29 +360,17 @@ + + 1.7 +@@ -455,23 +455,17 @@ javax.servlet servlet-api - 2.5 -+ 3.0-alpha-1 ++ 3.1.0 - org.mortbay.jetty @@ -2119,24 +1912,38 @@ index b315e2b..e9b072d 100644 - + org.eclipse.jetty + jetty-server -+ 8.1.14.v20131031 ++ 8 - org.mortbay.jetty + org.eclipse.jetty jetty-util - 6.1.26 -- -- -- -- org.glassfish -- javax.servlet ++ 8 + + + org.apache.tomcat.embed +@@ -484,15 +484,15 @@ + 7.0.55 + + +- javax.servlet.jsp +- jsp-api +- 2.1 ++ org.glassfish.web ++ javax.servlet.jsp ++ 2.2.5 + + + + org.glassfish + javax.servlet - 3.1 + 8.1.14.v20131031 -@@ -421,6 +409,17 @@ +@@ -531,6 +531,17 @@ com.sun.jersey jersey-server ${jersey.version} @@ -2154,62 +1961,19 @@ index b315e2b..e9b072d 100644 -@@ -472,34 +471,22 @@ +@@ -588,9 +582,9 @@ - org.mortbay.jetty - jetty-servlet-tester - 6.1.26 -- -- -- tomcat -- jasper-compiler -- 5.5.23 -- -- -- javax.servlet -- jsp-api -- -- -- ant -- ant -- -- + org.eclipse.jetty + test-jetty-servlet -+ 8.1.14.v20131031 - -+ - -- tomcat -- jasper-runtime -- 5.5.23 -+ org.apache.tomcat -+ tomcat-servlet-api -+ 7.0.37 - - -- javax.servlet.jsp -- jsp-api -- 2.1 -+ org.glassfish.web -+ javax.servlet.jsp -+ 2.2.5 - - - commons-el -@@ -728,7 +715,7 @@ - - org.apache.bookkeeper - bookkeeper-server -- 4.0.0 -+ 4.2.1 - compile ++ 8 + commons-logging diff --git a/hadoop-tools/hadoop-sls/pom.xml b/hadoop-tools/hadoop-sls/pom.xml index 6166725..e0d3ee7 100644 --- a/hadoop-tools/hadoop-sls/pom.xml @@ -2240,8 +2004,8 @@ diff --git a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/we index 123ccea..e961e58 100644 --- a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/web/SLSWebApp.java +++ b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/web/SLSWebApp.java -@@ -32,10 +32,11 @@ - import org.apache.commons.io.FileUtils; 
+@@ -34,10 +34,11 @@ import org.apache.hadoop.classification.InterfaceAudience.Private; + import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event .SchedulerEventType; -import org.mortbay.jetty.Handler; @@ -2256,14 +2020,14 @@ index 123ccea..e961e58 100644 import org.apache.hadoop.yarn.sls.SLSRunner; import org.apache.hadoop.yarn.sls.scheduler.FairSchedulerMetrics; -@@ -45,7 +46,6 @@ +@@ -47,7 +48,6 @@ import com.codahale.metrics.Counter; import com.codahale.metrics.Gauge; import com.codahale.metrics.Histogram; import com.codahale.metrics.MetricRegistry; -import org.mortbay.jetty.handler.ResourceHandler; - public class SLSWebApp extends HttpServlet { - private static final long serialVersionUID = 1905162041950251407L; + @Private + @Unstable @@ -108,8 +108,9 @@ public void start() throws Exception { Handler handler = new AbstractHandler() { @@ -2285,68 +2049,38 @@ index 123ccea..e961e58 100644 } else // json request if (target.equals("/simulateMetrics")) { -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml -index fe2955a..0179f7b 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml -@@ -64,10 +64,6 @@ - tomcat - jasper-compiler - -- -- org.mortbay.jetty -- jsp-2.1-jetty -- - - +diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestAMRMClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestAMRMClient.java +index 08e71c1..461c43c 100644 +--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestAMRMClient.java ++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestAMRMClient.java +@@ -93,7 +93,7 @@ import org.junit.BeforeClass; + import org.junit.Test; + import org.mockito.invocation.InvocationOnMock; + import org.mockito.stubbing.Answer; +-import org.mortbay.log.Log; ++import org.eclipse.jetty.util.log.Log; -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/pom.xml -index c639de8..37c0908 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/pom.xml -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/pom.xml -@@ -51,10 +51,6 @@ - tomcat - jasper-compiler - -- -- org.mortbay.jetty -- jsp-2.1-jetty -- - - + import com.google.common.base.Supplier; -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/pom.xml -index 35d1a42..48c0d50 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/pom.xml -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/pom.xml -@@ -63,10 +63,6 @@ - tomcat - jasper-compiler - -- -- org.mortbay.jetty -- jsp-2.1-jetty -- - - - -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/pom.xml 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/pom.xml -index 82d66cb..cc7606f 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/pom.xml -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/pom.xml -@@ -48,10 +48,6 @@ - tomcat - jasper-compiler - -- -- org.mortbay.jetty -- jsp-2.1-jetty -- - - +diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java +index 1efb54c..1b3463b 100644 +--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java ++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java +@@ -69,7 +69,7 @@ import org.apache.hadoop.yarn.util.Records; + import org.junit.Assert; + import org.junit.Before; + import org.junit.Test; +-import org.mortbay.log.Log; ++import org.eclipse.jetty.util.log.Log; -@@ -76,7 +72,7 @@ - log4j + import com.google.common.collect.ImmutableSet; + +diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml +index a19a78c..83aa759 100644 +--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml ++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml +@@ -67,7 +67,7 @@ + commons-codec - org.mortbay.jetty @@ -2354,48 +2088,7 @@ index 82d66cb..cc7606f 100644 jetty-util -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestAMRMClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestAMRMClient.java -index 08e71c1..461c43c 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestAMRMClient.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestAMRMClient.java -@@ -83,7 +83,7 @@ - import org.junit.Test; - import org.mockito.invocation.InvocationOnMock; - import org.mockito.stubbing.Answer; --import org.mortbay.log.Log; -+import org.eclipse.jetty.util.log.Log; - - public class TestAMRMClient { - static Configuration conf = null; -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java -index 1efb54c..1b3463b 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java -@@ -62,7 +62,7 @@ - import org.apache.hadoop.yarn.util.Records; - import org.junit.Before; - import org.junit.Test; --import org.mortbay.log.Log; -+import org.eclipse.jetty.util.log.Log; - - import org.apache.commons.cli.Options; - -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml -index a19a78c..83aa759 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml -@@ -51,10 +51,6 @@ - tomcat - jasper-compiler - -- -- org.mortbay.jetty -- jsp-2.1-jetty -- - - - -@@ -151,6 
+147,12 @@ +@@ -163,6 +163,12 @@ com.sun.jersey jersey-server @@ -2422,51 +2115,10 @@ index f8c6f55..71df06b 100644 webapp.setConf(conf); webapp.setHttpServer(server); -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml -index 8a4e6f5..c785145 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml -@@ -58,10 +58,6 @@ - tomcat - jasper-compiler - -- -- org.mortbay.jetty -- jsp-2.1-jetty -- - - - -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/pom.xml -index 294f969..24d7706 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/pom.xml -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/pom.xml -@@ -51,10 +51,6 @@ - tomcat - jasper-compiler - -- -- org.mortbay.jetty -- jsp-2.1-jetty -- - - - diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml index 0fbafd2..5fe4206 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml -@@ -53,10 +53,6 @@ - tomcat - jasper-compiler - -- -- org.mortbay.jetty -- jsp-2.1-jetty -- - - - @@ -99,7 +95,7 @@ jersey-client @@ -2492,14 +2144,14 @@ diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-serv index bfb0e87..f9fac8e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java -@@ -104,7 +104,7 @@ +@@ -124,7 +124,7 @@ import org.junit.Assert; import org.junit.Test; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; -import org.mortbay.util.MultiException; +import org.eclipse.jetty.util.MultiException; - + import com.google.common.base.Supplier; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesApps.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesApps.java index 72c1f6f..d272614 100644 @@ -2549,17 +2201,6 @@ diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-serv index 3e78e02..358a534 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/pom.xml +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/pom.xml -@@ -55,10 +55,6 @@ - tomcat - jasper-compiler - -- -- org.mortbay.jetty -- jsp-2.1-jetty -- - - - @@ -161,7 +157,7 @@ jersey-client @@ -2648,37 +2289,11 @@ index da2e2b1..77cdfa9 100644 } public void 
testNodesHelper(String path, String media) throws JSONException, -diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/pom.xml -index 44076eb..065bf72 100644 ---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/pom.xml -+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/pom.xml -@@ -50,10 +50,6 @@ - tomcat - jasper-compiler - -- -- org.mortbay.jetty -- jsp-2.1-jetty -- - - - diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml index 10f243c..af23544 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml -@@ -56,10 +56,6 @@ - tomcat - jasper-compiler - -- -- org.mortbay.jetty -- jsp-2.1-jetty -- - - - -@@ -109,8 +105,8 @@ +@@ -109,8 +105,9 @@ commons-logging @@ -2686,6 +2301,7 @@ index 10f243c..af23544 100644 - jetty + org.eclipse.jetty + jetty-server ++ 8 @@ -2693,7 +2309,7 @@ diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-serv index 1be0115..420a41c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServlet.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServlet.java -@@ -59,9 +59,9 @@ +@@ -57,9 +57,10 @@ import org.apache.hadoop.yarn.webapp.util.WebAppUtils; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -2703,10 +2319,11 @@ index 1be0115..420a41c 100644 +import org.eclipse.jetty.server.Server; +import org.eclipse.jetty.servlet.ServletContextHandler; +import org.eclipse.jetty.servlet.ServletHolder; ++ + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; - /** - * Test the WebAppProxyServlet and WebAppProxy. 
For back end use simple web -@@ -81,7 +81,7 @@ +@@ -81,7 +82,7 @@ public class TestWebAppProxyServlet { @BeforeClass public static void start() throws Exception { server = new Server(0); @@ -2714,4 +2331,135 @@ index 1be0115..420a41c 100644 + ServletContextHandler context = new ServletContextHandler(); context.setContextPath("/foo"); server.setHandler(context); - context.addServlet(new ServletHolder(TestServlet.class), "/bar/"); + context.addServlet(new ServletHolder(TestServlet.class), "/bar"); +diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SslSocketConnectorSecure.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SslSocketConnectorSecure.java +index 52ab7ad..6b07871 100644 +--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SslSocketConnectorSecure.java ++++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SslSocketConnectorSecure.java +@@ -18,7 +18,7 @@ + + package org.apache.hadoop.security.ssl; + +-import org.mortbay.jetty.security.SslSocketConnector; ++import org.eclipse.jetty.server.ssl.SslSocketConnector; + + import javax.net.ssl.SSLServerSocket; + import java.io.IOException; +diff --git a/hadoop-common-project/hadoop-kms/pom.xml b/hadoop-common-project/hadoop-kms/pom.xml +index c479b67..a065485 100644 +--- a/hadoop-common-project/hadoop-kms/pom.xml ++++ b/hadoop-common-project/hadoop-kms/pom.xml +@@ -84,11 +84,6 @@ + provided + + +- org.mortbay.jetty +- jetty +- test +- +- + org.apache.hadoop + hadoop-common + compile +@@ -122,20 +117,20 @@ + jsp-api + + +- org.mortbay.jetty +- jetty ++ org.eclipse.jetty ++ jetty-webapp + + +- org.mortbay.jetty ++ org.eclipse.jetty + jetty-util + + +- org.mortbay.jetty +- jsp-api-2.1 ++ org.eclipse.jetty ++ jetty-servlet + + +- org.mortbay.jetty +- servlet-api-2.5 ++ org.eclipse.jetty ++ jetty-server + + + net.java.dev.jets3t +@@ -178,7 +173,7 @@ + compile + + +- org.mortbay.jetty ++ org.eclipse.jetty + jetty-util + compile + +diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeReference.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeReference.java +index d2cca0e..637cfd6 100644 +--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeReference.java ++++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeReference.java +@@ -31,7 +31,8 @@ import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeat + import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot; + + import com.google.common.base.Preconditions; +-import org.mortbay.log.Log; ++import org.eclipse.jetty.util.log.Logger; ++import org.eclipse.jetty.util.log.Log; + + /** + * An anonymous reference to an inode. 
+@@ -567,7 +568,8 @@ public abstract class INodeReference extends INode { + try { + ref.addSpaceConsumed(counts.negation(), true); + } catch (QuotaExceededException e) { +- Log.warn("Should not have QuotaExceededException"); ++ Logger log = Log.getLogger(INodeReference.class); ++ log.warn("Should not have QuotaExceededException"); + } + } + +diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/TransferFsImage.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/TransferFsImage.java +index 0d32758..658fa3d 100644 +--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/TransferFsImage.java ++++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/TransferFsImage.java +@@ -63,7 +63,7 @@ import org.apache.http.client.utils.URIBuilder; + + import com.google.common.annotations.VisibleForTesting; + import com.google.common.collect.Lists; +-import org.mortbay.jetty.EofException; ++import org.eclipse.jetty.io.EofException; + + /** + * This class provides fetching a specified file from the NameNode. +diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java +index 5dc0963..63c9647 100644 +--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java ++++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java +@@ -62,7 +62,7 @@ import org.apache.hadoop.fs.azure.metrics.ErrorMetricUpdater; + import org.apache.hadoop.fs.azure.metrics.ResponseReceivedMetricUpdater; + import org.apache.hadoop.fs.permission.FsPermission; + import org.apache.hadoop.fs.permission.PermissionStatus; +-import org.mortbay.util.ajax.JSON; ++import org.eclipse.jetty.util.ajax.JSON; + + import com.google.common.annotations.VisibleForTesting; + import com.microsoft.azure.storage.CloudStorageAccount; +diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java +index 7fb900d..27582fb 100644 +--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java ++++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java +@@ -55,8 +55,8 @@ import org.apache.hadoop.yarn.server.timeline.webapp.CrossOriginFilterInitialize + import org.apache.hadoop.yarn.webapp.WebApp; + import org.apache.hadoop.yarn.webapp.WebApps; + import org.apache.hadoop.yarn.webapp.util.WebAppUtils; +-import org.mortbay.jetty.servlet.FilterHolder; +-import org.mortbay.jetty.webapp.WebAppContext; ++import org.eclipse.jetty.servlet.FilterHolder; ++import org.eclipse.jetty.webapp.WebAppContext; + + import com.google.common.annotations.VisibleForTesting; + diff --git a/hadoop-guava.patch b/hadoop-guava.patch index 3e39932..a2abab3 100644 --- a/hadoop-guava.patch +++ 
b/hadoop-guava.patch @@ -1,411 +1,26 @@ -diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java -index f7932a6..ec3d9cf 100644 ---- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java -+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java -@@ -22,6 +22,7 @@ - import java.util.List; - import java.util.Map; - import java.util.Set; -+import java.util.concurrent.TimeUnit; - - import org.apache.commons.logging.Log; - import org.apache.commons.logging.LogFactory; -@@ -153,7 +154,7 @@ public String toString() { - private class Monitor implements Runnable { - @Override - public void run() { -- Stopwatch sw = new Stopwatch(); -+ Stopwatch sw = Stopwatch.createUnstarted(); - Map gcTimesBeforeSleep = getGcTimes(); - while (shouldRun) { - sw.reset().start(); -@@ -162,7 +163,7 @@ public void run() { - } catch (InterruptedException ie) { - return; - } -- long extraSleepTime = sw.elapsedMillis() - SLEEP_INTERVAL_MS; -+ long extraSleepTime = sw.elapsed(TimeUnit.MILLISECONDS) - SLEEP_INTERVAL_MS; - Map gcTimesAfterSleep = getGcTimes(); - - if (extraSleepTime > warnThresholdMs) { -diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/IPCLoggerChannel.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/IPCLoggerChannel.java -index 8588de5..cb0dbae 100644 ---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/IPCLoggerChannel.java -+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/IPCLoggerChannel.java -@@ -133,7 +133,7 @@ - /** - * Stopwatch which starts counting on each heartbeat that is sent - */ -- private final Stopwatch lastHeartbeatStopwatch = new Stopwatch(); -+ private final Stopwatch lastHeartbeatStopwatch = Stopwatch.createUnstarted(); - - private static final long HEARTBEAT_INTERVAL_MILLIS = 1000; - -@@ -435,7 +435,7 @@ private void throwIfOutOfSync() - * written. 
- */ - private void heartbeatIfNecessary() throws IOException { -- if (lastHeartbeatStopwatch.elapsedMillis() > HEARTBEAT_INTERVAL_MILLIS || -+ if (lastHeartbeatStopwatch.elapsed(TimeUnit.MILLISECONDS) > HEARTBEAT_INTERVAL_MILLIS || - !lastHeartbeatStopwatch.isRunning()) { - try { - getProxy().heartbeat(createReqInfo()); -diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java -index c117ee8..82f01da 100644 ---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java -+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java -@@ -68,7 +68,6 @@ - import com.google.common.base.Stopwatch; - import com.google.common.collect.ImmutableList; - import com.google.common.collect.Range; --import com.google.common.collect.Ranges; - import com.google.protobuf.TextFormat; - - /** -@@ -374,15 +373,15 @@ synchronized void journal(RequestInfo reqInfo, - - curSegment.writeRaw(records, 0, records.length); - curSegment.setReadyToFlush(); -- Stopwatch sw = new Stopwatch(); -+ Stopwatch sw = Stopwatch.createUnstarted(); - sw.start(); - curSegment.flush(shouldFsync); - sw.stop(); - -- metrics.addSync(sw.elapsedTime(TimeUnit.MICROSECONDS)); -- if (sw.elapsedTime(TimeUnit.MILLISECONDS) > WARN_SYNC_MILLIS_THRESHOLD) { -+ metrics.addSync(sw.elapsed(TimeUnit.MICROSECONDS)); -+ if (sw.elapsed(TimeUnit.MILLISECONDS) > WARN_SYNC_MILLIS_THRESHOLD) { - LOG.warn("Sync of transaction range " + firstTxnId + "-" + lastTxnId + -- " took " + sw.elapsedTime(TimeUnit.MILLISECONDS) + "ms"); -+ " took " + sw.elapsed(TimeUnit.MILLISECONDS) + "ms"); - } - - if (isLagging) { -@@ -853,7 +852,7 @@ public synchronized void acceptRecovery(RequestInfo reqInfo, - private Range txnRange(SegmentStateProto seg) { - Preconditions.checkArgument(seg.hasEndTxId(), - "invalid segment: %s", seg); -- return Ranges.closed(seg.getStartTxId(), seg.getEndTxId()); -+ return Range.closed(seg.getStartTxId(), seg.getEndTxId()); - } - - /** -diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java -index 5075da9..0d868d4 100644 ---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java -+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java -@@ -62,7 +62,7 @@ - - import com.google.common.collect.Lists; - import com.google.common.collect.Maps; --import com.google.common.io.LimitInputStream; -+import com.google.common.io.ByteStreams; - import com.google.protobuf.CodedOutputStream; - - /** -@@ -215,7 +215,7 @@ public int compare(FileSummary.Section s1, FileSummary.Section s2) { - - for (FileSummary.Section s : sections) { - channel.position(s.getOffset()); -- InputStream in = new BufferedInputStream(new LimitInputStream(fin, -+ InputStream in = new BufferedInputStream(ByteStreams.limit(fin, - s.getLength())); - - in = FSImageUtil.wrapInputStreamForCompression(conf, -diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FileDistributionCalculator.java 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FileDistributionCalculator.java -index c8033dd..b312bfe 100644 ---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FileDistributionCalculator.java -+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FileDistributionCalculator.java -@@ -33,7 +33,7 @@ - import org.apache.hadoop.io.IOUtils; - - import com.google.common.base.Preconditions; --import com.google.common.io.LimitInputStream; -+import com.google.common.io.ByteStreams; - - /** - * This is the tool for analyzing file sizes in the namespace image. In order to -@@ -106,7 +106,7 @@ void visit(RandomAccessFile file) throws IOException { - - in.getChannel().position(s.getOffset()); - InputStream is = FSImageUtil.wrapInputStreamForCompression(conf, -- summary.getCodec(), new BufferedInputStream(new LimitInputStream( -+ summary.getCodec(), new BufferedInputStream(ByteStreams.limit( - in, s.getLength()))); - run(is); - output(); -diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsrPBImage.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsrPBImage.java -index d80fcf1..e025f82 100644 ---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsrPBImage.java -+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsrPBImage.java -@@ -50,7 +50,7 @@ - - import com.google.common.collect.Lists; - import com.google.common.collect.Maps; --import com.google.common.io.LimitInputStream; -+import com.google.common.io.ByteStreams; - - /** - * LsrPBImage displays the blocks of the namespace in a format very similar -@@ -110,7 +110,7 @@ public int compare(FileSummary.Section s1, FileSummary.Section s2) { - for (FileSummary.Section s : sections) { - fin.getChannel().position(s.getOffset()); - InputStream is = FSImageUtil.wrapInputStreamForCompression(conf, -- summary.getCodec(), new BufferedInputStream(new LimitInputStream( -+ summary.getCodec(), new BufferedInputStream(ByteStreams.limit( - fin, s.getLength()))); - - switch (SectionName.fromString(s.getName())) { -diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java -index 99617b8..c613591 100644 ---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java -+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java -@@ -52,7 +52,7 @@ - import org.apache.hadoop.io.IOUtils; - - import com.google.common.collect.Lists; --import com.google.common.io.LimitInputStream; -+import com.google.common.io.ByteStreams; - - /** - * PBImageXmlWriter walks over an fsimage structure and writes out -@@ -100,7 +100,7 @@ public int compare(FileSummary.Section s1, FileSummary.Section s2) { - for (FileSummary.Section s : sections) { - fin.getChannel().position(s.getOffset()); - InputStream is = FSImageUtil.wrapInputStreamForCompression(conf, -- summary.getCodec(), new BufferedInputStream(new LimitInputStream( -+ summary.getCodec(), new BufferedInputStream(ByteStreams.limit( - fin, s.getLength()))); - - switch 
(SectionName.fromString(s.getName())) { -diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDataTransferKeepalive.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDataTransferKeepalive.java -index 132218c..09d42e1 100644 ---- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDataTransferKeepalive.java -+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDataTransferKeepalive.java -@@ -47,7 +47,7 @@ - import org.junit.Before; - import org.junit.Test; - --import com.google.common.io.NullOutputStream; -+import com.google.common.io.ByteStreams; - - public class TestDataTransferKeepalive { - final Configuration conf = new HdfsConfiguration(); -@@ -224,7 +224,7 @@ public void testManyClosedSocketsInCache() throws Exception { - stms[i] = fs.open(TEST_FILE); - } - for (InputStream stm : stms) { -- IOUtils.copyBytes(stm, new NullOutputStream(), 1024); -+ IOUtils.copyBytes(stm, ByteStreams.nullOutputStream(), 1024); - } - } finally { - IOUtils.cleanup(null, stms); -diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMultiThreadedHflush.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMultiThreadedHflush.java -index 92c7672..aa5c351 100644 ---- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMultiThreadedHflush.java -+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMultiThreadedHflush.java -@@ -100,10 +100,10 @@ public void run() { - } - - private void doAWrite() throws IOException { -- Stopwatch sw = new Stopwatch().start(); -+ Stopwatch sw = Stopwatch.createStarted(); - stm.write(toWrite); - stm.hflush(); -- long micros = sw.elapsedTime(TimeUnit.MICROSECONDS); -+ long micros = sw.elapsed(TimeUnit.MICROSECONDS); - quantiles.insert(micros); - } - } -@@ -276,12 +276,12 @@ public int run(String args[]) throws Exception { - int replication = conf.getInt(DFSConfigKeys.DFS_REPLICATION_KEY, - DFSConfigKeys.DFS_REPLICATION_DEFAULT); - -- Stopwatch sw = new Stopwatch().start(); -+ Stopwatch sw = Stopwatch.createStarted(); - test.doMultithreadedWrites(conf, p, numThreads, writeSize, numWrites, - replication); - sw.stop(); - -- System.out.println("Finished in " + sw.elapsedMillis() + "ms"); -+ System.out.println("Finished in " + sw.elapsed(TimeUnit.MILLISECONDS) + "ms"); - System.out.println("Latency quantiles (in microseconds):\n" + - test.quantiles); - return 0; -diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNode.java -index 10b6b79..9fbcf82 100644 ---- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNode.java -+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNode.java -@@ -27,6 +27,7 @@ - import java.net.HttpURLConnection; - import java.net.URL; - import java.util.concurrent.ExecutionException; -+import java.util.concurrent.TimeUnit; - - import org.apache.hadoop.conf.Configuration; - import org.apache.hadoop.fs.FileUtil; -@@ -325,11 +326,11 @@ private void doPerfTest(int editsSize, int numEdits) throws Exception { - ch.setEpoch(1); - ch.startLogSegment(1, NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION).get(); - -- Stopwatch sw = new Stopwatch().start(); -+ Stopwatch sw = Stopwatch.createStarted(); - for 
(int i = 1; i < numEdits; i++) { - ch.sendEdits(1L, i, 1, data).get(); - } -- long time = sw.elapsedMillis(); -+ long time = sw.elapsed(TimeUnit.MILLISECONDS); - - System.err.println("Wrote " + numEdits + " batches of " + editsSize + - " bytes in " + time + "ms"); -diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestChunkedArrayList.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestChunkedArrayList.java -index a1e49cc..44751b0 100644 ---- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestChunkedArrayList.java -+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestChunkedArrayList.java -@@ -20,6 +20,7 @@ - import static org.junit.Assert.*; - - import java.util.ArrayList; -+import java.util.concurrent.TimeUnit; - - import org.junit.Test; - -@@ -69,24 +70,22 @@ public void testPerformance() { - System.gc(); - { - ArrayList arrayList = new ArrayList(); -- Stopwatch sw = new Stopwatch(); -- sw.start(); -+ Stopwatch sw = Stopwatch.createStarted(); - for (int i = 0; i < numElems; i++) { - arrayList.add(obj); - } -- System.out.println(" ArrayList " + sw.elapsedMillis()); -+ System.out.println(" ArrayList " + sw.elapsed(TimeUnit.MILLISECONDS)); - } - - // test ChunkedArrayList - System.gc(); - { - ChunkedArrayList chunkedList = new ChunkedArrayList(); -- Stopwatch sw = new Stopwatch(); -- sw.start(); -+ Stopwatch sw = Stopwatch.createStarted(); - for (int i = 0; i < numElems; i++) { - chunkedList.add(obj); - } -- System.out.println("ChunkedArrayList " + sw.elapsedMillis()); -+ System.out.println("ChunkedArrayList " + sw.elapsed(TimeUnit.MILLISECONDS)); - } - } - } -diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java -index 9863427..07854a1 100644 ---- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java -+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java -@@ -28,6 +28,7 @@ - import java.util.List; - import java.util.Map; - import java.util.Set; -+import java.util.concurrent.TimeUnit; - - import org.apache.commons.logging.Log; - import org.apache.commons.logging.LogFactory; -@@ -223,7 +224,7 @@ protected void addInputPathRecursively(List result, - org.apache.hadoop.mapreduce.lib.input.FileInputFormat.LIST_STATUS_NUM_THREADS, - org.apache.hadoop.mapreduce.lib.input.FileInputFormat.DEFAULT_LIST_STATUS_NUM_THREADS); - -- Stopwatch sw = new Stopwatch().start(); -+ Stopwatch sw = Stopwatch.createStarted(); - if (numThreads == 1) { - List locatedFiles = singleThreadedListStatus(job, dirs, inputFilter, recursive); - result = locatedFiles.toArray(new FileStatus[locatedFiles.size()]); -@@ -242,7 +243,7 @@ protected void addInputPathRecursively(List result, - - sw.stop(); - if (LOG.isDebugEnabled()) { -- LOG.debug("Time taken to get FileStatuses: " + sw.elapsedMillis()); -+ LOG.debug("Time taken to get FileStatuses: " + sw.elapsed(TimeUnit.MILLISECONDS)); - } - LOG.info("Total input paths to process : " + result.length); - return result; -@@ -300,7 +301,7 @@ protected FileSplit makeSplit(Path file, long start, long length, - * they're too big.*/ - public InputSplit[] 
getSplits(JobConf job, int numSplits) - throws IOException { -- Stopwatch sw = new Stopwatch().start(); -+ Stopwatch sw = Stopwatch.createStarted(); - FileStatus[] files = listStatus(job); - - // Save the number of input files for metrics/loadgen -@@ -362,7 +363,7 @@ protected FileSplit makeSplit(Path file, long start, long length, - sw.stop(); - if (LOG.isDebugEnabled()) { - LOG.debug("Total # of splits generated by getSplits: " + splits.size() -- + ", TimeTaken: " + sw.elapsedMillis()); -+ + ", TimeTaken: " + sw.elapsed(TimeUnit.MILLISECONDS)); - } - return splits.toArray(new FileSplit[splits.size()]); - } -diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java -index 5f32f11..a4f293c 100644 ---- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java -+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java -@@ -21,6 +21,7 @@ - import java.io.IOException; - import java.util.ArrayList; - import java.util.List; -+import java.util.concurrent.TimeUnit; - - import org.apache.commons.logging.Log; - import org.apache.commons.logging.LogFactory; -@@ -258,7 +259,7 @@ public static PathFilter getInputPathFilter(JobContext context) { - - int numThreads = job.getConfiguration().getInt(LIST_STATUS_NUM_THREADS, - DEFAULT_LIST_STATUS_NUM_THREADS); -- Stopwatch sw = new Stopwatch().start(); -+ Stopwatch sw = Stopwatch.createStarted(); - if (numThreads == 1) { - result = singleThreadedListStatus(job, dirs, inputFilter, recursive); - } else { -@@ -275,7 +276,7 @@ public static PathFilter getInputPathFilter(JobContext context) { - - sw.stop(); - if (LOG.isDebugEnabled()) { -- LOG.debug("Time taken to get FileStatuses: " + sw.elapsedMillis()); -+ LOG.debug("Time taken to get FileStatuses: " + sw.elapsed(TimeUnit.MILLISECONDS)); - } - LOG.info("Total input paths to process : " + result.size()); - return result; -@@ -366,7 +367,7 @@ protected FileSplit makeSplit(Path file, long start, long length, - * @throws IOException - */ - public List getSplits(JobContext job) throws IOException { -- Stopwatch sw = new Stopwatch().start(); -+ Stopwatch sw = Stopwatch.createStarted(); - long minSize = Math.max(getFormatMinSplitSize(), getMinSplitSize(job)); - long maxSize = getMaxSplitSize(job); - -@@ -414,7 +415,7 @@ protected FileSplit makeSplit(Path file, long start, long length, - sw.stop(); - if (LOG.isDebugEnabled()) { - LOG.debug("Total # of splits generated by getSplits: " + splits.size() -- + ", TimeTaken: " + sw.elapsedMillis()); -+ + ", TimeTaken: " + sw.elapsed(TimeUnit.MILLISECONDS)); - } - return splits; - } diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index b315e2b..9ad8bcd 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml -@@ -310,7 +310,7 @@ +@@ -400,7 +400,7 @@ com.google.guava guava - 11.0.2 -+ 17.0 ++ 18.0 - commons-cli + com.google.code.gson +diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java +index d55c80b..4505aa9 100644 +--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java ++++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java +@@ -67,7 +67,7 @@ class XAttrCommands extends FsCommand { + "0x and 0s, respectively.\n" + + ": The file or directory.\n"; + private final static Function enValueOfFunc = +- Enums.valueOfFunction(XAttrCodec.class); ++ Enums.stringConverter(XAttrCodec.class); + + private String name = null; + private boolean dump = false; diff --git a/hadoop-jersey1.patch b/hadoop-jersey1.patch new file mode 100644 index 0000000..afb77be --- /dev/null +++ b/hadoop-jersey1.patch @@ -0,0 +1,13 @@ +diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml +index b646304..97ac7c2 100644 +--- a/hadoop-project/pom.xml ++++ b/hadoop-project/pom.xml +@@ -64,7 +64,7 @@ + 1.7.4 + + +- 1.9 ++ 1 + + + 1.9.13 diff --git a/hadoop-maven.patch b/hadoop-maven.patch deleted file mode 100644 index 0026ae3..0000000 --- a/hadoop-maven.patch +++ /dev/null @@ -1,44 +0,0 @@ -diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml -index 7cf67a3..c090916 100644 ---- a/hadoop-common-project/hadoop-common/pom.xml -+++ b/hadoop-common-project/hadoop-common/pom.xml -@@ -364,16 +364,6 @@ - - - -- org.apache.maven.plugins -- maven-surefire-plugin -- -- -- ${startKdc} -- ${kdc.resource.dir} -- -- -- -- - org.apache.avro - avro-maven-plugin - -@@ -480,6 +470,10 @@ - org.apache.maven.plugins - maven-surefire-plugin - -+ -+ ${startKdc} -+ ${kdc.resource.dir} -+ - - - listener -diff --git a/pom.xml b/pom.xml -index 13dbf49..ad84034 100644 ---- a/pom.xml -+++ b/pom.xml -@@ -387,6 +387,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xs - - org.apache.maven.plugins - maven-javadoc-plugin -+ 2.8.1 - false - - diff --git a/hadoop-netty-3-Final.patch b/hadoop-netty-3-Final.patch index 7980e21..c701c33 100644 --- a/hadoop-netty-3-Final.patch +++ b/hadoop-netty-3-Final.patch @@ -1,21 +1,3 @@ -diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml -index 9b267fe..0ce916d 100644 ---- a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml -+++ b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml -@@ -38,12 +38,10 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> - - - -- - - org.jboss.netty - netty -- 3.2.4.Final -+ 3.9.3.Final - - - diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index b315e2b..a9da3aa 100644 --- a/hadoop-project/pom.xml @@ -25,7 +7,7 @@ index b315e2b..a9da3aa 100644 io.netty netty - 3.6.2.Final -+ 3.9.3.Final ++ 3.10.6.Final diff --git a/hadoop-openssl.patch b/hadoop-openssl.patch new file mode 100644 index 0000000..332e930 --- /dev/null +++ b/hadoop-openssl.patch @@ -0,0 +1,37 @@ +diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/OpensslCipher.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/OpensslCipher.c +index 5cb5bba..5294ec7 100644 +--- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/OpensslCipher.c ++++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/OpensslCipher.c +@@ -253,14 +253,15 @@ JNIEXPORT jlong JNICALL Java_org_apache_hadoop_crypto_OpensslCipher_init + static int check_update_max_output_len(EVP_CIPHER_CTX *context, int input_len, + int max_output_len) + { +- if 
(context->flags & EVP_CIPH_NO_PADDING) { ++ unsigned long flags = EVP_CIPHER_flags(EVP_CIPHER_CTX_cipher(context)); ++ if (flags & EVP_CIPH_NO_PADDING) { + if (max_output_len >= input_len) { + return 1; + } + return 0; + } else { +- int b = context->cipher->block_size; +- if (context->encrypt) { ++ int b = EVP_CIPHER_CTX_block_size(context); ++ if (EVP_CIPHER_CTX_encrypting(context)) { + if (max_output_len >= input_len + b - 1) { + return 1; + } +@@ -307,10 +308,11 @@ JNIEXPORT jint JNICALL Java_org_apache_hadoop_crypto_OpensslCipher_update + static int check_doFinal_max_output_len(EVP_CIPHER_CTX *context, + int max_output_len) + { +- if (context->flags & EVP_CIPH_NO_PADDING) { ++ unsigned long flags = EVP_CIPHER_flags(EVP_CIPHER_CTX_cipher(context)); ++ if (flags & EVP_CIPH_NO_PADDING) { + return 1; + } else { +- int b = context->cipher->block_size; ++ int b = EVP_CIPHER_CTX_block_size(context); + if (max_output_len >= b) { + return 1; + } diff --git a/hadoop-tools.jar.patch b/hadoop-tools.jar.patch index b160ed7..db8d527 100644 --- a/hadoop-tools.jar.patch +++ b/hadoop-tools.jar.patch @@ -2,21 +2,7 @@ diff --git a/hadoop-common-project/hadoop-annotations/pom.xml b/hadoop-common-pr index c3e1aa1..9042f73 100644 --- a/hadoop-common-project/hadoop-annotations/pom.xml +++ b/hadoop-common-project/hadoop-annotations/pom.xml -@@ -48,11 +48,8 @@ - - - -- jdk.tools -- jdk.tools -- 1.6 -- system -- ${java.home}/../lib/tools.jar -+ com.sun -+ tools - - - -@@ -63,11 +60,8 @@ +@@ -46,11 +46,8 @@ @@ -30,3 +16,17 @@ index c3e1aa1..9042f73 100644 +@@ -61,11 +58,8 @@ + + + +- jdk.tools +- jdk.tools +- 1.8 +- system +- ${java.home}/../lib/tools.jar ++ com.sun ++ tools + + + diff --git a/hadoop.spec b/hadoop.spec index cad799f..225b317 100644 --- a/hadoop.spec +++ b/hadoop.spec @@ -1,8 +1,5 @@ %global _hardened_build 1 -%global commit 9e2ef43a240fb0f603d8c384e501daec11524510 -%global shortcommit %(c=%{commit}; echo ${c:0:7}) - %global hadoop_version %{version} %global hdfs_services hadoop-zkfc.service hadoop-datanode.service hadoop-secondarynamenode.service hadoop-namenode.service hadoop-journalnode.service %global mapreduce_services hadoop-historyserver.service @@ -13,14 +10,14 @@ %global __provides_exclude_from ^%{_libdir}/%{name}/.*$ Name: hadoop -Version: 2.4.1 -Release: 26%{?dist} +Version: 2.7.3 +Release: 1%{?dist} Summary: A software platform for processing vast amounts of data # The BSD license file is missing # https://issues.apache.org/jira/browse/HADOOP-9849 License: ASL 2.0 and BSD URL: https://%{name}.apache.org -Source0: https://github.com/apache/hadoop-common/archive/%{commit}/%{name}-%{version}-%{shortcommit}.tar.gz +Source0: https://www.apache.org/dist/%{name}/core/%{name}-%{version}/%{name}-%{version}-src.tar.gz Source1: %{name}-layout.sh Source2: %{name}-hdfs.service.template Source3: %{name}-mapreduce.service.template @@ -42,13 +39,11 @@ Source14: %{name}-tomcat-users.xml Patch0: %{name}-fedora-integration.patch # Fedora packaging guidelines for JNI library loading Patch2: %{name}-jni-library-loading.patch -# Clean up warnings with maven 3.0.5 -Patch3: %{name}-maven.patch # Don't download tomcat Patch4: %{name}-no-download-tomcat.patch # Use dlopen to find libjvm.so Patch5: %{name}-dlopen-libjvm.patch -# Update to Guava 17.0 +# Update to Guava 18.0 Patch7: %{name}-guava.patch # Update to Netty 3.6.6-Final Patch8: %{name}-netty-3-Final.patch @@ -60,21 +55,17 @@ Patch10: %{name}-build.patch Patch12: %{name}-armhfp.patch # fix Jersey1 support -Patch13: hadoop-2.4.1-jersey1.patch +Patch13: 
hadoop-jersey1.patch # fix java8 doclint Patch14: hadoop-2.4.1-disable-doclint.patch -# fix exception org.jets3t.service.S3ServiceException is never thrown in body of corresponding try statement -Patch15: hadoop-2.4.1-jets3t0.9.3.patch -# add some servlet3.1 missing methods -Patch16: hadoop-2.4.1-servlet-3.1-api.patch -# Adapt to the new BookKeeper ZkUtils API -Patch17: hadoop-2.4.1-new-bookkeeper.patch -# Fix POM warnings which become errors in newest Maven -Patch18: fix-pom-errors.patch %if 0%{?fedora} > 25 # Fix Protobuf compiler errors after updating to 3.1.0 Patch19: protobuf3.patch %endif +# Patch openssl 1.0.2 to use 1.1.0 +Patch21: %{name}-openssl.patch +# fix exception no longer thrown in aws +Patch22: %{name}-aws.patch # This is not a real BR, but is here because of rawhide shift to eclipse # aether packages which caused a dependency of a dependency to not get @@ -97,16 +88,19 @@ BuildRequires: apache-commons-logging BuildRequires: apache-commons-math BuildRequires: apache-commons-net BuildRequires: apache-rat-plugin +BuildRequires: apacheds-kerberos BuildRequires: atinject BuildRequires: avalon-framework BuildRequires: avalon-logkit BuildRequires: avro BuildRequires: avro-maven-plugin +BuildRequires: aws-sdk-java BuildRequires: bookkeeper-java BuildRequires: cglib BuildRequires: checkstyle BuildRequires: chrpath BuildRequires: cmake +BuildRequires: curator BuildRequires: ecj >= 1:4.2.1-6 BuildRequires: fuse-devel BuildRequires: fusesource-pom @@ -121,6 +115,7 @@ BuildRequires: guice-servlet BuildRequires: hamcrest BuildRequires: hawtjni BuildRequires: hsqldb +BuildRequires: htrace BuildRequires: httpcomponents-client BuildRequires: httpcomponents-core BuildRequires: istack-commons @@ -137,11 +132,13 @@ BuildRequires: jersey1-contribs BuildRequires: jets3t BuildRequires: jettison BuildRequires: jetty8 +BuildRequires: jetty-util-ajax BuildRequires: jsch BuildRequires: json_simple BuildRequires: jspc BuildRequires: jsr-305 BuildRequires: jsr-311 +BuildRequires: jul-to-slf4j BuildRequires: junit BuildRequires: jzlib BuildRequires: leveldbjni @@ -166,6 +163,7 @@ BuildRequires: metrics BuildRequires: mockito BuildRequires: native-maven-plugin BuildRequires: netty3 +BuildRequires: netty BuildRequires: objectweb-asm BuildRequires: objenesis >= 1.2-16 BuildRequires: openssl-devel @@ -184,7 +182,6 @@ BuildRequires: tomcat-log4j BuildRequires: tomcat-servlet-3.1-api BuildRequires: txw2 BuildRequires: xmlenc -BuildRequires: znerd-oss-parent BuildRequires: zookeeper-java > 3.4.5-15 # For tests BuildRequires: jersey1-test-framework @@ -428,10 +425,10 @@ offering local computation and storage. This package contains files needed to run Apache Hadoop YARN in secure mode. %prep -%autosetup -p1 -n %{name}-common-%{commit} +%autosetup -p1 -n %{name}-%{version}-src %if 0%{?fedora} > 25 -%pom_xpath_set "pom:properties/pom:protobuf.version" 3.1.0 hadoop-project +%pom_xpath_set "pom:properties/pom:protobuf.version" 3.2.0 hadoop-project %else %pom_xpath_set "pom:properties/pom:protobuf.version" 2.6.1 hadoop-project %endif @@ -467,14 +464,82 @@ This package contains files needed to run Apache Hadoop YARN in secure mode. 
# Disable the hadoop-minikdc module due to missing deps %pom_disable_module hadoop-minikdc hadoop-common-project +%pom_remove_dep :hadoop-minikdc hadoop-common-project/hadoop-common %pom_remove_dep :hadoop-minikdc hadoop-common-project/hadoop-auth %pom_remove_dep :hadoop-minikdc hadoop-project %pom_remove_dep :hadoop-minikdc hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests +%pom_remove_dep :hadoop-minikdc hadoop-common-project/hadoop-kms +%pom_remove_dep :hadoop-minikdc hadoop-hdfs-project/hadoop-hdfs +%pom_remove_dep :hadoop-minikdc hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry +%pom_remove_dep :hadoop-minikdc hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager +%pom_remove_dep :hadoop-minikdc hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice rm -f hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/TestKerberosAuthenticator.java rm -f hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java rm -f hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAltKerberosAuthenticationHandler.java rm -f hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java rm -f hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java +rm -f hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferTestCase.java +rm -f hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZonesWithKMS.java +rm -f hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/TestSecureNNWithQJM.java + +# Remove other deps only needed for testing +%pom_remove_dep :tomcat-embed-core hadoop-project +%pom_remove_dep :tomcat-embed-logging-juli hadoop-project +%pom_remove_dep :tomcat-embed-core hadoop-common-project/hadoop-auth +%pom_remove_dep :tomcat-embed-logging-juli hadoop-common-project/hadoop-auth +rm -f hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/AuthenticatorTestCase.java +rm -f hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/TestPseudoAuthenticator.java +%pom_xpath_remove "pom:project/pom:dependencyManagement/pom:dependencies/pom:dependency[pom:artifactId='hadoop-auth' and pom:type='test-jar']" hadoop-project +%pom_xpath_remove "pom:project/pom:dependencies/pom:dependency[pom:artifactId='hadoop-auth' and pom:type='test-jar']" hadoop-hdfs-project/hadoop-hdfs-httpfs +%pom_xpath_remove "pom:project/pom:dependencies/pom:dependency[pom:artifactId='hadoop-auth' and pom:type='test-jar']" hadoop-common-project/hadoop-common +%pom_xpath_remove "pom:project/pom:dependencies/pom:dependency[pom:artifactId='hadoop-auth' and pom:type='test-jar']" hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager +%pom_xpath_remove "pom:project/pom:dependencies/pom:dependency[pom:artifactId='hadoop-auth' and pom:type='test-jar']" hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice + +# Remove tests with errors - Tests are not needed for packaging so don't bother +rm -f 
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java +rm -f hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUGILoginFromKeytab.java +rm -f hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/web/TestWebDelegationToken.java +rm -f hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/curator/TestChildReaper.java +rm -f hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java +rm -f hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestZKDelegationTokenSecretManager.java +rm -f hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpCookieFlag.java +rm -f hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMSWithZK.java +rm -f hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/MiniKMS.java +rm -f hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java +rm -f hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestDelegationTokensWithHA.java +rm -f hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/TestDelegationTokenRemoteFetcher.java +rm -f hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java +rm -f hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestDelegationTokensWithHA.java +rm -f hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/TestSaslDataTransfer.java +rm -f hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancerWithSaslDataTransfer.java +rm -rf hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test +rm -rf hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test +rm -rf hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test +rm -rf hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager +rm -f hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/test/YarnTestDriver.java +rm -rf hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server +rm -rf hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client +rm -rf hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test +rm -rf hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/src/test +rm -rf hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test +rm -f hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java +rm -rf hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test +rm -rf hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test +rm -rf hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test +rm -rf hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test +rm -rf hadoop-tools/hadoop-streaming/src/test +rm -rf 
hadoop-tools/hadoop-gridmix/src/test/java
+rm -rf hadoop-tools/hadoop-extras/src/test
+
+# Remove dist plugin. It's not needed and has issues
+%pom_remove_plugin :maven-antrun-plugin hadoop-common-project/hadoop-kms
+%pom_remove_plugin :maven-antrun-plugin hadoop-dist
+
+# remove plugin that causes the same jar to be built twice
+%pom_remove_plugin :maven-jar-plugin hadoop-common-project/hadoop-auth
+
+# modify version of apacheds-kerberos-codec to 2.0.0-M21
+%pom_xpath_set "pom:project/pom:dependencyManagement/pom:dependencies/pom:dependency[pom:artifactId='apacheds-kerberos-codec']/pom:version" 2.0.0-M21 hadoop-project
%if 0%{?fedora} > 25
# Disable hadoop-pipes, because it needs upstream patching for Openssl 1.1.0
@@ -493,6 +558,23 @@ rm -f hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-test
%pom_xpath_set "pom:project/pom:dependencyManagement/pom:dependencies/pom:dependency[pom:artifactId='asm']/pom:version" 5.0.2 hadoop-project
%pom_xpath_set "pom:project/pom:dependencyManagement/pom:dependencies/pom:dependency[pom:artifactId='asm']/pom:groupId" org.ow2.asm hadoop-project
+# Add missing deps
+%pom_add_dep org.iq80.leveldb:leveldb hadoop-hdfs-project/hadoop-hdfs
+%pom_add_dep org.iq80.leveldb:leveldb hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common
+%pom_add_dep org.eclipse.jetty:jetty-util-ajax hadoop-hdfs-project/hadoop-hdfs
+%pom_add_dep org.eclipse.jetty:jetty-util-ajax hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager
+
+# remove plugins that are not needed
+%pom_remove_plugin :maven-jar-plugin hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy
+%pom_remove_plugin :maven-antrun-plugin hadoop-tools/hadoop-streaming
+
+# disable microsoft azure because the package is not available
+%pom_disable_module hadoop-azure hadoop-tools
+%pom_remove_dep :hadoop-azure hadoop-tools/hadoop-tools-dist
+
+# disable kms war because it breaks bundling policy
+%pom_disable_module hadoop-kms hadoop-common-project
+%pom_remove_dep :hadoop-kms hadoop-hdfs-project/hadoop-hdfs
# War files we don't want
%mvn_package :%{name}-auth-examples __noinstall
@@ -536,7 +618,7 @@ rm -f hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-test
%ifarch s390x ppc64le
export MAVEN_OPTS="-Xms2048M -Xmx4096M"
%endif
-%mvn_build -j -- -Drequire.snappy=true -Dcontainer-executor.conf.dir=%{_sysconfdir}/%{name} -Pdist,native -DskipTests -DskipTest -DskipIT
+%mvn_build -j -- -Drequire.snappy=true -Dcontainer-executor.conf.dir=%{_sysconfdir}/%{name} -Pdist,native -DskipTests -DskipTest -DskipIT -Dmaven.javadoc.skip=true
# This takes a long time to run, so comment out for now
#%%check
@@ -602,11 +684,19 @@ install -d -m 0755 %{buildroot}/%{_var}/run/%{name}-yarn
install -d -m 0755 %{buildroot}/%{_var}/run/%{name}-hdfs
install -d -m 0755 %{buildroot}/%{_var}/run/%{name}-mapreduce
-basedir='%{name}-dist/target/%{name}-%{hadoop_version}'
+basedir='%{name}-common-project/%{name}-common/target/%{name}-common-%{hadoop_version}'
+hdfsdir='%{name}-hdfs-project/%{name}-hdfs/target/%{name}-hdfs-%{hadoop_version}'
+httpfsdir='%{name}-hdfs-project/%{name}-hdfs-httpfs/target/%{name}-hdfs-httpfs-%{hadoop_version}'
+mapreddir='%{name}-mapreduce-project/target/%{name}-mapreduce-%{hadoop_version}'
+yarndir='%{name}-yarn-project/target/%{name}-yarn-project-%{hadoop_version}'
+# copy script folders
for dir in bin libexec sbin
do
cp -arf $basedir/$dir %{buildroot}/%{_prefix}
+ cp -arf $hdfsdir/$dir %{buildroot}/%{_prefix}
+ cp -arf $mapreddir/$dir %{buildroot}/%{_prefix}
+ cp -arf $yarndir/$dir %{buildroot}/%{_prefix}
done
# This binary is obsoleted and causes a conflict with qt-devel
@@ -618,11 +708,17 @@ rm -f %{buildroot}/%{_bindir}/test-container-executor
# Duplicate files
rm -f %{buildroot}/%{_sbindir}/hdfs-config.sh
+# copy config files
cp -arf $basedir/etc/* %{buildroot}/%{_sysconfdir}
+cp -arf $httpfsdir/etc/* %{buildroot}/%{_sysconfdir}
+cp -arf $mapreddir/etc/* %{buildroot}/%{_sysconfdir}
+cp -arf $yarndir/etc/* %{buildroot}/%{_sysconfdir}
+
+# copy binaries
cp -arf $basedir/lib/native/libhadoop.so* %{buildroot}/%{_libdir}/%{name}
chrpath --delete %{buildroot}/%{_libdir}/%{name}/*
-cp -arf $basedir/include/hdfs.h %{buildroot}/%{_includedir}/%{name}
-cp -arf $basedir/lib/native/libhdfs.so* %{buildroot}/%{_libdir}
+cp -arf $hdfsdir/include/hdfs.h %{buildroot}/%{_includedir}/%{name}
+cp -arf $hdfsdir/lib/native/libhdfs.so* %{buildroot}/%{_libdir}
chrpath --delete %{buildroot}/%{_libdir}/libhdfs*
cp -af hadoop-hdfs-project/hadoop-hdfs/target/native/main/native/fuse-dfs/fuse_dfs %{buildroot}/%{_bindir}
chrpath --delete %{buildroot}/%{_bindir}/fuse_dfs
@@ -675,10 +771,10 @@ pushd $basedir/share/%{name}/common/lib
popd
# hdfs jar dependencies
-copy_dep_jars $basedir/share/%{name}/hdfs/lib %{buildroot}/%{_datadir}/%{name}/hdfs/lib
+copy_dep_jars $hdfsdir/share/%{name}/hdfs/lib %{buildroot}/%{_datadir}/%{name}/hdfs/lib
%{_bindir}/xmvn-subst %{buildroot}/%{_datadir}/%{name}/hdfs/lib
%{__ln_s} %{_jnidir}/%{name}/%{name}-hdfs-bkjournal.jar %{buildroot}/%{_datadir}/%{name}/hdfs/lib
-pushd $basedir/share/%{name}/hdfs
+pushd $hdfsdir/share/%{name}/hdfs
link_hadoop_jars %{buildroot}/%{_datadir}/%{name}/hdfs
popd
@@ -738,23 +834,25 @@ pushd %{buildroot}/%{_datadir}/%{name}/httpfs/tomcat
popd
# mapreduce jar dependencies
-copy_dep_jars $basedir/share/%{name}/mapreduce/lib %{buildroot}/%{_datadir}/%{name}/mapreduce/lib
+mrdir='%{name}-mapreduce-project/target/%{name}-mapreduce-%{hadoop_version}'
+copy_dep_jars $mrdir/share/%{name}/mapreduce/lib %{buildroot}/%{_datadir}/%{name}/mapreduce/lib
%{_bindir}/xmvn-subst %{buildroot}/%{_datadir}/%{name}/mapreduce/lib
%{__ln_s} %{_javadir}/%{name}/%{name}-annotations.jar %{buildroot}/%{_datadir}/%{name}/mapreduce/lib
-pushd $basedir/share/%{name}/mapreduce
+pushd $mrdir/share/%{name}/mapreduce
link_hadoop_jars %{buildroot}/%{_datadir}/%{name}/mapreduce
popd
# yarn jar dependencies
-copy_dep_jars $basedir/share/%{name}/yarn/lib %{buildroot}/%{_datadir}/%{name}/yarn/lib
+yarndir='%{name}-yarn-project/target/%{name}-yarn-project-%{hadoop_version}'
+copy_dep_jars $yarndir/share/%{name}/yarn/lib %{buildroot}/%{_datadir}/%{name}/yarn/lib
%{_bindir}/xmvn-subst %{buildroot}/%{_datadir}/%{name}/yarn/lib
%{__ln_s} %{_javadir}/%{name}/%{name}-annotations.jar %{buildroot}/%{_datadir}/%{name}/yarn/lib
-pushd $basedir/share/%{name}/yarn
+pushd $yarndir/share/%{name}/yarn
link_hadoop_jars %{buildroot}/%{_datadir}/%{name}/yarn
popd
# Install hdfs webapp bits
-cp -arf $basedir/share/hadoop/hdfs/webapps/* %{buildroot}/%{_datadir}/%{name}/hdfs/webapps
+cp -arf $hdfsdir/share/hadoop/hdfs/webapps/* %{buildroot}/%{_datadir}/%{name}/hdfs/webapps
# hadoop layout. Convert to appropriate lib location for 32 and 64 bit archs
lib=$(echo %{?_libdir} | sed -e 's:/usr/\(.*\):\1:')
@@ -903,17 +1001,20 @@ fi
%{_datadir}/%{name}/client
%files -f .mfiles common
-%doc hadoop-dist/target/hadoop-%{hadoop_version}/share/doc/hadoop/common/*
-%config(noreplace) %{_sysconfdir}/%{name}/configuration.xsl
+%doc LICENSE.txt
+%doc NOTICE.txt
+%doc README.txt
%config(noreplace) %{_sysconfdir}/%{name}/core-site.xml
%config(noreplace) %{_sysconfdir}/%{name}/%{name}-env.sh
%config(noreplace) %{_sysconfdir}/%{name}/%{name}-metrics.properties
%config(noreplace) %{_sysconfdir}/%{name}/%{name}-metrics2.properties
%config(noreplace) %{_sysconfdir}/%{name}/%{name}-policy.xml
%config(noreplace) %{_sysconfdir}/%{name}/log4j.properties
-%config(noreplace) %{_sysconfdir}/%{name}/slaves
%config(noreplace) %{_sysconfdir}/%{name}/ssl-client.xml.example
%config(noreplace) %{_sysconfdir}/%{name}/ssl-server.xml.example
+%config(noreplace) %{_sysconfdir}/%{name}/slaves
+%config(noreplace) %{_sysconfdir}/%{name}/configuration.xsl
+
%dir %{_datadir}/%{name}
%dir %{_datadir}/%{name}/common
%{_datadir}/%{name}/common/lib
@@ -984,7 +1085,6 @@ fi
%attr(0775,root,tomcat) %dir %{_var}/cache/%{name}-httpfs/work
%files -n libhdfs
-%doc hadoop-dist/target/hadoop-%{hadoop_version}/share/doc/hadoop/hdfs/LICENSE.txt
%{_libdir}/libhdfs.so.*
%files -f .mfiles-%{name}-mapreduce mapreduce
@@ -1006,7 +1106,6 @@ fi
%files -f .mfiles-%{name}-mapreduce-examples mapreduce-examples
%files -f .mfiles-%{name}-maven-plugin maven-plugin
-%doc hadoop-dist/target/hadoop-%{hadoop_version}/share/doc/hadoop/common/LICENSE.txt
%files -f .mfiles-%{name}-tests tests
@@ -1037,6 +1136,9 @@ fi
%attr(6050,root,yarn) %{_bindir}/container-executor
%changelog
+* Thu Mar 02 2017 Mike Miller - 2.7.3-1
+- Update to 2.7.3
+
* Fri Feb 10 2017 Fedora Release Engineering - 2.4.1-26
- Rebuilt for https://fedoraproject.org/wiki/Fedora_26_Mass_Rebuild