children = zkc.getChildren("/ledgers/available",
- false);
- mostRecentSize = children.size();
-+ // TODO: Bookkeeper 4.2.0 introduced "readonly" bookies
-+ // which mess with test bookie counts;
-+ // unclear why setReadOnlyModeEnabled(false) doesn't have
-+ // backward-compat effect hoped for
-+ if (children.contains("readonly")) {
-+ mostRecentSize = children.size()-1;
-+ }
- if (LOG.isDebugEnabled()) {
- LOG.debug("Found " + mostRecentSize + " bookies up, "
- + "waiting for " + count);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNode.java
index 50b44f8..d5a91d3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNode.java
@@ -1710,40 +1436,23 @@ diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdf
index fc85a5e..1610c8c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
-@@ -88,7 +88,7 @@
- import org.apache.hadoop.util.*;
- import org.apache.hadoop.util.DiskChecker.DiskErrorException;
- import org.apache.hadoop.util.DiskChecker.DiskOutOfSpaceException;
+@@ -202,7 +202,7 @@ import org.apache.hadoop.util.VersionInfo;
+ import org.apache.hadoop.tracing.SpanReceiverHost;
+ import org.apache.hadoop.tracing.SpanReceiverInfo;
+ import org.apache.hadoop.tracing.TraceAdminProtocol;
-import org.mortbay.util.ajax.JSON;
+import org.eclipse.jetty.util.ajax.JSON;
- import javax.management.ObjectName;
-
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java
-index 477b7f6..8a22654 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java
-@@ -30,10 +30,11 @@
- import org.apache.hadoop.http.HttpConfig;
- import org.apache.hadoop.http.HttpServer2;
- import org.apache.hadoop.security.UserGroupInformation;
--import org.mortbay.jetty.Connector;
-+import org.eclipse.jetty.server.Connector;
-
import com.google.common.annotations.VisibleForTesting;
-
-+
- /**
- * Utility class to start a datanode in a secure cluster, first obtaining
- * privileged resources before main startup and handing them to the datanode.
+ import com.google.common.base.Joiner;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
index 4232e00..3386dff 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
-@@ -264,7 +264,7 @@
- import org.apache.log4j.Appender;
+@@ -289,7 +289,7 @@ import org.apache.log4j.Appender;
import org.apache.log4j.AsyncAppender;
import org.apache.log4j.Logger;
+ import org.codehaus.jackson.map.ObjectMapper;
-import org.mortbay.util.ajax.JSON;
+import org.eclipse.jetty.util.ajax.JSON;
@@ -1762,32 +1471,6 @@ index aa4ba5d..5b945ba 100644
@InterfaceAudience.Private
public class StreamFile extends DfsServlet {
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
-index 50a7f21..1d96e15 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
-@@ -32,7 +32,7 @@
- import org.apache.hadoop.security.token.TokenIdentifier;
- import org.apache.hadoop.util.DataChecksum;
- import org.apache.hadoop.util.StringUtils;
--import org.mortbay.util.ajax.JSON;
-+import org.eclipse.jetty.util.ajax.JSON;
-
- import java.io.ByteArrayInputStream;
- import java.io.DataInputStream;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
-index 6aa935c..dfc1e39 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
-@@ -98,7 +98,7 @@
- import org.apache.hadoop.security.token.Token;
- import org.apache.hadoop.security.token.TokenIdentifier;
- import org.apache.hadoop.util.Progressable;
--import org.mortbay.util.ajax.JSON;
-+import org.eclipse.jetty.util.ajax.JSON;
-
- import com.google.common.annotations.VisibleForTesting;
- import com.google.common.base.Charsets;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNodeMXBean.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNodeMXBean.java
index 3471848..b4e0202 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNodeMXBean.java
@@ -1818,15 +1501,15 @@ diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdf
index d459d30..6327a83 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java
-@@ -37,7 +37,7 @@
- import org.apache.hadoop.io.nativeio.NativeIO.POSIX.NoMlockCacheManipulator;
+@@ -32,7 +32,7 @@ import org.apache.hadoop.io.nativeio.NativeIO.POSIX.NoMlockCacheManipulator;
import org.apache.hadoop.util.VersionInfo;
+ import org.codehaus.jackson.map.ObjectMapper;
import org.junit.Test;
-import org.mortbay.util.ajax.JSON;
+import org.eclipse.jetty.util.ajax.JSON;
- /**
- * Class for testing {@link NameNodeMXBean} implementation
+ import javax.management.MBeanServer;
+ import javax.management.ObjectName;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStartupProgressServlet.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStartupProgressServlet.java
index 0f22e9a..bff549a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStartupProgressServlet.java
@@ -1853,19 +1536,6 @@ index f24b801..28d05b4 100644
/*
* Mock input stream class that always outputs the current position of the stream.
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java
-index 2bce30f..eaf836d 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java
-@@ -38,7 +38,7 @@
- import org.apache.hadoop.util.Time;
- import org.junit.Assert;
- import org.junit.Test;
--import org.mortbay.util.ajax.JSON;
-+import org.eclipse.jetty.util.ajax.JSON;
-
- import com.google.common.collect.Lists;
-
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/MiniDFSClusterManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/MiniDFSClusterManager.java
index 7029f42..c7023c9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/MiniDFSClusterManager.java
@@ -1883,15 +1553,102 @@ diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-c
index 981e6ff..7864756 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/JobEndNotifier.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/JobEndNotifier.java
-@@ -30,7 +30,7 @@
+@@ -30,7 +30,8 @@
import org.apache.hadoop.mapred.JobContext;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
-import org.mortbay.log.Log;
+import org.eclipse.jetty.util.log.Log;
++import org.eclipse.jetty.util.log.Logger;
/**
* This class handles job end notification. Submitters of jobs can choose to
+@@ -48,6 +49,7 @@ import org.eclipse.jetty.util.log.Log;
+ public class JobEndNotifier implements Configurable {
+ private static final String JOB_ID = "$jobId";
+ private static final String JOB_STATUS = "$jobStatus";
++ private static Logger log = Log.getLogger(JobEndNotifier.class);
+
+ private Configuration conf;
+ protected String userUrl;
+@@ -101,10 +103,10 @@ public class JobEndNotifier implements Configurable {
+ int port = Integer.parseInt(portConf);
+ proxyToUse = new Proxy(proxyType,
+ new InetSocketAddress(hostname, port));
+- Log.info("Job end notification using proxy type \"" + proxyType +
++ log.info("Job end notification using proxy type \"" + proxyType +
+ "\" hostname \"" + hostname + "\" and port \"" + port + "\"");
+ } catch(NumberFormatException nfe) {
+- Log.warn("Job end notification couldn't parse configured proxy's port "
++ log.warn("Job end notification couldn't parse configured proxy's port "
+ + portConf + ". Not going to use a proxy");
+ }
+ }
+@@ -121,23 +123,23 @@ public class JobEndNotifier implements Configurable {
+ protected boolean notifyURLOnce() {
+ boolean success = false;
+ try {
+- Log.info("Job end notification trying " + urlToNotify);
++ log.info("Job end notification trying " + urlToNotify);
+ HttpURLConnection conn =
+ (HttpURLConnection) urlToNotify.openConnection(proxyToUse);
+ conn.setConnectTimeout(timeout);
+ conn.setReadTimeout(timeout);
+ conn.setAllowUserInteraction(false);
+ if(conn.getResponseCode() != HttpURLConnection.HTTP_OK) {
+- Log.warn("Job end notification to " + urlToNotify +" failed with code: "
++ log.warn("Job end notification to " + urlToNotify +" failed with code: "
+ + conn.getResponseCode() + " and message \"" + conn.getResponseMessage()
+ +"\"");
+ }
+ else {
+ success = true;
+- Log.info("Job end notification to " + urlToNotify + " succeeded");
++ log.info("Job end notification to " + urlToNotify + " succeeded");
+ }
+ } catch(IOException ioe) {
+- Log.warn("Job end notification to " + urlToNotify + " failed", ioe);
++ log.warn("Job end notification to " + urlToNotify + " failed", ioe);
+ }
+ return success;
+ }
+@@ -152,7 +154,7 @@ public class JobEndNotifier implements Configurable {
+ throws InterruptedException {
+ // Do we need job-end notification?
+ if (userUrl == null) {
+- Log.info("Job end notification URL not set, skipping.");
++ log.info("Job end notification URL not set, skipping.");
+ return;
+ }
+
+@@ -168,23 +170,23 @@ public class JobEndNotifier implements Configurable {
+ try {
+ urlToNotify = new URL(userUrl);
+ } catch (MalformedURLException mue) {
+- Log.warn("Job end notification couldn't parse " + userUrl, mue);
++ log.warn("Job end notification couldn't parse " + userUrl, mue);
+ return;
+ }
+
+ // Send notification
+ boolean success = false;
+ while (numTries-- > 0 && !success) {
+- Log.info("Job end notification attempts left " + numTries);
++ log.info("Job end notification attempts left " + numTries);
+ success = notifyURLOnce();
+ if (!success) {
+ Thread.sleep(waitInterval);
+ }
+ }
+ if (!success) {
+- Log.warn("Job end notification failed to notify : " + urlToNotify);
++ log.warn("Job end notification failed to notify : " + urlToNotify);
+ } else {
+- Log.info("Job end notification succeeded for " + jobReport.getJobId());
++ log.info("Job end notification succeeded for " + jobReport.getJobId());
+ }
+ }
+ }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java
index 8891ec7..1dd369a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java
@@ -1984,25 +1741,99 @@ diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-c
index c803a7f..393d385 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java
-@@ -111,7 +111,7 @@
+@@ -118,6 +118,7 @@ import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
+ import org.jboss.netty.handler.codec.frame.TooLongFrameException;
+ import org.jboss.netty.handler.codec.http.DefaultHttpResponse;
+ import org.jboss.netty.handler.codec.http.HttpChunkAggregator;
++import org.jboss.netty.handler.codec.http.HttpHeaders;
+ import org.jboss.netty.handler.codec.http.HttpRequest;
+ import org.jboss.netty.handler.codec.http.HttpRequestDecoder;
+ import org.jboss.netty.handler.codec.http.HttpResponse;
+@@ -127,7 +127,6 @@ import org.jboss.netty.handler.codec.http.QueryStringDecoder;
import org.jboss.netty.handler.ssl.SslHandler;
import org.jboss.netty.handler.stream.ChunkedWriteHandler;
import org.jboss.netty.util.CharsetUtil;
-import org.mortbay.jetty.HttpHeaders;
-+import org.eclipse.jetty.http.HttpHeaders;
+ import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
- import com.google.common.util.concurrent.ThreadFactoryBuilder;
+@@ -830,9 +830,9 @@ public class ShuffleHandler extends AuxiliaryService {
+ }
+ // Check whether the shuffle version is compatible
+ if (!ShuffleHeader.DEFAULT_HTTP_HEADER_NAME.equals(
+- request.getHeader(ShuffleHeader.HTTP_HEADER_NAME))
++ request.headers().get(ShuffleHeader.HTTP_HEADER_NAME))
+ || !ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION.equals(
+- request.getHeader(ShuffleHeader.HTTP_HEADER_VERSION))) {
++ request.headers().get(ShuffleHeader.HTTP_HEADER_VERSION))) {
+ sendError(ctx, "Incompatible shuffle request version", BAD_REQUEST);
+ }
+ final Map<String, List<String>> q =
+@@ -1044,12 +1044,12 @@ public class ShuffleHandler extends AuxiliaryService {
+ boolean keepAliveParam, long contentLength) {
+ if (!connectionKeepAliveEnabled && !keepAliveParam) {
+ LOG.info("Setting connection close header...");
+- response.setHeader(HttpHeaders.CONNECTION, CONNECTION_CLOSE);
++ response.headers().set(HttpHeaders.Names.CONNECTION, CONNECTION_CLOSE);
+ } else {
+- response.setHeader(HttpHeaders.CONTENT_LENGTH,
++ response.headers().set(HttpHeaders.Names.CONTENT_LENGTH,
+ String.valueOf(contentLength));
+- response.setHeader(HttpHeaders.CONNECTION, HttpHeaders.KEEP_ALIVE);
+- response.setHeader(HttpHeaders.KEEP_ALIVE, "timeout="
++ response.headers().set(HttpHeaders.Names.CONNECTION, HttpHeaders.Values.KEEP_ALIVE);
++ response.headers().set(HttpHeaders.Values.KEEP_ALIVE, "timeout="
+ + connectionKeepAliveTimeOut);
+ LOG.info("Content Length in shuffle : " + contentLength);
+ }
+@@ -1077,7 +1077,7 @@ public class ShuffleHandler extends AuxiliaryService {
+ String enc_str = SecureShuffleUtils.buildMsgFrom(requestUri);
+ // hash from the fetcher
+ String urlHashStr =
+- request.getHeader(SecureShuffleUtils.HTTP_HEADER_URL_HASH);
++ request.headers().get(SecureShuffleUtils.HTTP_HEADER_URL_HASH);
+ if (urlHashStr == null) {
+ LOG.info("Missing header hash for " + appid);
+ throw new IOException("fetcher cannot be authenticated");
+@@ -1093,11 +1093,11 @@ public class ShuffleHandler extends AuxiliaryService {
+ String reply =
+ SecureShuffleUtils.generateHash(urlHashStr.getBytes(Charsets.UTF_8),
+ tokenSecret);
+- response.setHeader(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH, reply);
++ response.headers().set(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH, reply);
+ // Put shuffle version into http header
+- response.setHeader(ShuffleHeader.HTTP_HEADER_NAME,
++ response.headers().set(ShuffleHeader.HTTP_HEADER_NAME,
+ ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
+- response.setHeader(ShuffleHeader.HTTP_HEADER_VERSION,
++ response.headers().set(ShuffleHeader.HTTP_HEADER_VERSION,
+ ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
+ if (LOG.isDebugEnabled()) {
+ int len = reply.length();
+@@ -1163,11 +1163,11 @@ public class ShuffleHandler extends AuxiliaryService {
+ protected void sendError(ChannelHandlerContext ctx, String message,
+ HttpResponseStatus status) {
+ HttpResponse response = new DefaultHttpResponse(HTTP_1_1, status);
+- response.setHeader(CONTENT_TYPE, "text/plain; charset=UTF-8");
++ response.headers().set(CONTENT_TYPE, "text/plain; charset=UTF-8");
+ // Put shuffle version into http header
+- response.setHeader(ShuffleHeader.HTTP_HEADER_NAME,
++ response.headers().set(ShuffleHeader.HTTP_HEADER_NAME,
+ ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
+- response.setHeader(ShuffleHeader.HTTP_HEADER_VERSION,
++ response.headers().set(ShuffleHeader.HTTP_HEADER_VERSION,
+ ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
+ response.setContent(
+ ChannelBuffers.copiedBuffer(message, CharsetUtil.UTF_8));
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java
index 420c428..3a3257e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java
-@@ -78,7 +78,7 @@
- import org.jboss.netty.handler.codec.http.HttpResponseStatus;
- import org.junit.Assert;
- import org.junit.Test;
+@@ -94,7 +94,6 @@ import org.junit.Test;
+ import org.mockito.invocation.InvocationOnMock;
+ import org.mockito.stubbing.Answer;
+ import org.mockito.Mockito;
-import org.mortbay.jetty.HttpHeaders;
-+import org.eclipse.jetty.http.HttpHeaders;
public class TestShuffleHandler {
static final long MiB = 1024 * 1024;
@@ -2021,25 +1852,6 @@ index 8ae5809..b7da2bc 100644
org.apache.ant
-@@ -78,16 +78,8 @@
-       <artifactId>commons-el</artifactId>
-     </exclusion>
-     <exclusion>
--      <groupId>tomcat</groupId>
--      <artifactId>jasper-runtime</artifactId>
--    </exclusion>
--    <exclusion>
--      <groupId>tomcat</groupId>
--      <artifactId>jasper-compiler</artifactId>
--    </exclusion>
--    <exclusion>
--      <groupId>org.mortbay.jetty</groupId>
--      <artifactId>jsp-2.1-jetty</artifactId>
-+      <groupId>org.apache.tomcat</groupId>
-+      <artifactId>tomcat-jasper</artifactId>
-     </exclusion>
-   </exclusions>
-