children = zkc.getChildren("/ledgers/available",
false);
mostRecentSize = children.size();
+ // TODO: BookKeeper 4.2.0 introduced "readonly" bookies,
+ // which interfere with the test's bookie counts;
+ // unclear why setReadOnlyModeEnabled(false) doesn't have
+ // the backward-compatible effect that was hoped for
+ if (children.contains("readonly")) {
+ mostRecentSize = children.size()-1;
+ }
if (LOG.isDebugEnabled()) {
LOG.debug("Found " + mostRecentSize + " bookies up, "
+ "waiting for " + count);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
index 0be46de..b2834b8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
@@ -171,7 +171,7 @@
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.util.VersionInfo;
-import org.mortbay.util.ajax.JSON;
+import org.eclipse.jetty.util.ajax.JSON;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
@@ -366,7 +366,7 @@ private void startInfoServer(Configuration conf) throws IOException {
conf, new AccessControlList(conf.get(DFS_ADMIN, " ")))
: new HttpServer("datanode", infoHost, tmpInfoPort, tmpInfoPort == 0,
conf, new AccessControlList(conf.get(DFS_ADMIN, " ")),
- secureResources.getListener());
+ secureResources.getListener(), null, secureResources.getServer());
LOG.info("Opened info server at " + infoHost + ":" + tmpInfoPort);
if (conf.getBoolean(DFS_HTTPS_ENABLE_KEY, false)) {
boolean needClientAuth = conf.getBoolean(DFS_CLIENT_HTTPS_NEED_AUTH_KEY,
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java
index 0fda306..77c6c82 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java
@@ -32,14 +32,15 @@
import org.apache.hadoop.http.HttpServer;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.ssl.SSLFactory;
-import org.mortbay.jetty.Connector;
-import org.mortbay.jetty.nio.SelectChannelConnector;
-import org.mortbay.jetty.security.SslSocketConnector;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.util.ssl.SslContextFactory;
import javax.net.ssl.SSLServerSocketFactory;
import com.google.common.annotations.VisibleForTesting;
+
/**
* Utility class to start a datanode in a secure cluster, first obtaining
* privileged resources before main startup and handing them to the datanode.
@@ -50,17 +51,21 @@
*/
public static class SecureResources {
private final ServerSocket streamingSocket;
- private final Connector listener;
+ private final ServerConnector listener;
+ private final Server server;
public SecureResources(ServerSocket streamingSocket,
- Connector listener) {
+ ServerConnector listener, Server server) {
this.streamingSocket = streamingSocket;
this.listener = listener;
+ this.server = server;
}
public ServerSocket getStreamingSocket() { return streamingSocket; }
- public Connector getListener() { return listener; }
+ public ServerConnector getListener() { return listener; }
+
+ public Server getServer() { return server; }
}
private String [] args;
@@ -94,6 +99,9 @@ public void start() throws Exception {
@VisibleForTesting
public static SecureResources getSecureResources(final SSLFactory sslFactory,
Configuration conf) throws Exception {
+ // Create the Jetty Server that the secure connectors below will be attached to
+ Server server = HttpServer.createServer(conf);
+
// Obtain secure port for data streaming to datanode
InetSocketAddress streamingAddr = DataNode.getStreamingAddr(conf);
int socketWriteTimeout = conf.getInt(DFSConfigKeys.DFS_DATANODE_SOCKET_WRITE_TIMEOUT_KEY,
@@ -110,22 +118,28 @@ public static SecureResources getSecureResources(final SSLFactory sslFactory,
}
// Obtain secure listener for web server
- Connector listener;
+ ServerConnector listener;
if (HttpConfig.isSecure()) {
try {
sslFactory.init();
} catch (GeneralSecurityException ex) {
throw new IOException(ex);
}
- SslSocketConnector sslListener = new SslSocketConnector() {
- @Override
- protected SSLServerSocketFactory createFactory() throws Exception {
- return sslFactory.createSSLServerSocketFactory();
- }
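+ // Jetty 9 takes its TLS settings from an SslContextFactory instead of an overridden socket factory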
+ SslContextFactory sslContextFactory = new SslContextFactory(conf.get("ssl.server.keystore.location",""));
+ sslContextFactory.setKeyStorePassword(conf.get("ssl.server.keystore.password",""));
+ if (sslFactory.isClientCertRequired()) {
+ sslContextFactory.setTrustStorePath(conf.get("ssl.server.truststore.location",""));
+ sslContextFactory.setTrustStorePassword(conf.get("ssl.server.truststore.password",""));
+ sslContextFactory.setTrustStoreType(conf.get("ssl.server.truststore.type", "jks"));
+ }
+ ServerConnector sslListener = new ServerConnector(server, sslContextFactory) {
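+ // note: unlike Jetty 6's SslSocketConnector, ServerConnector never calls createFactory();
+ // the SslContextFactory above carries the TLS settings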
+ protected SSLServerSocketFactory createFactory() throws Exception {
+ return sslFactory.createSSLServerSocketFactory();
+ }
};
listener = sslListener;
} else {
- listener = HttpServer.createDefaultChannelConnector();
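+ // Jetty 9 connectors are bound to a Server at construction time, so the server is passed through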
+ listener = HttpServer.createDefaultChannelConnector(server);
}
InetSocketAddress infoSocAddr = DataNode.getInfoAddr(conf);
@@ -138,7 +152,7 @@ protected SSLServerSocketFactory createFactory() throws Exception {
"context. Needed " + streamingAddr.getPort() + ", got " + ss.getLocalPort());
}
System.err.println("Successfully obtained privileged resources (streaming port = "
- + ss + " ) (http listener port = " + listener.getConnection() +")");
+ + ss + " ) (http listener port = " + listener.getLocalPort() +")");
if ((ss.getLocalPort() > 1023 || listener.getPort() > 1023) &&
UserGroupInformation.isSecurityEnabled()) {
@@ -146,7 +160,7 @@ protected SSLServerSocketFactory createFactory() throws Exception {
}
System.err.println("Opened streaming server at " + streamingAddr);
System.err.println("Opened info server at " + infoSocAddr);
- return new SecureResources(ss, listener);
+ return new SecureResources(ss, listener, server);
}
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
index 8c114c3..7544cd1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
@@ -201,7 +201,7 @@
import org.apache.hadoop.util.DataChecksum;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.util.VersionInfo;
-import org.mortbay.util.ajax.JSON;
+import org.eclipse.jetty.util.ajax.JSON;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StreamFile.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StreamFile.java
index aa4ba5d..5b945ba 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StreamFile.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StreamFile.java
@@ -39,7 +39,7 @@
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.ServletUtil;
-import org.mortbay.jetty.InclusiveByteRange;
+import org.eclipse.jetty.server.InclusiveByteRange;
@InterfaceAudience.Private
public class StreamFile extends DfsServlet {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
index f251e34..bed9d58 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
@@ -47,7 +47,7 @@
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.util.DataChecksum;
import org.apache.hadoop.util.StringUtils;
-import org.mortbay.util.ajax.JSON;
+import org.eclipse.jetty.util.ajax.JSON;
/** JSON Utilities */
public class JsonUtil {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
index 5b32826..f130da1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
@@ -82,7 +82,7 @@
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSelector;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.StringUtils;
-import org.mortbay.util.ajax.JSON;
+import org.eclipse.jetty.util.ajax.JSON;
import com.google.common.base.Charsets;
import com.google.common.collect.Lists;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java
index cf624b7..6ae1a20 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java
@@ -34,7 +34,7 @@
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.util.VersionInfo;
import org.junit.Test;
-import org.mortbay.util.ajax.JSON;
+import org.eclipse.jetty.util.ajax.JSON;
/**
* Class for testing {@link NameNodeMXBean} implementation
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java
index daaa6d8..683f414 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java
@@ -46,7 +46,7 @@
import org.apache.hadoop.net.NetUtils;
import org.junit.Test;
import org.mockito.Mockito;
-import org.mortbay.jetty.InclusiveByteRange;
+import org.eclipse.jetty.server.InclusiveByteRange;
/*
* Mock input stream class that always outputs the current position of the stream.
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java
index aa2393d..432f92e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java
@@ -27,7 +27,7 @@
import org.apache.hadoop.util.Time;
import org.junit.Assert;
import org.junit.Test;
-import org.mortbay.util.ajax.JSON;
+import org.eclipse.jetty.util.ajax.JSON;
public class TestJsonUtil {
static FileStatus toFileStatus(HdfsFileStatus f, String parent) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/MiniDFSClusterManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/MiniDFSClusterManager.java
index 7029f42..c7023c9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/MiniDFSClusterManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/MiniDFSClusterManager.java
@@ -38,7 +38,7 @@
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
-import org.mortbay.util.ajax.JSON;
+import org.eclipse.jetty.util.ajax.JSON;
/**
* This class drives the creation of a mini-cluster on the local machine. By
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/JobEndNotifier.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/JobEndNotifier.java
index 518305f..87be820 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/JobEndNotifier.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/JobEndNotifier.java
@@ -29,7 +29,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
-import org.mortbay.log.Log;
+import org.eclipse.jetty.util.log.Log;
/**
* This class handles job end notification. Submitters of jobs can choose to
@@ -97,10 +97,10 @@ public void setConf(Configuration conf) {
int port = Integer.parseInt(portConf);
proxyToUse = new Proxy(proxyType,
new InetSocketAddress(hostname, port));
- Log.info("Job end notification using proxy type \"" + proxyType +
+ Log.getRootLogger().info("Job end notification using proxy type \"" + proxyType +
"\" hostname \"" + hostname + "\" and port \"" + port + "\"");
} catch(NumberFormatException nfe) {
- Log.warn("Job end notification couldn't parse configured proxy's port "
+ Log.getRootLogger().warn("Job end notification couldn't parse configured proxy's port "
+ portConf + ". Not going to use a proxy");
}
}
@@ -118,23 +118,23 @@ public Configuration getConf() {
protected boolean notifyURLOnce() {
boolean success = false;
try {
- Log.info("Job end notification trying " + urlToNotify);
+ Log.getRootLogger().info("Job end notification trying " + urlToNotify);
HttpURLConnection conn =
(HttpURLConnection) urlToNotify.openConnection(proxyToUse);
conn.setConnectTimeout(5*1000);
conn.setReadTimeout(5*1000);
conn.setAllowUserInteraction(false);
if(conn.getResponseCode() != HttpURLConnection.HTTP_OK) {
- Log.warn("Job end notification to " + urlToNotify +" failed with code: "
+ Log.getRootLogger().warn("Job end notification to " + urlToNotify +" failed with code: "
+ conn.getResponseCode() + " and message \"" + conn.getResponseMessage()
+"\"");
}
else {
success = true;
- Log.info("Job end notification to " + urlToNotify + " succeeded");
+ Log.getRootLogger().info("Job end notification to " + urlToNotify + " succeeded");
}
} catch(IOException ioe) {
- Log.warn("Job end notification to " + urlToNotify + " failed", ioe);
+ Log.getRootLogger().warn("Job end notification to " + urlToNotify + " failed", ioe);
}
return success;
}
@@ -149,7 +149,7 @@ public void notify(JobReport jobReport)
throws InterruptedException {
// Do we need job-end notification?
if (userUrl == null) {
- Log.info("Job end notification URL not set, skipping.");
+ Log.getRootLogger().info("Job end notification URL not set, skipping.");
return;
}
@@ -165,23 +165,23 @@ public void notify(JobReport jobReport)
try {
urlToNotify = new URL(userUrl);
} catch (MalformedURLException mue) {
- Log.warn("Job end notification couldn't parse " + userUrl, mue);
+ Log.getRootLogger().warn("Job end notification couldn't parse " + userUrl, mue);
return;
}
// Send notification
boolean success = false;
while (numTries-- > 0 && !success) {
- Log.info("Job end notification attempts left " + numTries);
+ Log.getRootLogger().info("Job end notification attempts left " + numTries);
success = notifyURLOnce();
if (!success) {
Thread.sleep(waitInterval);
}
}
if (!success) {
- Log.warn("Job end notification failed to notify : " + urlToNotify);
+ Log.getRootLogger().warn("Job end notification failed to notify : " + urlToNotify);
} else {
- Log.info("Job end notification succeeded for " + jobReport.getJobId());
+ Log.getRootLogger().info("Job end notification succeeded for " + jobReport.getJobId());
}
}
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java
index 0b23c95..b08b854 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java
@@ -238,7 +238,7 @@ public void testJobsQueryStateNone() throws JSONException, Exception {
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
- assertEquals("jobs is not null", JSONObject.NULL, json.get("jobs"));
+ assertEquals("jobs is not None", 0, json.getJSONObject("jobs").length());
}
@Test
@@ -304,7 +304,7 @@ public void testJobsQueryUserNone() throws JSONException, Exception {
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
- assertEquals("jobs is not null", JSONObject.NULL, json.get("jobs"));
+ assertEquals("jobs is not None", 0, json.getJSONObject("jobs").length());
}
@Test
@@ -389,7 +389,7 @@ public void testJobsQueryQueueNonExist() throws JSONException, Exception {
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
- assertEquals("jobs is not null", JSONObject.NULL, json.get("jobs"));
+ assertEquals("jobs is not None", 0, json.getJSONObject("jobs").length());
}
@Test
@@ -421,7 +421,7 @@ public void testJobsQueryStartTimeBegin() throws JSONException, Exception {
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
- assertEquals("jobs is not null", JSONObject.NULL, json.get("jobs"));
+ assertEquals("jobs is not None", 0, json.getJSONObject("jobs").length());
}
@Test
@@ -741,7 +741,7 @@ public void testJobsQueryFinishTimeEnd() throws JSONException, Exception {
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
- assertEquals("jobs is not null", JSONObject.NULL, json.get("jobs"));
+ assertEquals("jobs is not None", 0, json.getJSONObject("jobs").length());
}
@Test
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java
index d2ea74e..32d6b0e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java
@@ -18,9 +18,10 @@
package org.apache.hadoop.mapred;
-import org.mortbay.jetty.Server;
-import org.mortbay.jetty.servlet.Context;
-import org.mortbay.jetty.servlet.ServletHolder;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.Text;
@@ -69,7 +70,7 @@ private void startHttpServer() throws Exception {
}
webServer = new Server(0);
- Context context = new Context(webServer, contextPath);
+ ServletContextHandler context = new ServletContextHandler(webServer, contextPath);
// create servlet handler
context.addServlet(new ServletHolder(new NotificationServlet()),
@@ -77,7 +78,7 @@ private void startHttpServer() throws Exception {
// Start webServer
webServer.start();
- port = webServer.getConnectors()[0].getLocalPort();
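+ // getLocalPort() moved to NetworkConnector in Jetty 9, hence the ServerConnector cast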
+ port = ((ServerConnector) webServer.getConnectors()[0]).getLocalPort();
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MiniHadoopClusterManager.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MiniHadoopClusterManager.java
index 35b5e30..91964bd 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MiniHadoopClusterManager.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MiniHadoopClusterManager.java
@@ -45,7 +45,7 @@
import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.MiniYARNCluster;
-import org.mortbay.util.ajax.JSON;
+import org.eclipse.jetty.util.ajax.JSON;
/**
* This class drives the creation of a mini-cluster on the local machine. By
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml
index 1581bab..48d0f6b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml
@@ -43,8 +43,8 @@
         <artifactId>avro</artifactId>
-          <groupId>org.mortbay.jetty</groupId>
-          <artifactId>jetty</artifactId>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-server</artifactId>
           <groupId>org.apache.ant</groupId>
@@ -78,16 +78,8 @@
          <artifactId>commons-el</artifactId>
-          <groupId>tomcat</groupId>
-          <artifactId>jasper-runtime</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>tomcat</groupId>
-          <artifactId>jasper-compiler</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.mortbay.jetty</groupId>
-          <artifactId>jsp-2.1-jetty</artifactId>
+          <groupId>org.apache.tomcat</groupId>
+          <artifactId>tomcat-jasper</artifactId>