hadoop/hadoop-fedora-integration.patch
2013-08-28 09:28:23 -04:00

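The bulk of this patch ports Hadoop's embedded web stack from Jetty 6 (org.mortbay.*) to Jetty 9 (org.eclipse.jetty.*), with matching moves from Tomcat 6 to 7 and commons-math to commons-math3. For orientation, the recurring class mapping applied throughout the hunks below, summarized as a Java comment sketch:

    // Jetty 6 (org.mortbay)                         ->  Jetty 9 (org.eclipse.jetty)
    // org.mortbay.jetty.Server                      ->  org.eclipse.jetty.server.Server
    // org.mortbay.jetty.servlet.Context             ->  org.eclipse.jetty.servlet.ServletContextHandler
    // org.mortbay.jetty.nio.SelectChannelConnector  ->  org.eclipse.jetty.server.ServerConnector
    // org.mortbay.jetty.security.SslSocketConnector ->  ServerConnector + SslContextFactory
    // org.mortbay.util.ajax.JSON                    ->  org.eclipse.jetty.util.ajax.JSON
    // org.mortbay.log.Log.info(...)                 ->  Log.getRootLogger().info(...)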

diff --git a/hadoop-client/pom.xml b/hadoop-client/pom.xml
index 36a93a7..c2b52c9 100644
--- a/hadoop-client/pom.xml
+++ b/hadoop-client/pom.xml
@@ -44,12 +44,8 @@
<artifactId>commons-httpclient</artifactId>
</exclusion>
<exclusion>
- <groupId>tomcat</groupId>
- <artifactId>jasper-compiler</artifactId>
- </exclusion>
- <exclusion>
- <groupId>tomcat</groupId>
- <artifactId>jasper-runtime</artifactId>
+ <groupId>org.apache.tomcat</groupId>
+ <artifactId>tomcat-jasper</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet</groupId>
@@ -64,24 +60,20 @@
<artifactId>commons-logging-api</artifactId>
</exclusion>
<exclusion>
- <groupId>jetty</groupId>
- <artifactId>org.mortbay.jetty</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>jetty</artifactId>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-server</artifactId>
</exclusion>
<exclusion>
- <groupId>org.mortbay.jetty</groupId>
+ <groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
</exclusion>
<exclusion>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>jsp-api-2.1</artifactId>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-servlet</artifactId>
</exclusion>
<exclusion>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>servlet-api-2.5</artifactId>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-webapp</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jersey</groupId>
diff --git a/hadoop-common-project/hadoop-annotations/pom.xml b/hadoop-common-project/hadoop-annotations/pom.xml
index 6996ea3..6750191 100644
--- a/hadoop-common-project/hadoop-annotations/pom.xml
+++ b/hadoop-common-project/hadoop-annotations/pom.xml
@@ -48,11 +48,8 @@
</activation>
<dependencies>
<dependency>
- <groupId>jdk.tools</groupId>
- <artifactId>jdk.tools</artifactId>
- <version>1.6</version>
- <scope>system</scope>
- <systemPath>${java.home}/../lib/tools.jar</systemPath>
+ <groupId>com.sun</groupId>
+ <artifactId>tools</artifactId>
</dependency>
</dependencies>
</profile>
diff --git a/hadoop-common-project/hadoop-auth/pom.xml b/hadoop-common-project/hadoop-auth/pom.xml
index a8642c0..29b4898 100644
--- a/hadoop-common-project/hadoop-auth/pom.xml
+++ b/hadoop-common-project/hadoop-auth/pom.xml
@@ -54,8 +54,9 @@
<scope>test</scope>
</dependency>
<dependency>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>jetty</artifactId>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-servlet</artifactId>
+ <version>9.0.4.v20130625</version>
<scope>test</scope>
</dependency>
<dependency>
diff --git a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/AuthenticatorTestCase.java b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/AuthenticatorTestCase.java
index 6059d8c..bba1a00 100644
--- a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/AuthenticatorTestCase.java
+++ b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/AuthenticatorTestCase.java
@@ -17,11 +17,14 @@
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import junit.framework.TestCase;
import org.mockito.Mockito;
-import org.mortbay.jetty.Server;
-import org.mortbay.jetty.servlet.Context;
-import org.mortbay.jetty.servlet.FilterHolder;
-import org.mortbay.jetty.servlet.ServletHolder;
-
+import org.eclipse.jetty.server.Connector;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.FilterHolder;
+import org.eclipse.jetty.servlet.ServletHolder;
+
+import javax.servlet.DispatcherType;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
@@ -38,12 +41,13 @@
import java.net.ServerSocket;
import java.net.URL;
import java.util.Properties;
+import java.util.EnumSet;
public abstract class AuthenticatorTestCase extends TestCase {
private Server server;
private String host = null;
private int port = -1;
- Context context;
+ ServletContextHandler context;
private static Properties authenticatorConfig;
@@ -84,17 +88,19 @@ protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws S
protected void start() throws Exception {
server = new Server(0);
- context = new Context();
+ context = new ServletContextHandler();
context.setContextPath("/foo");
server.setHandler(context);
- context.addFilter(new FilterHolder(TestFilter.class), "/*", 0);
+ context.addFilter(new FilterHolder(TestFilter.class), "/*", EnumSet.of(DispatcherType.REQUEST));
context.addServlet(new ServletHolder(TestServlet.class), "/bar");
host = "localhost";
ServerSocket ss = new ServerSocket(0);
port = ss.getLocalPort();
ss.close();
- server.getConnectors()[0].setHost(host);
- server.getConnectors()[0].setPort(port);
+ ServerConnector connector = new ServerConnector(server);
+ connector.setHost(host);
+ connector.setPort(port);
+ server.setConnectors(new Connector[] { connector });
server.start();
System.out.println("Running embedded servlet container at: http://" + host + ":" + port);
}
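The Jetty 9 Connector interface no longer exposes setHost/setPort, so the test replaces the default connector with an explicit ServerConnector. A minimal sketch of the same setup, assuming Jetty 9 on the classpath; note that binding port 0 and reading the port back after start() would avoid the ServerSocket probe (and its inherent race) entirely:

    import org.eclipse.jetty.server.Connector;
    import org.eclipse.jetty.server.Server;
    import org.eclipse.jetty.server.ServerConnector;

    Server server = new Server();
    ServerConnector connector = new ServerConnector(server);
    connector.setHost("localhost");
    connector.setPort(0);                         // 0: let the kernel pick a free port
    server.setConnectors(new Connector[] { connector });
    server.start();
    int boundPort = connector.getLocalPort();     // the port actually bound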
diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml
index 0f453f6..e775d31 100644
--- a/hadoop-common-project/hadoop-common/pom.xml
+++ b/hadoop-common-project/hadoop-common/pom.xml
@@ -53,7 +53,7 @@
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
- <artifactId>commons-math</artifactId>
+ <artifactId>commons-math3</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
@@ -82,52 +82,79 @@
<scope>compile</scope>
</dependency>
<dependency>
- <groupId>javax.servlet</groupId>
- <artifactId>servlet-api</artifactId>
+ <groupId>commons-collections</groupId>
+ <artifactId>commons-collections</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>jetty</artifactId>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-server</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
- <groupId>org.mortbay.jetty</groupId>
+ <groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
<scope>compile</scope>
</dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-util-ajax</artifactId>
+ <version>9.0.4.v20130625</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-servlet</artifactId>
+ <version>9.0.4.v20130625</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-webapp</artifactId>
+ <version>9.0.4.v20130625</version>
+ <scope>compile</scope>
+ </dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
+ <version>1.17.1</version>
<scope>compile</scope>
</dependency>
<dependency>
<!-- Used, even though 'mvn dependency:analyze' doesn't find it -->
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
+ <version>1.17.1</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-server</artifactId>
+ <version>1.17.1</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-servlet</artifactId>
+ <version>1.17.1</version>
<scope>compile</scope>
</dependency>
<dependency>
- <groupId>tomcat</groupId>
- <artifactId>jasper-compiler</artifactId>
- <scope>runtime</scope>
+ <groupId>org.glassfish.web</groupId>
+ <artifactId>javax.servlet.jsp</artifactId>
+ <version>2.2.6</version>
</dependency>
<dependency>
- <groupId>tomcat</groupId>
- <artifactId>jasper-runtime</artifactId>
- <scope>runtime</scope>
+ <groupId>org.apache.tomcat</groupId>
+ <artifactId>tomcat-servlet-api</artifactId>
+ <version>7.0.37</version>
</dependency>
<dependency>
- <groupId>javax.servlet.jsp</groupId>
- <artifactId>jsp-api</artifactId>
- <scope>runtime</scope>
+ <groupId>org.apache.tomcat</groupId>
+ <artifactId>tomcat-el-api</artifactId>
+ <version>7.0.37</version>
</dependency>
<dependency>
<groupId>commons-el</groupId>
@@ -218,6 +245,11 @@
<groupId>com.jcraft</groupId>
<artifactId>jsch</artifactId>
</dependency>
+ <dependency>
+ <groupId>com.google.code.findbugs</groupId>
+ <artifactId>jsr305</artifactId>
+ <version>1.3.9</version>
+ </dependency>
<dependency>
<groupId>org.apache.zookeeper</groupId>
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
index 4adc306..995657f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
@@ -42,6 +42,7 @@
import org.apache.hadoop.fs.s3.INode.FileType;
import org.jets3t.service.S3Service;
import org.jets3t.service.S3ServiceException;
+import org.jets3t.service.ServiceException;
import org.jets3t.service.impl.rest.httpclient.RestS3Service;
import org.jets3t.service.model.S3Bucket;
import org.jets3t.service.model.S3Object;
@@ -60,8 +61,8 @@
private static final String FILE_SYSTEM_VERSION_NAME = "fs-version";
private static final String FILE_SYSTEM_VERSION_VALUE = "1";
- private static final Map<String, String> METADATA =
- new HashMap<String, String>();
+ private static final Map<String, Object> METADATA =
+ new HashMap<String, Object>();
static {
METADATA.put(FILE_SYSTEM_NAME, FILE_SYSTEM_VALUE);
@@ -173,6 +174,9 @@ private InputStream get(String key, boolean checkMetadata)
}
throw new S3Exception(e);
}
+ catch (ServiceException e) {
+ throw new S3Exception(e);
+ }
}
private InputStream get(String key, long byteRangeStart) throws IOException {
@@ -189,6 +193,9 @@ private InputStream get(String key, long byteRangeStart) throws IOException {
}
throw new S3Exception(e);
}
+ catch (ServiceException e) {
+ throw new S3Exception(e);
+ }
}
private void checkMetadata(S3Object object) throws S3FileSystemException,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java
index 416bfb1..32fe6b6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java
@@ -34,6 +34,7 @@
import org.apache.hadoop.util.ToolRunner;
import org.jets3t.service.S3Service;
import org.jets3t.service.S3ServiceException;
+import org.jets3t.service.ServiceException;
import org.jets3t.service.impl.rest.httpclient.RestS3Service;
import org.jets3t.service.model.S3Bucket;
import org.jets3t.service.model.S3Object;
@@ -248,6 +249,9 @@ private InputStream get(String key) throws IOException {
}
throw new S3Exception(e);
}
+ catch (ServiceException e) {
+ throw new S3Exception(e);
+ }
}
private String pathToKey(Path path) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java
index 400419c..f54d58f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java
@@ -37,6 +37,7 @@
import org.jets3t.service.S3ObjectsChunk;
import org.jets3t.service.S3Service;
import org.jets3t.service.S3ServiceException;
+import org.jets3t.service.ServiceException;
import org.jets3t.service.impl.rest.httpclient.RestS3Service;
import org.jets3t.service.model.S3Bucket;
import org.jets3t.service.model.S3Object;
@@ -124,12 +125,15 @@ public FileMetadata retrieveMetadata(String key) throws IOException {
@Override
public InputStream retrieve(String key) throws IOException {
try {
- S3Object object = s3Service.getObject(bucket, key);
+ S3Object object = s3Service.getObject(bucket.getName(), key);
return object.getDataInputStream();
} catch (S3ServiceException e) {
handleServiceException(key, e);
return null; //never returned - keep compiler happy
}
+ catch (ServiceException e) {
+ throw new S3Exception(e);
+ }
}
@Override
@@ -143,6 +147,9 @@ public InputStream retrieve(String key, long byteRangeStart)
handleServiceException(key, e);
return null; //never returned - keep compiler happy
}
+ catch (ServiceException e) {
+ throw new S3Exception(e);
+ }
}
@Override
@@ -165,7 +172,7 @@ private PartialListing list(String prefix, String delimiter,
if (prefix.length() > 0 && !prefix.endsWith(PATH_DELIMITER)) {
prefix += PATH_DELIMITER;
}
- S3ObjectsChunk chunk = s3Service.listObjectsChunked(bucket.getName(),
+ S3ObjectsChunk chunk = (S3ObjectsChunk)s3Service.listObjectsChunked(bucket.getName(),
prefix, delimiter, maxListingLength, priorLastKey);
FileMetadata[] fileMetadata =
@@ -181,6 +188,9 @@ private PartialListing list(String prefix, String delimiter,
handleServiceException(e);
return null; //never returned - keep compiler happy
}
+ catch (ServiceException e) {
+ throw new S3Exception(e);
+ }
}
@Override
@@ -190,6 +200,9 @@ public void delete(String key) throws IOException {
} catch (S3ServiceException e) {
handleServiceException(key, e);
}
+ catch (ServiceException e) {
+ throw new S3Exception(e);
+ }
}
@Override
@@ -200,6 +213,9 @@ public void copy(String srcKey, String dstKey) throws IOException {
} catch (S3ServiceException e) {
handleServiceException(srcKey, e);
}
+ catch (ServiceException e) {
+ throw new S3Exception(e);
+ }
}
@Override
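The new catch blocks in this and the surrounding jets3t call sites all follow one pattern: jets3t 0.9 moved several S3Service methods to throw the broader checked ServiceException, which Hadoop wraps in its own S3Exception (an IOException). Since S3ServiceException extends ServiceException in jets3t 0.9 (the subtype relationship this patch relies on), the narrower type must be caught first; a condensed restatement of the pattern:

    try {
        S3Object object = s3Service.getObject(bucket.getName(), key);
        return object.getDataInputStream();
    } catch (S3ServiceException e) {        // narrower subtype: must come first
        handleServiceException(key, e);
        return null;                        // never reached; keeps the compiler happy
    } catch (ServiceException e) {          // broader checked type from the 0.9 API
        throw new S3Exception(e);
    }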
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/AdminAuthorizedServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/AdminAuthorizedServlet.java
index 9e318ae..949db05 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/AdminAuthorizedServlet.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/AdminAuthorizedServlet.java
@@ -23,7 +23,7 @@
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
-import org.mortbay.jetty.servlet.DefaultServlet;
+import org.eclipse.jetty.servlet.DefaultServlet;
/**
* General servlet which is admin-authorized.
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
index 7b5b17e..009403b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
@@ -60,27 +60,28 @@
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.security.ssl.SSLFactory;
import org.apache.hadoop.util.ReflectionUtils;
-import org.mortbay.io.Buffer;
-import org.mortbay.jetty.Connector;
-import org.mortbay.jetty.Handler;
-import org.mortbay.jetty.MimeTypes;
-import org.mortbay.jetty.Server;
-import org.mortbay.jetty.handler.ContextHandler;
-import org.mortbay.jetty.handler.ContextHandlerCollection;
-import org.mortbay.jetty.nio.SelectChannelConnector;
-import org.mortbay.jetty.security.SslSocketConnector;
-import org.mortbay.jetty.servlet.Context;
-import org.mortbay.jetty.servlet.DefaultServlet;
-import org.mortbay.jetty.servlet.FilterHolder;
-import org.mortbay.jetty.servlet.FilterMapping;
-import org.mortbay.jetty.servlet.ServletHandler;
-import org.mortbay.jetty.servlet.ServletHolder;
-import org.mortbay.jetty.webapp.WebAppContext;
-import org.mortbay.thread.QueuedThreadPool;
-import org.mortbay.util.MultiException;
+import org.eclipse.jetty.server.Handler;
+import org.eclipse.jetty.http.MimeTypes;
+import org.eclipse.jetty.server.HttpConfiguration;
+import org.eclipse.jetty.server.HttpConnectionFactory;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.server.handler.ContextHandler;
+import org.eclipse.jetty.server.handler.ContextHandlerCollection;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.DefaultServlet;
+import org.eclipse.jetty.servlet.FilterHolder;
+import org.eclipse.jetty.servlet.FilterMapping;
+import org.eclipse.jetty.servlet.ServletHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
+import org.eclipse.jetty.util.ssl.SslContextFactory;
+import org.eclipse.jetty.webapp.WebAppContext;
+import org.eclipse.jetty.util.thread.QueuedThreadPool;
+import org.eclipse.jetty.util.MultiException;
import com.sun.jersey.spi.container.servlet.ServletContainer;
+
/**
* Create a Jetty embedded server to answer http requests. The primary goal
* is to serve up status information for the server.
@@ -111,11 +112,12 @@
private SSLFactory sslFactory;
protected final Server webServer;
- protected final Connector listener;
+ protected final ContextHandlerCollection contexts;
+ protected final ServerConnector listener;
protected final WebAppContext webAppContext;
protected final boolean findPort;
- protected final Map<Context, Boolean> defaultContexts =
- new HashMap<Context, Boolean>();
+ protected final Map<ServletContextHandler, Boolean> defaultContexts =
+ new HashMap<ServletContextHandler, Boolean>();
protected final List<String> filterNames = new ArrayList<String>();
private static final int MAX_RETRIES = 10;
static final String STATE_DESCRIPTION_ALIVE = " - alive";
@@ -126,12 +128,12 @@
/** Same as this(name, bindAddress, port, findPort, null); */
public HttpServer(String name, String bindAddress, int port, boolean findPort
) throws IOException {
- this(name, bindAddress, port, findPort, new Configuration());
+ this(name, bindAddress, port, findPort, new Configuration(), null, null);
}
public HttpServer(String name, String bindAddress, int port,
- boolean findPort, Configuration conf, Connector connector) throws IOException {
- this(name, bindAddress, port, findPort, conf, null, connector, null);
+ boolean findPort, Configuration conf, ServerConnector connector) throws IOException {
+ this(name, bindAddress, port, findPort, conf, null, connector, null, null);
}
/**
@@ -150,7 +152,7 @@ public HttpServer(String name, String bindAddress, int port,
*/
public HttpServer(String name, String bindAddress, int port,
boolean findPort, Configuration conf, String[] pathSpecs) throws IOException {
- this(name, bindAddress, port, findPort, conf, null, null, pathSpecs);
+ this(name, bindAddress, port, findPort, conf, null, null, pathSpecs, null);
}
/**
@@ -164,13 +166,13 @@ public HttpServer(String name, String bindAddress, int port,
*/
public HttpServer(String name, String bindAddress, int port,
boolean findPort, Configuration conf) throws IOException {
- this(name, bindAddress, port, findPort, conf, null, null, null);
+ this(name, bindAddress, port, findPort, conf, null, null, null, null);
}
public HttpServer(String name, String bindAddress, int port,
boolean findPort, Configuration conf, AccessControlList adminsAcl)
throws IOException {
- this(name, bindAddress, port, findPort, conf, adminsAcl, null, null);
+ this(name, bindAddress, port, findPort, conf, adminsAcl, null, null, null);
}
/**
@@ -186,8 +188,8 @@ public HttpServer(String name, String bindAddress, int port,
*/
public HttpServer(String name, String bindAddress, int port,
boolean findPort, Configuration conf, AccessControlList adminsAcl,
- Connector connector) throws IOException {
- this(name, bindAddress, port, findPort, conf, adminsAcl, connector, null);
+ ServerConnector connector) throws IOException {
+ this(name, bindAddress, port, findPort, conf, adminsAcl, connector, null, null);
}
/**
@@ -206,11 +208,17 @@ public HttpServer(String name, String bindAddress, int port,
*/
public HttpServer(String name, String bindAddress, int port,
boolean findPort, Configuration conf, AccessControlList adminsAcl,
- Connector connector, String[] pathSpecs) throws IOException {
- webServer = new Server();
+ ServerConnector connector, String[] pathSpecs,
+ Server server) throws IOException {
this.findPort = findPort;
this.adminsAcl = adminsAcl;
+ if(server == null) {
+ webServer = createServer(conf);
+ } else {
+ webServer = server;
+ }
+
if(connector == null) {
listenerStartedExternally = false;
if (HttpConfig.isSecure()) {
@@ -220,11 +228,18 @@ public HttpServer(String name, String bindAddress, int port,
} catch (GeneralSecurityException ex) {
throw new IOException(ex);
}
- SslSocketConnector sslListener = new SslSocketConnector() {
- @Override
- protected SSLServerSocketFactory createFactory() throws Exception {
- return sslFactory.createSSLServerSocketFactory();
- }
+ // Jetty 8+ moved JKS config to SslContextFactory
+ SslContextFactory sslContextFactory = new SslContextFactory(conf.get("ssl.server.keystore.location",""));
+ sslContextFactory.setKeyStorePassword(conf.get("ssl.server.keystore.password",""));
+ if (sslFactory.isClientCertRequired()) {
+ sslContextFactory.setTrustStorePath(conf.get("ssl.server.truststore.location",""));
+ sslContextFactory.setTrustStorePassword(conf.get("ssl.server.truststore.password",""));
+ sslContextFactory.setTrustStoreType(conf.get("ssl.server.truststore.type", "jks"));
+ }
+      // Note: ServerConnector offers no createFactory() hook to override; the SSL
+      // configuration is carried entirely by the SslContextFactory built above.
+      ServerConnector sslListener = new ServerConnector(webServer, sslContextFactory);
-      };
listener = sslListener;
} else {
@@ -239,17 +254,8 @@ protected SSLServerSocketFactory createFactory() throws Exception {
webServer.addConnector(listener);
- int maxThreads = conf.getInt(HTTP_MAX_THREADS, -1);
- // If HTTP_MAX_THREADS is not configured, QueueThreadPool() will use the
- // default value (currently 250).
- QueuedThreadPool threadPool = maxThreads == -1 ?
- new QueuedThreadPool() : new QueuedThreadPool(maxThreads);
- threadPool.setDaemon(true);
- webServer.setThreadPool(threadPool);
-
final String appDir = getWebAppsPath(name);
- ContextHandlerCollection contexts = new ContextHandlerCollection();
- webServer.setHandler(contexts);
+ contexts = new ContextHandlerCollection();
webAppContext = new WebAppContext();
webAppContext.setDisplayName(name);
@@ -258,7 +264,8 @@ protected SSLServerSocketFactory createFactory() throws Exception {
webAppContext.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
webAppContext.getServletContext().setAttribute(ADMINS_ACL, adminsAcl);
addNoCacheFilter(webAppContext);
- webServer.addHandler(webAppContext);
+ contexts.addHandler(webAppContext);
+ webServer.setHandler(contexts);
addDefaultApps(contexts, appDir, conf);
@@ -293,19 +300,30 @@ private void addNoCacheFilter(WebAppContext ctxt) {
* provided. This wrapper and all subclasses must create at least one
* listener.
*/
- public Connector createBaseListener(Configuration conf) throws IOException {
- return HttpServer.createDefaultChannelConnector();
+ public ServerConnector createBaseListener(Configuration conf) throws IOException {
+ return HttpServer.createDefaultChannelConnector(webServer);
}
@InterfaceAudience.Private
- public static Connector createDefaultChannelConnector() {
- SelectChannelConnector ret = new SelectChannelConnector();
- ret.setLowResourceMaxIdleTime(10000);
- ret.setAcceptQueueSize(128);
- ret.setResolveNames(false);
- ret.setUseDirectBuffers(false);
- ret.setHeaderBufferSize(1024*64);
- return ret;
+ public static ServerConnector createDefaultChannelConnector(Server server) {
+ HttpConfiguration http_config = new HttpConfiguration();
+ http_config.setRequestHeaderSize(1024*64);
+
+ ServerConnector conn = new ServerConnector(server, new HttpConnectionFactory(http_config));
+ conn.setAcceptQueueSize(128);
+ conn.setIdleTimeout(10000);
+ return conn;
+ }
+
+ @InterfaceAudience.Private
+ public static Server createServer(Configuration conf) {
+ int maxThreads = conf.getInt(HTTP_MAX_THREADS, -1);
+ // If HTTP_MAX_THREADS is not configured, QueueThreadPool() will use the
+ // default value (currently 250).
+ QueuedThreadPool threadPool = maxThreads == -1 ?
+ new QueuedThreadPool() : new QueuedThreadPool(maxThreads);
+ threadPool.setDaemon(true);
+ return new Server(threadPool);
}
/** Get an array of FilterConfiguration specified in the conf */
@@ -337,14 +355,14 @@ protected void addDefaultApps(ContextHandlerCollection parent,
// set up the context for "/logs/" if "hadoop.log.dir" property is defined.
String logDir = System.getProperty("hadoop.log.dir");
if (logDir != null) {
- Context logContext = new Context(parent, "/logs");
+ ServletContextHandler logContext = new ServletContextHandler(parent, "/logs");
logContext.setResourceBase(logDir);
logContext.addServlet(AdminAuthorizedServlet.class, "/*");
if (conf.getBoolean(
CommonConfigurationKeys.HADOOP_JETTY_LOGS_SERVE_ALIASES,
CommonConfigurationKeys.DEFAULT_HADOOP_JETTY_LOGS_SERVE_ALIASES)) {
logContext.getInitParams().put(
- "org.mortbay.jetty.servlet.Default.aliases", "true");
+ "org.eclipse.jetty.servlet.Default.aliases", "true");
}
logContext.setDisplayName("logs");
setContextAttributes(logContext, conf);
@@ -352,7 +370,7 @@ protected void addDefaultApps(ContextHandlerCollection parent,
defaultContexts.put(logContext, true);
}
// set up the context for "/static/*"
- Context staticContext = new Context(parent, "/static");
+ ServletContextHandler staticContext = new ServletContextHandler(parent, "/static");
staticContext.setResourceBase(appDir + "/static");
staticContext.addServlet(DefaultServlet.class, "/*");
staticContext.setDisplayName("static");
@@ -360,7 +378,7 @@ protected void addDefaultApps(ContextHandlerCollection parent,
defaultContexts.put(staticContext, true);
}
- private void setContextAttributes(Context context, Configuration conf) {
+ private void setContextAttributes(ServletContextHandler context, Configuration conf) {
context.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
context.getServletContext().setAttribute(ADMINS_ACL, adminsAcl);
}
@@ -377,10 +395,11 @@ protected void addDefaultServlets() {
addServlet("conf", "/conf", ConfServlet.class);
}
- public void addContext(Context ctxt, boolean isFiltered)
+ public void addContext(ServletContextHandler ctxt, boolean isFiltered)
throws IOException {
- webServer.addHandler(ctxt);
addNoCacheFilter(webAppContext);
+ contexts.addHandler(ctxt);
+ webServer.setHandler(contexts);
defaultContexts.put(ctxt, isFiltered);
}
@@ -481,7 +500,7 @@ public void addInternalServlet(String name, String pathSpec,
FilterMapping fmap = new FilterMapping();
fmap.setPathSpec(pathSpec);
fmap.setFilterName(SPNEGO_FILTER);
- fmap.setDispatches(Handler.ALL);
+ fmap.setDispatches(FilterMapping.ALL);
handler.addFilterMapping(fmap);
}
}
@@ -495,9 +514,9 @@ public void addFilter(String name, String classname,
LOG.info("Added filter " + name + " (class=" + classname
+ ") to context " + webAppContext.getDisplayName());
final String[] ALL_URLS = { "/*" };
- for (Map.Entry<Context, Boolean> e : defaultContexts.entrySet()) {
+ for (Map.Entry<ServletContextHandler, Boolean> e : defaultContexts.entrySet()) {
if (e.getValue()) {
- Context ctx = e.getKey();
+ ServletContextHandler ctx = e.getKey();
defineFilter(ctx, name, classname, parameters, ALL_URLS);
LOG.info("Added filter " + name + " (class=" + classname
+ ") to context " + ctx.getDisplayName());
@@ -511,7 +530,7 @@ public void addGlobalFilter(String name, String classname,
Map<String, String> parameters) {
final String[] ALL_URLS = { "/*" };
defineFilter(webAppContext, name, classname, parameters, ALL_URLS);
- for (Context ctx : defaultContexts.keySet()) {
+ for (ServletContextHandler ctx : defaultContexts.keySet()) {
defineFilter(ctx, name, classname, parameters, ALL_URLS);
}
LOG.info("Added global filter '" + name + "' (class=" + classname + ")");
@@ -520,16 +539,18 @@ public void addGlobalFilter(String name, String classname,
/**
* Define a filter for a context and set up default url mappings.
*/
- protected void defineFilter(Context ctx, String name,
+ protected void defineFilter(ServletContextHandler ctx, String name,
String classname, Map<String,String> parameters, String[] urls) {
FilterHolder holder = new FilterHolder();
holder.setName(name);
holder.setClassName(classname);
- holder.setInitParameters(parameters);
+ if (null != parameters) {
+ holder.setInitParameters(parameters);
+ }
FilterMapping fmap = new FilterMapping();
fmap.setPathSpecs(urls);
- fmap.setDispatches(Handler.ALL);
+ fmap.setDispatches(FilterMapping.ALL);
fmap.setFilterName(name);
ServletHandler handler = ctx.getServletHandler();
handler.addFilter(holder, fmap);
@@ -541,13 +562,13 @@ protected void defineFilter(Context ctx, String name,
* @param webAppCtx The WebApplicationContext to add to
*/
protected void addFilterPathMapping(String pathSpec,
- Context webAppCtx) {
+ ServletContextHandler webAppCtx) {
ServletHandler handler = webAppCtx.getServletHandler();
for(String name : filterNames) {
FilterMapping fmap = new FilterMapping();
fmap.setPathSpec(pathSpec);
fmap.setFilterName(name);
- fmap.setDispatches(Handler.ALL);
+ fmap.setDispatches(FilterMapping.ALL);
handler.addFilterMapping(fmap);
}
}
@@ -581,7 +602,7 @@ protected String getWebAppsPath(String appName) throws FileNotFoundException {
* @return the port
*/
public int getPort() {
- return webServer.getConnectors()[0].getLocalPort();
+ return ((ServerConnector) webServer.getConnectors()[0]).getLocalPort();
}
/**
@@ -607,12 +628,12 @@ public void addSslListener(InetSocketAddress addr, String keystore,
if (webServer.isStarted()) {
throw new IOException("Failed to add ssl listener");
}
- SslSocketConnector sslListener = new SslSocketConnector();
+ SslContextFactory sslContextFactory = new SslContextFactory(keystore);
+ sslContextFactory.setKeyStorePassword(storPass);
+ sslContextFactory.setKeyManagerPassword(keyPass);
+ ServerConnector sslListener = new ServerConnector(webServer, sslContextFactory);
sslListener.setHost(addr.getHostName());
sslListener.setPort(addr.getPort());
- sslListener.setKeystore(keystore);
- sslListener.setPassword(storPass);
- sslListener.setKeyPassword(keyPass);
webServer.addConnector(sslListener);
}
@@ -636,14 +657,14 @@ public void addSslListener(InetSocketAddress addr, Configuration sslConf,
System.setProperty("javax.net.ssl.trustStoreType", sslConf.get(
"ssl.server.truststore.type", "jks"));
}
- SslSocketConnector sslListener = new SslSocketConnector();
+ SslContextFactory sslContextFactory = new SslContextFactory(sslConf.get("ssl.server.keystore.location",""));
+ sslContextFactory.setKeyStorePassword(sslConf.get("ssl.server.keystore.password", ""));
+ sslContextFactory.setKeyManagerPassword(sslConf.get("ssl.server.keystore.keypassword", ""));
+ sslContextFactory.setKeyStoreType(sslConf.get("ssl.server.keystore.type", "jks"));
+ sslContextFactory.setNeedClientAuth(needCertsAuth);
+ ServerConnector sslListener = new ServerConnector(webServer, sslContextFactory);
sslListener.setHost(addr.getHostName());
sslListener.setPort(addr.getPort());
- sslListener.setKeystore(sslConf.get("ssl.server.keystore.location"));
- sslListener.setPassword(sslConf.get("ssl.server.keystore.password", ""));
- sslListener.setKeyPassword(sslConf.get("ssl.server.keystore.keypassword", ""));
- sslListener.setKeystoreType(sslConf.get("ssl.server.keystore.type", "jks"));
- sslListener.setNeedClientAuth(needCertsAuth);
webServer.addConnector(sslListener);
}
@@ -1075,10 +1096,10 @@ public void doFilter(ServletRequest request,
*/
private String inferMimeType(ServletRequest request) {
String path = ((HttpServletRequest)request).getRequestURI();
- ContextHandler.SContext sContext = (ContextHandler.SContext)config.getServletContext();
- MimeTypes mimes = sContext.getContextHandler().getMimeTypes();
- Buffer mimeBuffer = mimes.getMimeByExtension(path);
- return (mimeBuffer == null) ? null : mimeBuffer.toString();
+ ContextHandler.Context context = (ContextHandler.Context)config.getServletContext();
+ MimeTypes mimes = context.getContextHandler().getMimeTypes();
+ // In Jetty 9, getMimeByExtension() returns a String (or null) directly,
+ // so no Buffer-to-String conversion is needed.
+ return mimes.getMimeByExtension(path);
}
}
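Every SslSocketConnector removal above follows the same shape: Jetty 9 consolidates keystore, truststore, and client-auth settings on SslContextFactory, and the connector only ties that factory to a server. A condensed sketch of the pattern; the path and passwords are placeholders, not Hadoop configuration keys:

    import org.eclipse.jetty.server.Server;
    import org.eclipse.jetty.server.ServerConnector;
    import org.eclipse.jetty.util.ssl.SslContextFactory;

    Server server = new Server();
    SslContextFactory ssl = new SslContextFactory("/path/to/keystore.jks"); // placeholder
    ssl.setKeyStorePassword("storePass");       // placeholder
    ssl.setKeyManagerPassword("keyPass");       // placeholder
    ssl.setNeedClientAuth(false);               // true to require client certificates

    ServerConnector https = new ServerConnector(server, ssl);
    https.setPort(8443);
    server.addConnector(https);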
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java
index af469f9..a6096cf 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java
@@ -36,8 +36,8 @@
import org.apache.hadoop.metrics.spi.OutputRecord;
import org.apache.hadoop.metrics.spi.AbstractMetricsContext.MetricMap;
import org.apache.hadoop.metrics.spi.AbstractMetricsContext.TagMap;
-import org.mortbay.util.ajax.JSON;
-import org.mortbay.util.ajax.JSON.Output;
+import org.eclipse.jetty.util.ajax.JSON;
+import org.eclipse.jetty.util.ajax.JSON.Output;
/**
* A servlet to print out metrics data. By default, the servlet returns a
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
index ec6c7c8..0381020 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
@@ -38,7 +38,7 @@
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.commons.math.util.MathUtils;
+import org.apache.commons.math3.util.ArithmeticUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsCollector;
@@ -460,7 +460,7 @@ private synchronized void configureSinks() {
MetricsConfig conf = entry.getValue();
int sinkPeriod = conf.getInt(PERIOD_KEY, PERIOD_DEFAULT);
confPeriod = confPeriod == 0 ? sinkPeriod
- : MathUtils.gcd(confPeriod, sinkPeriod);
+ : ArithmeticUtils.gcd(confPeriod, sinkPeriod);
String clsName = conf.getClassName("");
if (clsName == null) continue; // sink can be registered later on
String sinkName = entry.getKey();
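commons-math3 renamed the integer helpers, so MathUtils.gcd becomes ArithmeticUtils.gcd; the period-folding logic itself is unchanged. A small worked example of what configureSinks() computes, with hypothetical sink periods:

    import org.apache.commons.math3.util.ArithmeticUtils;

    int confPeriod = 0;
    for (int sinkPeriod : new int[] { 10, 15, 60 }) {  // hypothetical periods, in seconds
        confPeriod = (confPeriod == 0)
            ? sinkPeriod
            : ArithmeticUtils.gcd(confPeriod, sinkPeriod);
    }
    // confPeriod == 5: the metrics timer fires at the GCD of all sink periods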
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/source/JvmMetricsInfo.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/source/JvmMetricsInfo.java
index 83729b1..52a2ebf 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/source/JvmMetricsInfo.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/source/JvmMetricsInfo.java
@@ -28,7 +28,7 @@
*/
@InterfaceAudience.Private
public enum JvmMetricsInfo implements MetricsInfo {
- JvmMetrics("JVM related metrics etc."), // record infoß
+ JvmMetrics("JVM related metrics etc."), // record info
// metrics
MemNonHeapUsedM("Non-heap memory used in MB"),
MemNonHeapCommittedM("Non-heap memory committed in MB"),
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java
index 1c22ee6..90846d9 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java
@@ -23,7 +23,7 @@
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
-import org.mortbay.util.ajax.JSON;
+import org.eclipse.jetty.util.ajax.JSON;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
@@ -107,4 +107,4 @@ public void testBadFormat() throws Exception {
}
assertEquals("", sw.toString());
}
-}
\ No newline at end of file
+}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java
index e9677ba..7cf8fe2 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java
@@ -32,7 +32,7 @@
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
-import org.mortbay.log.Log;
+import org.eclipse.jetty.util.log.Log;
import static org.apache.hadoop.fs.FileSystemTestHelper.*;
@@ -771,7 +771,7 @@ public void testRenameNonExistentPath() throws Exception {
rename(src, dst, false, false, false, Rename.NONE);
Assert.fail("Should throw FileNotFoundException");
} catch (IOException e) {
- Log.info("XXX", e);
+ Log.getRootLogger().info("XXX", e);
Assert.assertTrue(unwrapException(e) instanceof FileNotFoundException);
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java
index 446b38e..02e2a39 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java
@@ -26,7 +26,7 @@
import org.apache.hadoop.fs.FsConstants;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.viewfs.ConfigUtil;
-import org.mortbay.log.Log;
+import org.eclipse.jetty.util.log.Log;
/**
@@ -83,7 +83,7 @@ static public FileSystem setupForViewFileSystem(Configuration conf, FileSystem f
FileSystem fsView = FileSystem.get(FsConstants.VIEWFS_URI, conf);
fsView.setWorkingDirectory(new Path(wdDir)); // in case testdir relative to wd.
- Log.info("Working dir is: " + fsView.getWorkingDirectory());
+ Log.getRootLogger().info("Working dir is: " + fsView.getWorkingDirectory());
return fsView;
}
@@ -110,12 +110,12 @@ static void setUpHomeDir(Configuration conf, FileSystem fsTarget) {
} else { // home dir is at root. Just link the home dir itse
URI linkTarget = fsTarget.makeQualified(new Path(homeDir)).toUri();
ConfigUtil.addLink(conf, homeDir, linkTarget);
- Log.info("Added link for home dir " + homeDir + "->" + linkTarget);
+ Log.getRootLogger().info("Added link for home dir " + homeDir + "->" + linkTarget);
}
// Now set the root of the home dir for viewfs
String homeDirRoot = fsTarget.getHomeDirectory().getParent().toUri().getPath();
ConfigUtil.setHomeDirConf(conf, homeDirRoot);
- Log.info("Home dir base for viewfs" + homeDirRoot);
+ Log.getRootLogger().info("Home dir base for viewfs" + homeDirRoot);
}
/*
@@ -127,7 +127,7 @@ static void linkUpFirstComponents(Configuration conf, String path, FileSystem fs
String firstComponent = path.substring(0, indexOf2ndSlash);
URI linkTarget = fsTarget.makeQualified(new Path(firstComponent)).toUri();
ConfigUtil.addLink(conf, firstComponent, linkTarget);
- Log.info("Added link for " + info + " "
+ Log.getRootLogger().info("Added link for " + info + " "
+ firstComponent + "->" + linkTarget);
}
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java
index ac63217..6e5879c 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java
@@ -25,7 +25,7 @@
import org.apache.hadoop.fs.FsConstants;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.viewfs.ConfigUtil;
-import org.mortbay.log.Log;
+import org.eclipse.jetty.util.log.Log;
/**
@@ -81,7 +81,7 @@ static public FileContext setupForViewFsLocalFs() throws Exception {
FileContext fc = FileContext.getFileContext(FsConstants.VIEWFS_URI, conf);
fc.setWorkingDirectory(new Path(wdDir)); // in case testdir relative to wd.
- Log.info("Working dir is: " + fc.getWorkingDirectory());
+ Log.getRootLogger().info("Working dir is: " + fc.getWorkingDirectory());
//System.out.println("SRCOfTests = "+ getTestRootPath(fc, "test"));
//System.out.println("TargetOfTests = "+ targetOfTests.toUri());
return fc;
@@ -106,12 +106,12 @@ static void setUpHomeDir(Configuration conf, FileContext fsTarget) {
} else { // home dir is at root. Just link the home dir itse
URI linkTarget = fsTarget.makeQualified(new Path(homeDir)).toUri();
ConfigUtil.addLink(conf, homeDir, linkTarget);
- Log.info("Added link for home dir " + homeDir + "->" + linkTarget);
+ Log.getRootLogger().info("Added link for home dir " + homeDir + "->" + linkTarget);
}
// Now set the root of the home dir for viewfs
String homeDirRoot = fsTarget.getHomeDirectory().getParent().toUri().getPath();
ConfigUtil.setHomeDirConf(conf, homeDirRoot);
- Log.info("Home dir base for viewfs" + homeDirRoot);
+ Log.getRootLogger().info("Home dir base for viewfs" + homeDirRoot);
}
/*
@@ -124,7 +124,7 @@ static void linkUpFirstComponents(Configuration conf, String path,
String firstComponent = path.substring(0, indexOf2ndSlash);
URI linkTarget = fsTarget.makeQualified(new Path(firstComponent)).toUri();
ConfigUtil.addLink(conf, firstComponent, linkTarget);
- Log.info("Added link for " + info + " "
+ Log.getRootLogger().info("Added link for " + info + " "
+ firstComponent + "->" + linkTarget);
}
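Jetty 9 removed the static convenience methods on org.eclipse.jetty.util.log.Log, hence the Log.getRootLogger() calls threaded through these tests. A sketch of that pattern next to the per-class alternative; workingDir, component, and target are hypothetical variables for illustration:

    import org.eclipse.jetty.util.log.Log;
    import org.eclipse.jetty.util.log.Logger;

    Log.getRootLogger().info("Working dir is: " + workingDir);   // pattern used in the patch
    Logger log = Log.getLogger(ViewFsTestSetup.class);           // named, per-class logger
    log.info("Added link for {} -> {}", component, target);      // Jetty loggers expand {}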
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
index 079bc37..f0e1f17 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
@@ -60,8 +60,9 @@
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
+import static org.junit.matchers.JUnitMatchers.*;
import org.mockito.Mockito;
-import org.mortbay.util.ajax.JSON;
+import org.eclipse.jetty.util.ajax.JSON;
public class TestHttpServer extends HttpServerFunctionalTest {
static final Log LOG = LogFactory.getLog(TestHttpServer.class);
@@ -239,7 +240,7 @@ public void run() {
conn = (HttpURLConnection)servletUrl.openConnection();
conn.connect();
assertEquals(200, conn.getResponseCode());
- assertEquals("text/plain; charset=utf-8", conn.getContentType());
+ assertThat(conn.getContentType().toLowerCase(),both(containsString("text/plain")).and(containsString("charset=utf-8")));
// We should ignore parameters for mime types - ie a parameter
// ending in .css should not change mime type
@@ -247,21 +248,21 @@ public void run() {
conn = (HttpURLConnection)servletUrl.openConnection();
conn.connect();
assertEquals(200, conn.getResponseCode());
- assertEquals("text/plain; charset=utf-8", conn.getContentType());
+ assertThat(conn.getContentType().toLowerCase(),both(containsString("text/plain")).and(containsString("charset=utf-8")));
// Servlets that specify text/html should get that content type
servletUrl = new URL(baseUrl, "/htmlcontent");
conn = (HttpURLConnection)servletUrl.openConnection();
conn.connect();
assertEquals(200, conn.getResponseCode());
- assertEquals("text/html; charset=utf-8", conn.getContentType());
+ assertThat(conn.getContentType().toLowerCase(),both(containsString("text/html")).and(containsString("charset=utf-8")));
// JSPs should default to text/html with utf8
servletUrl = new URL(baseUrl, "/testjsp.jsp");
conn = (HttpURLConnection)servletUrl.openConnection();
conn.connect();
assertEquals(200, conn.getResponseCode());
- assertEquals("text/html; charset=utf-8", conn.getContentType());
+ assertThat(conn.getContentType().toLowerCase(),both(containsString("text/html")).and(containsString("charset=utf-8")));
}
/**
@@ -530,8 +531,8 @@ public void testRequiresAuthorizationAccess() throws Exception {
// try to reuse the port
port = myServer2.getListenerAddress().getPort();
myServer2.stop();
- assertEquals(-1, myServer2.getPort()); // not bound
- myServer2.openListener();
+ // Jetty 9's getLocalPort() has two error values: -1 (not opened) and -2 (closed)
+ assertTrue(myServer2.getPort() == -1 || myServer2.getPort() == -2);
+ myServer2.start();
assertEquals(port, myServer2.getPort()); // expect same port
} finally {
myServer.stop();
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java
index 880804e..80cfd1d 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java
@@ -76,6 +76,7 @@ public void setup() throws Exception {
conf.setInt(HttpServer.HTTP_MAX_THREADS, 10);
conf.addResource(CONFIG_SITE_XML);
+ conf.addResource(conf.get("hadoop.ssl.server.conf","ssl-server.xml"));
server = createServer("test", conf);
server.addServlet("echo", "/echo", TestHttpServer.EchoServlet.class);
server.start();
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java
index 3c01320..e9f7ed4 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java
@@ -171,8 +171,7 @@ public void testServletFilterWhenInitThrowsException() throws Exception {
http.start();
fail("expecting exception");
} catch (IOException e) {
- assertTrue(e.getMessage().contains(
- "Problem in starting http server. Server handlers failed"));
+ assertTrue(e.getMessage().toLowerCase().contains("problem"));
}
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/JerseyResource.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/JerseyResource.java
index f1313e2..52ea9b9 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/JerseyResource.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/JerseyResource.java
@@ -32,7 +32,7 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.mortbay.util.ajax.JSON;
+import org.eclipse.jetty.util.ajax.JSON;
/**
* A simple Jersey resource class TestHttpServer.
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics/TestMetricsServlet.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics/TestMetricsServlet.java
index ec54f59..d289a03 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics/TestMetricsServlet.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics/TestMetricsServlet.java
@@ -30,7 +30,7 @@
import org.apache.hadoop.metrics.MetricsServlet.TagsMetricsPair;
import org.apache.hadoop.metrics.spi.NoEmitMetricsContext;
import org.apache.hadoop.metrics.spi.OutputRecord;
-import org.mortbay.util.ajax.JSON;
+import org.eclipse.jetty.util.ajax.JSON;
public class TestMetricsServlet extends TestCase {
MetricsContext nc1;
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/UnitTestcaseTimeLimit.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/UnitTestcaseTimeLimit.java
index 5581c7d..e992fea 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/UnitTestcaseTimeLimit.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/UnitTestcaseTimeLimit.java
@@ -18,7 +18,7 @@
package org.apache.hadoop.test;
import org.junit.Rule;
-import org.junit.rules.MethodRule;
+import org.junit.rules.TestRule;
import org.junit.rules.Timeout;
/**
@@ -30,5 +30,5 @@
public class UnitTestcaseTimeLimit {
public final int timeOutSecs = 10;
- @Rule public MethodRule globalTimeout = new Timeout(timeOutSecs * 1000);
+ @Rule public TestRule globalTimeout = new Timeout(timeOutSecs * 1000);
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClassUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClassUtil.java
index fe1284f..91c13a8 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClassUtil.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClassUtil.java
@@ -32,9 +32,9 @@ public void testFindContainingJar() {
Assert.assertNotNull("Containing jar not found for Logger",
containingJar);
File jarFile = new File(containingJar);
- Assert.assertTrue("Containing jar does not exist on file system",
+ Assert.assertTrue("Containing jar does not exist on file system ",
jarFile.exists());
- Assert.assertTrue("Incorrect jar file" + containingJar,
- jarFile.getName().matches("log4j.+[.]jar"));
+ Assert.assertTrue("Incorrect jar file " + containingJar,
+ jarFile.getName().matches("log4j.*[.]jar"));
}
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
index e882f68..6027cf4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
@@ -34,7 +34,7 @@
<description>Apache Hadoop HttpFS</description>
<properties>
- <tomcat.version>6.0.36</tomcat.version>
+ <tomcat.version>7.0.37</tomcat.version>
<httpfs.source.repository>REPO NOT AVAIL</httpfs.source.repository>
<httpfs.source.revision>REVISION NOT AVAIL</httpfs.source.revision>
@@ -45,7 +45,7 @@
</httpfs.tomcat.dist.dir>
<kerberos.realm>LOCALHOST</kerberos.realm>
<test.exclude.kerberos.test>**/TestHttpFSWithKerberos.java</test.exclude.kerberos.test>
- <tomcat.download.url>http://archive.apache.org/dist/tomcat/tomcat-6/v${tomcat.version}/bin/apache-tomcat-${tomcat.version}.tar.gz</tomcat.download.url>
+ <tomcat.download.url>http://archive.apache.org/dist/tomcat/tomcat-7/v${tomcat.version}/bin/apache-tomcat-${tomcat.version}.tar.gz</tomcat.download.url>
</properties>
<dependencies>
@@ -90,8 +90,8 @@
<scope>compile</scope>
</dependency>
<dependency>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>jetty</artifactId>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-server</artifactId>
<scope>test</scope>
</dependency>
<dependency>
@@ -108,12 +108,8 @@
<artifactId>commons-httpclient</artifactId>
</exclusion>
<exclusion>
- <groupId>tomcat</groupId>
- <artifactId>jasper-compiler</artifactId>
- </exclusion>
- <exclusion>
- <groupId>tomcat</groupId>
- <artifactId>jasper-runtime</artifactId>
+ <groupId>org.apache.tomcat</groupId>
+ <artifactId>tomcat-jasper</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet</groupId>
@@ -128,20 +124,20 @@
<artifactId>jsp-api</artifactId>
</exclusion>
<exclusion>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>jetty</artifactId>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-server</artifactId>
</exclusion>
<exclusion>
- <groupId>org.mortbay.jetty</groupId>
+ <groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
</exclusion>
<exclusion>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>jsp-api-2.1</artifactId>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-servlet</artifactId>
</exclusion>
<exclusion>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>servlet-api-2.5</artifactId>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-webapp</artifactId>
</exclusion>
<exclusion>
<groupId>net.java.dev.jets3t</groupId>
@@ -171,12 +167,8 @@
<artifactId>commons-httpclient</artifactId>
</exclusion>
<exclusion>
- <groupId>tomcat</groupId>
- <artifactId>jasper-compiler</artifactId>
- </exclusion>
- <exclusion>
- <groupId>tomcat</groupId>
- <artifactId>jasper-runtime</artifactId>
+ <groupId>org.apache.tomcat</groupId>
+ <artifactId>tomcat-jasper</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet</groupId>
@@ -191,20 +183,20 @@
<artifactId>jsp-api</artifactId>
</exclusion>
<exclusion>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>jetty</artifactId>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-server</artifactId>
</exclusion>
<exclusion>
- <groupId>org.mortbay.jetty</groupId>
+ <groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
</exclusion>
<exclusion>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>jsp-api-2.1</artifactId>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-servlet</artifactId>
</exclusion>
<exclusion>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>servlet-api-2.5</artifactId>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-webapp</artifactId>
</exclusion>
<exclusion>
<groupId>net.java.dev.jets3t</groupId>
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh
index 02e1a71..621a2fa 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh
@@ -55,7 +55,7 @@ print "Setting HTTPFS_HOME: ${HTTPFS_HOME}"
#
if [ -e "${HTTPFS_HOME}/bin/httpfs-env.sh" ]; then
print "Sourcing: ${HTTPFS_HOME}/bin/httpfs-env.sh"
- source ${HTTPFS_HOME}/bin/HTTPFS-env.sh
+ source ${HTTPFS_HOME}/bin/httpfs-env.sh
grep "^ *export " ${HTTPFS_HOME}/bin/httpfs-env.sh | sed 's/ *export/ setting/'
fi
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/tomcat/server.xml b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/tomcat/server.xml
index a425bdd..30839f2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/tomcat/server.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/tomcat/server.xml
@@ -1,8 +1,5 @@
<?xml version='1.0' encoding='utf-8'?>
<!--
-
- All Rights Reserved.
-
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
@@ -23,16 +20,17 @@
Documentation at /docs/config/server.html
-->
<Server port="${httpfs.admin.port}" shutdown="SHUTDOWN">
-
+ <!-- Security listener. Documentation at /docs/config/listeners.html
+ <Listener className="org.apache.catalina.security.SecurityListener" />
+ -->
<!--APR library loader. Documentation at /docs/apr.html -->
- <Listener className="org.apache.catalina.core.AprLifecycleListener" SSLEngine="on"/>
+ <Listener className="org.apache.catalina.core.AprLifecycleListener" SSLEngine="on" />
<!--Initialize Jasper prior to webapps are loaded. Documentation at /docs/jasper-howto.html -->
- <Listener className="org.apache.catalina.core.JasperListener"/>
+ <Listener className="org.apache.catalina.core.JasperListener" />
<!-- Prevent memory leaks due to use of particular java/javax APIs-->
- <Listener className="org.apache.catalina.core.JreMemoryLeakPreventionListener"/>
- <!-- JMX Support for the Tomcat server. Documentation at /docs/non-existent.html -->
- <Listener className="org.apache.catalina.mbeans.ServerLifecycleListener"/>
- <Listener className="org.apache.catalina.mbeans.GlobalResourcesLifecycleListener"/>
+ <Listener className="org.apache.catalina.core.JreMemoryLeakPreventionListener" />
+ <Listener className="org.apache.catalina.mbeans.GlobalResourcesLifecycleListener" />
+ <Listener className="org.apache.catalina.core.ThreadLocalLeakPreventionListener" />
<!-- Global JNDI resources
Documentation at /docs/jndi-resources-howto.html
@@ -45,7 +43,7 @@
type="org.apache.catalina.UserDatabase"
description="User database that can be updated and saved"
factory="org.apache.catalina.users.MemoryUserDatabaseFactory"
- pathname="conf/tomcat-users.xml"/>
+ pathname="conf/tomcat-users.xml" />
</GlobalNamingResources>
<!-- A "Service" is a collection of one or more "Connectors" that share
@@ -71,7 +69,7 @@
-->
<Connector port="${httpfs.http.port}" protocol="HTTP/1.1"
connectionTimeout="20000"
- redirectPort="8443"/>
+ redirectPort="8443" />
<!-- A "Connector" using the shared thread pool-->
<!--
<Connector executor="tomcatThreadPool"
@@ -93,10 +91,10 @@
<!-- An Engine represents the entry point (within Catalina) that processes
- every request. The Engine implementation for Tomcat stand alone
- analyzes the HTTP headers included with the request, and passes them
- on to the appropriate Host (virtual host).
- Documentation at /docs/config/engine.html -->
+ every request. The Engine implementation for Tomcat stand alone
+ analyzes the HTTP headers included with the request, and passes them
+ on to the appropriate Host (virtual host).
+ Documentation at /docs/config/engine.html -->
<!-- You should set jvmRoute to support load-balancing via AJP ie :
<Engine name="Catalina" defaultHost="localhost" jvmRoute="jvm1">
@@ -110,26 +108,19 @@
<Cluster className="org.apache.catalina.ha.tcp.SimpleTcpCluster"/>
-->
- <!-- The request dumper valve dumps useful debugging information about
- the request and response data received and sent by Tomcat.
- Documentation at: /docs/config/valve.html -->
- <!--
- <Valve className="org.apache.catalina.valves.RequestDumperValve"/>
- -->
-
- <!-- This Realm uses the UserDatabase configured in the global JNDI
- resources under the key "UserDatabase". Any edits
- that are performed against this UserDatabase are immediately
- available for use by the Realm. -->
- <Realm className="org.apache.catalina.realm.UserDatabaseRealm"
- resourceName="UserDatabase"/>
+ <!-- Use the LockOutRealm to prevent attempts to guess user passwords
+ via a brute-force attack -->
+ <Realm className="org.apache.catalina.realm.LockOutRealm">
+ <!-- This Realm uses the UserDatabase configured in the global JNDI
+ resources under the key "UserDatabase". Any edits
+ that are performed against this UserDatabase are immediately
+ available for use by the Realm. -->
+ <Realm className="org.apache.catalina.realm.UserDatabaseRealm"
+ resourceName="UserDatabase"/>
+ </Realm>
- <!-- Define the default virtual host
- Note: XML Schema validation will not work with Xerces 2.2.
- -->
- <Host name="localhost" appBase="webapps"
- unpackWARs="true" autoDeploy="true"
- xmlValidation="false" xmlNamespaceAware="false">
+ <Host name="localhost" appBase="webapps"
+ unpackWARs="true" autoDeploy="true">
<!-- SingleSignOn valve, share authentication between web applications
Documentation at: /docs/config/valve.html -->
@@ -138,11 +129,11 @@
-->
<!-- Access log processes all example.
- Documentation at: /docs/config/valve.html -->
- <!--
+ Documentation at: /docs/config/valve.html
+ Note: The pattern used is equivalent to using pattern="common" -->
<Valve className="org.apache.catalina.valves.AccessLogValve" directory="logs"
- prefix="localhost_access_log." suffix=".txt" pattern="common" resolveHosts="false"/>
- -->
+ prefix="localhost_access_log." suffix=".txt"
+ pattern="%h %l %u %t &quot;%r&quot; %s %b" />
</Host>
</Engine>
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java
index 3d96fd8..aa1486d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java
@@ -42,8 +42,8 @@
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
-import org.mortbay.jetty.Server;
-import org.mortbay.jetty.webapp.WebAppContext;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.webapp.WebAppContext;
import java.io.File;
import java.io.FileOutputStream;
@@ -108,7 +108,7 @@ private void createHttpFSServer() throws Exception {
URL url = cl.getResource("webapp");
WebAppContext context = new WebAppContext(url.getPath(), "/webhdfs");
Server server = TestJettyHelper.getJettyServer();
- server.addHandler(context);
+ server.setHandler(context);
server.start();
}
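
The repeated addHandler -> setHandler rewrites in these HttpFS tests track a real Jetty 9 semantic change: a Server now holds exactly one root handler instead of accumulating a list. Where several handlers are needed, they have to be grouped explicitly. A minimal sketch of the Jetty 9 idiom, with the webapp path and port as placeholder values:

    import org.eclipse.jetty.server.Server;
    import org.eclipse.jetty.server.handler.HandlerCollection;
    import org.eclipse.jetty.webapp.WebAppContext;

    public class EmbeddedJettySketch {
      public static void main(String[] args) throws Exception {
        Server server = new Server(8080);  // placeholder port

        // Deploy a webapp directory under /webhdfs, as the tests do.
        WebAppContext webhdfs = new WebAppContext("/path/to/webapp", "/webhdfs");

        // Jetty 9 keeps a single root handler; to mount more than one
        // handler, group them in a HandlerCollection first.
        HandlerCollection handlers = new HandlerCollection();
        handlers.addHandler(webhdfs);
        server.setHandler(handlers);

        server.start();
        server.join();
      }
    }
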
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
index 6057a48..adf85d5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
@@ -56,8 +56,8 @@
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.junit.Test;
-import org.mortbay.jetty.Server;
-import org.mortbay.jetty.webapp.WebAppContext;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.webapp.WebAppContext;
public class TestHttpFSServer extends HFSTestCase {
@@ -157,7 +157,7 @@ private void createHttpFSServer(boolean addDelegationTokenAuthHandler)
URL url = cl.getResource("webapp");
WebAppContext context = new WebAppContext(url.getPath(), "/webhdfs");
Server server = TestJettyHelper.getJettyServer();
- server.addHandler(context);
+ server.setHandler(context);
server.start();
if (addDelegationTokenAuthHandler) {
HttpFSServerWebApp.get().setAuthority(TestJettyHelper.getAuthority());
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSWithKerberos.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSWithKerberos.java
index 140f866..a42e70d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSWithKerberos.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSWithKerberos.java
@@ -41,8 +41,8 @@
import org.json.simple.parser.JSONParser;
import org.junit.After;
import org.junit.Test;
-import org.mortbay.jetty.Server;
-import org.mortbay.jetty.webapp.WebAppContext;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.webapp.WebAppContext;
import java.io.File;
import java.io.FileOutputStream;
@@ -105,7 +105,7 @@ private void createHttpFSServer() throws Exception {
URL url = cl.getResource("webapp");
WebAppContext context = new WebAppContext(url.getPath(), "/webhdfs");
Server server = TestJettyHelper.getJettyServer();
- server.addHandler(context);
+ server.setHandler(context);
server.start();
HttpFSServerWebApp.get().setAuthority(TestJettyHelper.getAuthority());
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHFSTestCase.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHFSTestCase.java
index eb2cdc6..3d13cf5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHFSTestCase.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHFSTestCase.java
@@ -39,8 +39,8 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.Time;
import org.junit.Test;
-import org.mortbay.jetty.Server;
-import org.mortbay.jetty.servlet.Context;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.servlet.ServletContextHandler;
public class TestHFSTestCase extends HFSTestCase {
@@ -165,11 +165,11 @@ protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws Se
@Test
@TestJetty
public void testJetty() throws Exception {
- Context context = new Context();
+ ServletContextHandler context = new ServletContextHandler();
context.setContextPath("/");
context.addServlet(MyServlet.class, "/bar");
Server server = TestJettyHelper.getJettyServer();
- server.addHandler(context);
+ server.setHandler(context);
server.start();
URL url = new URL(TestJettyHelper.getJettyURL(), "/bar");
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHTestCase.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHTestCase.java
index 74d34ec..8b7223a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHTestCase.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHTestCase.java
@@ -34,8 +34,8 @@
import org.apache.hadoop.util.Time;
import org.junit.Test;
-import org.mortbay.jetty.Server;
-import org.mortbay.jetty.servlet.Context;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.servlet.ServletContextHandler;
public class TestHTestCase extends HTestCase {
@@ -132,11 +132,11 @@ protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws Se
@Test
@TestJetty
public void testJetty() throws Exception {
- Context context = new Context();
+ ServletContextHandler context = new ServletContextHandler();
context.setContextPath("/");
context.addServlet(MyServlet.class, "/bar");
Server server = TestJettyHelper.getJettyServer();
- server.addHandler(context);
+ server.setHandler(context);
server.start();
URL url = new URL(TestJettyHelper.getJettyURL(), "/bar");
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
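
Both helper test cases swap org.mortbay.jetty.servlet.Context for its Jetty 9 successor, ServletContextHandler, which keeps the same addServlet(Class, pathSpec) convenience. A self-contained sketch of the pattern, where MyServlet is a stand-in for the tests' servlet:

    import java.io.IOException;
    import javax.servlet.http.HttpServlet;
    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;
    import org.eclipse.jetty.server.Server;
    import org.eclipse.jetty.servlet.ServletContextHandler;

    public class ServletContextHandlerSketch {
      // Stand-in for the MyServlet used by the tests.
      public static class MyServlet extends HttpServlet {
        @Override
        protected void doGet(HttpServletRequest req, HttpServletResponse resp)
            throws IOException {
          resp.getWriter().print("foo");
        }
      }

      public static void main(String[] args) throws Exception {
        Server server = new Server(0);  // 0 = bind any free port
        ServletContextHandler context = new ServletContextHandler();
        context.setContextPath("/");
        context.addServlet(MyServlet.class, "/bar");  // same call shape as Jetty 6
        server.setHandler(context);
        server.start();
        server.join();
      }
    }
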
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java
index 4442281..44cf67e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java
@@ -28,7 +28,9 @@
import org.junit.rules.MethodRule;
import org.junit.runners.model.FrameworkMethod;
import org.junit.runners.model.Statement;
-import org.mortbay.jetty.Server;
+import org.eclipse.jetty.server.Connector;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
public class TestJettyHelper implements MethodRule {
@@ -73,8 +75,10 @@ private Server createJettyServer() {
int port = ss.getLocalPort();
ss.close();
Server server = new Server(0);
- server.getConnectors()[0].setHost(host);
- server.getConnectors()[0].setPort(port);
+ ServerConnector connector = new ServerConnector(server);
+ connector.setHost(host);
+ connector.setPort(port);
+ server.setConnectors(new Connector[] { connector });
return server;
} catch (Exception ex) {
throw new RuntimeException("Could not stop embedded servlet container, " + ex.getMessage(), ex);
@@ -90,8 +94,8 @@ public static InetSocketAddress getAuthority() {
Server server = getJettyServer();
try {
InetAddress add =
- InetAddress.getByName(server.getConnectors()[0].getHost());
- int port = server.getConnectors()[0].getPort();
+ InetAddress.getByName(((ServerConnector)server.getConnectors()[0]).getHost());
+ int port = ((ServerConnector)server.getConnectors()[0]).getPort();
return new InetSocketAddress(add, port);
} catch (UnknownHostException ex) {
throw new RuntimeException(ex);
@@ -128,7 +132,7 @@ public static URL getJettyURL() {
throw new IllegalStateException("This test does not use @TestJetty");
}
try {
- return new URL("http://" + server.getConnectors()[0].getHost() + ":" + server.getConnectors()[0].getPort());
+ return new URL("http://" + ((ServerConnector)server.getConnectors()[0]).getHost() + ":" + ((ServerConnector)server.getConnectors()[0]).getPort());
} catch (MalformedURLException ex) {
throw new RuntimeException("It should never happen, " + ex.getMessage(), ex);
}
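
The casts added throughout TestJettyHelper follow from Jetty 9 moving host/port configuration off the Connector interface: getConnectors() returns plain Connector, and only the concrete ServerConnector exposes the accessors. A sketch of the full round trip under those assumptions:

    import org.eclipse.jetty.server.Connector;
    import org.eclipse.jetty.server.Server;
    import org.eclipse.jetty.server.ServerConnector;

    public class ServerConnectorSketch {
      public static void main(String[] args) throws Exception {
        Server server = new Server();

        // Connectors are now configured as objects and attached explicitly.
        ServerConnector connector = new ServerConnector(server);
        connector.setHost("localhost");
        connector.setPort(0);  // 0 = ephemeral port chosen at start()
        server.setConnectors(new Connector[] { connector });

        server.start();

        // Reading the address back requires the same downcast the tests use.
        ServerConnector first = (ServerConnector) server.getConnectors()[0];
        System.out.println("http://" + first.getHost() + ":" + first.getLocalPort());

        server.stop();
      }
    }
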
diff --git a/hadoop-hdfs-project/hadoop-hdfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
index 02e393a..34ec2bb 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
@@ -72,12 +72,12 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<scope>compile</scope>
</dependency>
<dependency>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>jetty</artifactId>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-server</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
- <groupId>org.mortbay.jetty</groupId>
+ <groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
<scope>compile</scope>
</dependency>
@@ -122,11 +122,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<scope>compile</scope>
</dependency>
<dependency>
- <groupId>javax.servlet.jsp</groupId>
- <artifactId>jsp-api</artifactId>
- <scope>compile</scope>
- </dependency>
- <dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<scope>compile</scope>
@@ -137,11 +132,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<scope>compile</scope>
</dependency>
<dependency>
- <groupId>javax.servlet</groupId>
- <artifactId>servlet-api</artifactId>
- <scope>compile</scope>
- </dependency>
- <dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
@@ -167,11 +157,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<scope>compile</scope>
</dependency>
<dependency>
- <groupId>tomcat</groupId>
- <artifactId>jasper-runtime</artifactId>
- <scope>compile</scope>
- </dependency>
- <dependency>
<groupId>xmlenc</groupId>
<artifactId>xmlenc</artifactId>
<scope>compile</scope>
@@ -193,101 +178,71 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
</configuration>
</plugin>
<plugin>
- <groupId>org.codehaus.mojo.jspc</groupId>
- <artifactId>jspc-maven-plugin</artifactId>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-jspc-maven-plugin</artifactId>
<executions>
<execution>
<id>hdfs</id>
- <phase>generate-sources</phase>
+ <phase>process-classes</phase>
<goals>
- <goal>compile</goal>
+ <goal>jspc</goal>
</goals>
<configuration>
- <compile>false</compile>
+ <webAppSourceDirectory>${basedir}/src/main/webapps/hdfs</webAppSourceDirectory>
+ <packageRoot>org.apache.hadoop.hdfs.server.namenode</packageRoot>
+ <includes>*.jsp</includes>
<workingDirectory>${project.build.directory}/generated-sources/java</workingDirectory>
- <webFragmentFile>${project.build.directory}/hdfs-jsp-servlet-definitions.xml</webFragmentFile>
- <packageName>org.apache.hadoop.hdfs.server.namenode</packageName>
- <sources>
- <directory>${basedir}/src/main/webapps/hdfs</directory>
- <includes>
- <include>*.jsp</include>
- </includes>
- </sources>
+ <webXmlFragment>${project.build.directory}/hdfs-jsp-servlet-definitions.xml</webXmlFragment>
</configuration>
</execution>
<execution>
<id>secondary</id>
- <phase>generate-sources</phase>
+ <phase>process-classes</phase>
<goals>
- <goal>compile</goal>
+ <goal>jspc</goal>
</goals>
<configuration>
- <compile>false</compile>
+ <webAppSourceDirectory>${basedir}/src/main/webapps/secondary</webAppSourceDirectory>
+ <packageRoot>org.apache.hadoop.hdfs.server.namenode</packageRoot>
+ <includes>*.jsp</includes>
<workingDirectory>${project.build.directory}/generated-sources/java</workingDirectory>
- <webFragmentFile>${project.build.directory}/secondary-jsp-servlet-definitions.xml</webFragmentFile>
- <packageName>org.apache.hadoop.hdfs.server.namenode</packageName>
- <sources>
- <directory>${basedir}/src/main/webapps/secondary</directory>
- <includes>
- <include>*.jsp</include>
- </includes>
- </sources>
+ <webXmlFragment>${project.build.directory}/secondary-jsp-servlet-definitions.xml</webXmlFragment>
</configuration>
</execution>
<execution>
<id>journal</id>
- <phase>generate-sources</phase>
+ <phase>process-classes</phase>
<goals>
- <goal>compile</goal>
+ <goal>jspc</goal>
</goals>
<configuration>
- <compile>false</compile>
+ <webAppSourceDirectory>${basedir}/src/main/webapps/journal</webAppSourceDirectory>
+ <packageRoot>org.apache.hadoop.hdfs.server.journalservice</packageRoot>
+ <includes>*.jsp</includes>
<workingDirectory>${project.build.directory}/generated-sources/java</workingDirectory>
- <webFragmentFile>${project.build.directory}/journal-jsp-servlet-definitions.xml</webFragmentFile>
- <packageName>org.apache.hadoop.hdfs.server.journalservice</packageName>
- <sources>
- <directory>${basedir}/src/main/webapps/journal</directory>
- <includes>
- <include>*.jsp</include>
- </includes>
- </sources>
+ <webXmlFragment>${project.build.directory}/journal-jsp-servlet-definitions.xml</webXmlFragment>
</configuration>
</execution>
<execution>
<id>datanode</id>
- <phase>generate-sources</phase>
+ <phase>process-classes</phase>
<goals>
- <goal>compile</goal>
+ <goal>jspc</goal>
</goals>
<configuration>
- <compile>false</compile>
+ <webAppSourceDirectory>${basedir}/src/main/webapps/datanode</webAppSourceDirectory>
+ <packageRoot>org.apache.hadoop.hdfs.server.datanode</packageRoot>
+ <includes>*.jsp</includes>
<workingDirectory>${project.build.directory}/generated-sources/java</workingDirectory>
- <webFragmentFile>${project.build.directory}/datanode-jsp-servlet-definitions.xml</webFragmentFile>
- <packageName>org.apache.hadoop.hdfs.server.datanode</packageName>
- <sources>
- <directory>${basedir}/src/main/webapps/datanode</directory>
- <includes>
- <include>*.jsp</include>
- </includes>
- </sources>
+ <webXmlFragment>${project.build.directory}/datanode-jsp-servlet-definitions.xml</webXmlFragment>
</configuration>
</execution>
</executions>
<dependencies>
<dependency>
- <groupId>org.codehaus.mojo.jspc</groupId>
- <artifactId>jspc-compiler-tomcat5</artifactId>
- <version>2.0-alpha-3</version>
- </dependency>
- <dependency>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-log4j12</artifactId>
- <version>1.4.1</version>
- </dependency>
- <dependency>
- <groupId>org.slf4j</groupId>
- <artifactId>jcl104-over-slf4j</artifactId>
- <version>1.4.1</version>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${project.version}</version>
</dependency>
</dependencies>
</plugin>
@@ -330,7 +285,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
</execution>
<execution>
<id>create-web-xmls</id>
- <phase>compile</phase>
+ <phase>process-classes</phase>
<goals>
<goal>run</goal>
</goals>
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/src/test/java/org/apache/hadoop/contrib/bkjournal/BKJMUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/src/test/java/org/apache/hadoop/contrib/bkjournal/BKJMUtil.java
index 32b0583..4930816 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/src/test/java/org/apache/hadoop/contrib/bkjournal/BKJMUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/src/test/java/org/apache/hadoop/contrib/bkjournal/BKJMUtil.java
@@ -156,6 +156,13 @@ int checkBookiesUp(int count, int timeout) throws Exception {
List<String> children = zkc.getChildren("/ledgers/available",
false);
mostRecentSize = children.size();
+ // TODO: BookKeeper 4.2.0 introduced "readonly" bookies,
+ // which skew the test bookie counts; it is unclear why
+ // setReadOnlyModeEnabled(false) does not have the
+ // backward-compatible effect hoped for.
+ if (children.contains("readonly")) {
+ mostRecentSize = children.size()-1;
+ }
if (LOG.isDebugEnabled()) {
LOG.debug("Found " + mostRecentSize + " bookies up, "
+ "waiting for " + count);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
index 0be46de..b2834b8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
@@ -171,7 +171,7 @@
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.util.VersionInfo;
-import org.mortbay.util.ajax.JSON;
+import org.eclipse.jetty.util.ajax.JSON;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
@@ -366,7 +366,7 @@ private void startInfoServer(Configuration conf) throws IOException {
conf, new AccessControlList(conf.get(DFS_ADMIN, " ")))
: new HttpServer("datanode", infoHost, tmpInfoPort, tmpInfoPort == 0,
conf, new AccessControlList(conf.get(DFS_ADMIN, " ")),
- secureResources.getListener());
+ secureResources.getListener(), null, secureResources.getServer());
LOG.info("Opened info server at " + infoHost + ":" + tmpInfoPort);
if (conf.getBoolean(DFS_HTTPS_ENABLE_KEY, false)) {
boolean needClientAuth = conf.getBoolean(DFS_CLIENT_HTTPS_NEED_AUTH_KEY,
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java
index 0fda306..77c6c82 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java
@@ -32,14 +32,15 @@
import org.apache.hadoop.http.HttpServer;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.ssl.SSLFactory;
-import org.mortbay.jetty.Connector;
-import org.mortbay.jetty.nio.SelectChannelConnector;
-import org.mortbay.jetty.security.SslSocketConnector;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.util.ssl.SslContextFactory;
import javax.net.ssl.SSLServerSocketFactory;
import com.google.common.annotations.VisibleForTesting;
+
/**
* Utility class to start a datanode in a secure cluster, first obtaining
* privileged resources before main startup and handing them to the datanode.
@@ -50,17 +51,21 @@
*/
public static class SecureResources {
private final ServerSocket streamingSocket;
- private final Connector listener;
+ private final ServerConnector listener;
+ private final Server server;
public SecureResources(ServerSocket streamingSocket,
- Connector listener) {
+ ServerConnector listener, Server server) {
this.streamingSocket = streamingSocket;
this.listener = listener;
+ this.server = server;
}
public ServerSocket getStreamingSocket() { return streamingSocket; }
- public Connector getListener() { return listener; }
+ public ServerConnector getListener() { return listener; }
+
+ public Server getServer() { return server; }
}
private String [] args;
@@ -94,6 +99,9 @@ public void start() throws Exception {
@VisibleForTesting
public static SecureResources getSecureResources(final SSLFactory sslFactory,
Configuration conf) throws Exception {
+ // Create a server
+ Server server = HttpServer.createServer(conf);
+
// Obtain secure port for data streaming to datanode
InetSocketAddress streamingAddr = DataNode.getStreamingAddr(conf);
int socketWriteTimeout = conf.getInt(DFSConfigKeys.DFS_DATANODE_SOCKET_WRITE_TIMEOUT_KEY,
@@ -110,22 +118,28 @@ public static SecureResources getSecureResources(final SSLFactory sslFactory,
}
// Obtain secure listener for web server
- Connector listener;
+ ServerConnector listener;
if (HttpConfig.isSecure()) {
try {
sslFactory.init();
} catch (GeneralSecurityException ex) {
throw new IOException(ex);
}
- SslSocketConnector sslListener = new SslSocketConnector() {
- @Override
- protected SSLServerSocketFactory createFactory() throws Exception {
- return sslFactory.createSSLServerSocketFactory();
- }
+ SslContextFactory sslContextFactory = new SslContextFactory(conf.get("ssl.server.keystore.location",""));
+ sslContextFactory.setKeyStorePassword(conf.get("ssl.server.keystore.password",""));
+ if (sslFactory.isClientCertRequired()) {
+ sslContextFactory.setTrustStorePath(conf.get("ssl.server.truststore.location",""));
+ sslContextFactory.setTrustStorePassword(conf.get("ssl.server.truststore.password",""));
+ sslContextFactory.setTrustStoreType(conf.get("ssl.server.truststore.type", "jks"));
+ }
+ ServerConnector sslListener = new ServerConnector(server, sslContextFactory) {
+ protected SSLServerSocketFactory createFactory() throws Exception {
+ return sslFactory.createSSLServerSocketFactory();
+ }
};
listener = sslListener;
} else {
- listener = HttpServer.createDefaultChannelConnector();
+ listener = HttpServer.createDefaultChannelConnector(server);
}
InetSocketAddress infoSocAddr = DataNode.getInfoAddr(conf);
@@ -138,7 +152,7 @@ protected SSLServerSocketFactory createFactory() throws Exception {
"context. Needed " + streamingAddr.getPort() + ", got " + ss.getLocalPort());
}
System.err.println("Successfully obtained privileged resources (streaming port = "
- + ss + " ) (http listener port = " + listener.getConnection() +")");
+ + ss + " ) (http listener port = " + listener.getLocalPort() +")");
if ((ss.getLocalPort() > 1023 || listener.getPort() > 1023) &&
UserGroupInformation.isSecurityEnabled()) {
@@ -146,7 +160,7 @@ protected SSLServerSocketFactory createFactory() throws Exception {
}
System.err.println("Opened streaming server at " + streamingAddr);
System.err.println("Opened info server at " + infoSocAddr);
- return new SecureResources(ss, listener);
+ return new SecureResources(ss, listener, server);
}
}
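
The Jetty 6 SslSocketConnector let subclasses override socket-factory creation, which is why the old code wrapped sslFactory; in Jetty 9 the keystore and truststore details are configured on an SslContextFactory that is handed to a ServerConnector (the createFactory override retained in the hunk above is vestigial: ServerConnector never calls such a method). A minimal sketch of the Jetty 9 arrangement, with literal paths and passwords standing in for the ssl.server.* configuration values:

    import org.eclipse.jetty.server.Server;
    import org.eclipse.jetty.server.ServerConnector;
    import org.eclipse.jetty.util.ssl.SslContextFactory;

    public class SslConnectorSketch {
      public static void main(String[] args) throws Exception {
        Server server = new Server();

        // Placeholder values; the patch reads these from ssl.server.* keys.
        SslContextFactory ssl = new SslContextFactory("/etc/example/keystore.jks");
        ssl.setKeyStorePassword("changeit");
        ssl.setTrustStorePath("/etc/example/truststore.jks");
        ssl.setTrustStorePassword("changeit");
        ssl.setTrustStoreType("jks");
        ssl.setNeedClientAuth(true);  // analogous to isClientCertRequired()

        // The configured factory replaces the SslSocketConnector subclass.
        ServerConnector https = new ServerConnector(server, ssl);
        https.setPort(50475);  // placeholder port
        server.addConnector(https);

        server.start();
        server.join();
      }
    }
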
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
index 8c114c3..7544cd1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
@@ -201,7 +201,7 @@
import org.apache.hadoop.util.DataChecksum;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.util.VersionInfo;
-import org.mortbay.util.ajax.JSON;
+import org.eclipse.jetty.util.ajax.JSON;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StreamFile.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StreamFile.java
index aa4ba5d..5b945ba 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StreamFile.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StreamFile.java
@@ -39,7 +39,7 @@
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.ServletUtil;
-import org.mortbay.jetty.InclusiveByteRange;
+import org.eclipse.jetty.server.InclusiveByteRange;
@InterfaceAudience.Private
public class StreamFile extends DfsServlet {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
index f251e34..bed9d58 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
@@ -47,7 +47,7 @@
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.util.DataChecksum;
import org.apache.hadoop.util.StringUtils;
-import org.mortbay.util.ajax.JSON;
+import org.eclipse.jetty.util.ajax.JSON;
/** JSON Utilities */
public class JsonUtil {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
index 5b32826..f130da1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
@@ -82,7 +82,7 @@
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSelector;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.StringUtils;
-import org.mortbay.util.ajax.JSON;
+import org.eclipse.jetty.util.ajax.JSON;
import com.google.common.base.Charsets;
import com.google.common.collect.Lists;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java
index cf624b7..6ae1a20 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java
@@ -34,7 +34,7 @@
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.util.VersionInfo;
import org.junit.Test;
-import org.mortbay.util.ajax.JSON;
+import org.eclipse.jetty.util.ajax.JSON;
/**
* Class for testing {@link NameNodeMXBean} implementation
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java
index daaa6d8..683f414 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java
@@ -46,7 +46,7 @@
import org.apache.hadoop.net.NetUtils;
import org.junit.Test;
import org.mockito.Mockito;
-import org.mortbay.jetty.InclusiveByteRange;
+import org.eclipse.jetty.server.InclusiveByteRange;
/*
* Mock input stream class that always outputs the current position of the stream.
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java
index aa2393d..432f92e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java
@@ -27,7 +27,7 @@
import org.apache.hadoop.util.Time;
import org.junit.Assert;
import org.junit.Test;
-import org.mortbay.util.ajax.JSON;
+import org.eclipse.jetty.util.ajax.JSON;
public class TestJsonUtil {
static FileStatus toFileStatus(HdfsFileStatus f, String parent) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/MiniDFSClusterManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/MiniDFSClusterManager.java
index 7029f42..c7023c9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/MiniDFSClusterManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/MiniDFSClusterManager.java
@@ -38,7 +38,7 @@
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
-import org.mortbay.util.ajax.JSON;
+import org.eclipse.jetty.util.ajax.JSON;
/**
* This class drives the creation of a mini-cluster on the local machine. By
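
Every org.mortbay.util.ajax.JSON import in this patch maps one-to-one onto org.eclipse.jetty.util.ajax.JSON; the static helpers keep their signatures, so only the import line changes. A quick round-trip sketch:

    import java.util.LinkedHashMap;
    import java.util.Map;
    import org.eclipse.jetty.util.ajax.JSON;

    public class JettyJsonSketch {
      public static void main(String[] args) {
        Map<String, Object> status = new LinkedHashMap<String, Object>();
        status.put("nodes", 3L);
        status.put("state", "RUNNING");

        // Same static API as the old org.mortbay.util.ajax.JSON.
        String text = JSON.toString(status);  // {"nodes":3,"state":"RUNNING"}
        Object parsed = JSON.parse(text);     // objects parse back to a Map
        System.out.println(text + " -> " + parsed);
      }
    }
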
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/JobEndNotifier.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/JobEndNotifier.java
index 518305f..87be820 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/JobEndNotifier.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/JobEndNotifier.java
@@ -29,7 +29,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
-import org.mortbay.log.Log;
+import org.eclipse.jetty.util.log.Log;
/**
* <p>This class handles job end notification. Submitters of jobs can choose to
@@ -97,10 +97,10 @@ public void setConf(Configuration conf) {
int port = Integer.parseInt(portConf);
proxyToUse = new Proxy(proxyType,
new InetSocketAddress(hostname, port));
- Log.info("Job end notification using proxy type \"" + proxyType +
+ Log.getRootLogger().info("Job end notification using proxy type \"" + proxyType +
"\" hostname \"" + hostname + "\" and port \"" + port + "\"");
} catch(NumberFormatException nfe) {
- Log.warn("Job end notification couldn't parse configured proxy's port "
+ Log.getRootLogger().warn("Job end notification couldn't parse configured proxy's port "
+ portConf + ". Not going to use a proxy");
}
}
@@ -118,23 +118,23 @@ public Configuration getConf() {
protected boolean notifyURLOnce() {
boolean success = false;
try {
- Log.info("Job end notification trying " + urlToNotify);
+ Log.getRootLogger().info("Job end notification trying " + urlToNotify);
HttpURLConnection conn =
(HttpURLConnection) urlToNotify.openConnection(proxyToUse);
conn.setConnectTimeout(5*1000);
conn.setReadTimeout(5*1000);
conn.setAllowUserInteraction(false);
if(conn.getResponseCode() != HttpURLConnection.HTTP_OK) {
- Log.warn("Job end notification to " + urlToNotify +" failed with code: "
+ Log.getRootLogger().warn("Job end notification to " + urlToNotify +" failed with code: "
+ conn.getResponseCode() + " and message \"" + conn.getResponseMessage()
+"\"");
}
else {
success = true;
- Log.info("Job end notification to " + urlToNotify + " succeeded");
+ Log.getRootLogger().info("Job end notification to " + urlToNotify + " succeeded");
}
} catch(IOException ioe) {
- Log.warn("Job end notification to " + urlToNotify + " failed", ioe);
+ Log.getRootLogger().warn("Job end notification to " + urlToNotify + " failed", ioe);
}
return success;
}
@@ -149,7 +149,7 @@ public void notify(JobReport jobReport)
throws InterruptedException {
// Do we need job-end notification?
if (userUrl == null) {
- Log.info("Job end notification URL not set, skipping.");
+ Log.getRootLogger().info("Job end notification URL not set, skipping.");
return;
}
@@ -165,23 +165,23 @@ public void notify(JobReport jobReport)
try {
urlToNotify = new URL(userUrl);
} catch (MalformedURLException mue) {
- Log.warn("Job end notification couldn't parse " + userUrl, mue);
+ Log.getRootLogger().warn("Job end notification couldn't parse " + userUrl, mue);
return;
}
// Send notification
boolean success = false;
while (numTries-- > 0 && !success) {
- Log.info("Job end notification attempts left " + numTries);
+ Log.getRootLogger().info("Job end notification attempts left " + numTries);
success = notifyURLOnce();
if (!success) {
Thread.sleep(waitInterval);
}
}
if (!success) {
- Log.warn("Job end notification failed to notify : " + urlToNotify);
+ Log.getRootLogger().warn("Job end notification failed to notify : " + urlToNotify);
} else {
- Log.info("Job end notification succeeded for " + jobReport.getJobId());
+ Log.getRootLogger().info("Job end notification succeeded for " + jobReport.getJobId());
}
}
}
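
Jetty 9 removed the static convenience methods on org.eclipse.jetty.util.log.Log, which is what forces every Log.info/Log.warn above through Log.getRootLogger(). Fetching a named logger once would be the more conventional form; a sketch (class name and messages are illustrative):

    import org.eclipse.jetty.util.log.Log;
    import org.eclipse.jetty.util.log.Logger;

    public class JettyLogSketch {
      // One logger per class instead of repeated getRootLogger() calls.
      private static final Logger LOG = Log.getLogger(JettyLogSketch.class);

      public static void main(String[] args) {
        LOG.info("Job end notification trying {}", "http://example.test/notify");
        LOG.warn("Job end notification failed", new RuntimeException("simulated"));
      }
    }
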
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java
index 0b23c95..b08b854 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java
@@ -238,7 +238,7 @@ public void testJobsQueryStateNone() throws JSONException, Exception {
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
- assertEquals("jobs is not null", JSONObject.NULL, json.get("jobs"));
+ assertEquals("jobs is not None", 0, json.getJSONObject("jobs").length());
}
@Test
@@ -304,7 +304,7 @@ public void testJobsQueryUserNone() throws JSONException, Exception {
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
- assertEquals("jobs is not null", JSONObject.NULL, json.get("jobs"));
+ assertEquals("jobs is not None", 0, json.getJSONObject("jobs").length());
}
@Test
@@ -389,7 +389,7 @@ public void testJobsQueryQueueNonExist() throws JSONException, Exception {
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
- assertEquals("jobs is not null", JSONObject.NULL, json.get("jobs"));
+ assertEquals("jobs is not None", 0, json.getJSONObject("jobs").length());
}
@Test
@@ -421,7 +421,7 @@ public void testJobsQueryStartTimeBegin() throws JSONException, Exception {
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
- assertEquals("jobs is not null", JSONObject.NULL, json.get("jobs"));
+ assertEquals("jobs is not None", 0, json.getJSONObject("jobs").length());
}
@Test
@@ -741,7 +741,7 @@ public void testJobsQueryFinishTimeEnd() throws JSONException, Exception {
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
- assertEquals("jobs is not null", JSONObject.NULL, json.get("jobs"));
+ assertEquals("jobs is not None", 0, json.getJSONObject("jobs").length());
}
@Test
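
The assertion rewrites in this test (and in the node-manager and resource-manager tests later in the patch) track a marshalling change in the upgraded REST stack: an empty jobs/apps collection now comes back as an empty JSON object rather than JSON null. A sketch of what the old and new assertions each accept, using the same org.json-style API as the tests:

    import org.json.JSONObject;

    public class EmptyJsonSketch {
      public static void main(String[] args) throws Exception {
        // Old stack: {"jobs":null} -> json.get("jobs") is JSONObject.NULL
        JSONObject before = new JSONObject("{\"jobs\":null}");
        System.out.println(JSONObject.NULL.equals(before.get("jobs")));  // true

        // New stack: {"jobs":{}} -> an empty nested object
        JSONObject after = new JSONObject("{\"jobs\":{}}");
        System.out.println(after.getJSONObject("jobs").length() == 0);   // true
      }
    }
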
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java
index d2ea74e..32d6b0e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java
@@ -18,9 +18,10 @@
package org.apache.hadoop.mapred;
-import org.mortbay.jetty.Server;
-import org.mortbay.jetty.servlet.Context;
-import org.mortbay.jetty.servlet.ServletHolder;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.Text;
@@ -69,7 +70,7 @@ private void startHttpServer() throws Exception {
}
webServer = new Server(0);
- Context context = new Context(webServer, contextPath);
+ ServletContextHandler context = new ServletContextHandler(webServer, contextPath);
// create servlet handler
context.addServlet(new ServletHolder(new NotificationServlet()),
@@ -77,7 +78,7 @@ private void startHttpServer() throws Exception {
// Start webServer
webServer.start();
- port = webServer.getConnectors()[0].getLocalPort();
+ port = ((ServerConnector) webServer.getConnectors()[0]).getLocalPort();
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MiniHadoopClusterManager.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MiniHadoopClusterManager.java
index 35b5e30..91964bd 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MiniHadoopClusterManager.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MiniHadoopClusterManager.java
@@ -45,7 +45,7 @@
import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.MiniYARNCluster;
-import org.mortbay.util.ajax.JSON;
+import org.eclipse.jetty.util.ajax.JSON;
/**
* This class drives the creation of a mini-cluster on the local machine. By
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml
index 1581bab..48d0f6b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml
@@ -43,8 +43,8 @@
<artifactId>avro</artifactId>
<exclusions>
<exclusion>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>jetty</artifactId>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-server</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.ant</groupId>
@@ -78,16 +78,8 @@
<artifactId>commons-el</artifactId>
</exclusion>
<exclusion>
- <groupId>tomcat</groupId>
- <artifactId>jasper-runtime</artifactId>
- </exclusion>
- <exclusion>
- <groupId>tomcat</groupId>
- <artifactId>jasper-compiler</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>jsp-2.1-jetty</artifactId>
+ <groupId>org.apache.tomcat</groupId>
+ <artifactId>tomcat-jasper</artifactId>
</exclusion>
</exclusions>
</dependency>
@@ -158,6 +150,11 @@
<artifactId>commons-lang</artifactId>
<scope>provided</scope>
</dependency>
+ <dependency>
+ <groupId>commons-collections</groupId>
+ <artifactId>commons-collections</artifactId>
+ <scope>provided</scope>
+ </dependency>
</dependencies>
<build>
diff --git a/hadoop-mapreduce-project/pom.xml b/hadoop-mapreduce-project/pom.xml
index 14686e7..84cb12c 100644
--- a/hadoop-mapreduce-project/pom.xml
+++ b/hadoop-mapreduce-project/pom.xml
@@ -53,8 +53,8 @@
<artifactId>avro</artifactId>
<exclusions>
<exclusion>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>jetty</artifactId>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-server</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.ant</groupId>
@@ -88,16 +88,8 @@
<artifactId>commons-el</artifactId>
</exclusion>
<exclusion>
- <groupId>tomcat</groupId>
- <artifactId>jasper-runtime</artifactId>
- </exclusion>
- <exclusion>
- <groupId>tomcat</groupId>
- <artifactId>jasper-compiler</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>jsp-2.1-jetty</artifactId>
+ <groupId>org.apache.tomcat</groupId>
+ <artifactId>tomcat-jasper</artifactId>
</exclusion>
</exclusions>
</dependency>
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index 0e45314..b19e788 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -290,8 +290,8 @@
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
- <artifactId>commons-math</artifactId>
- <version>2.1</version>
+ <artifactId>commons-math3</artifactId>
+ <version>3.1.1</version>
</dependency>
<dependency>
<groupId>xmlenc</groupId>
@@ -316,23 +316,17 @@
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
- <version>2.5</version>
+ <version>3.0-alpha-1</version>
</dependency>
<dependency>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>jetty</artifactId>
- <version>6.1.26</version>
- <exclusions>
- <exclusion>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>servlet-api</artifactId>
- </exclusion>
- </exclusions>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-server</artifactId>
+ <version>9.0.4.v20130625</version>
</dependency>
<dependency>
- <groupId>org.mortbay.jetty</groupId>
+ <groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
- <version>6.1.26</version>
+ <version>9.0.4.v20130625</version>
</dependency>
<dependency>
@@ -343,12 +337,12 @@
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
- <version>1.8</version>
+ <version>1.17.1</version>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
- <version>1.8</version>
+ <version>1.17.1</version>
<exclusions>
<exclusion>
<groupId>javax.xml.stream</groupId>
@@ -359,7 +353,12 @@
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-server</artifactId>
- <version>1.8</version>
+ <version>1.17.1</version>
+ </dependency>
+ <dependency>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-servlet</artifactId>
+ <version>1.17.1</version>
</dependency>
<dependency>
@@ -367,6 +366,12 @@
<artifactId>guice</artifactId>
<version>3.0</version>
</dependency>
+
+ <dependency>
+ <groupId>cglib</groupId>
+ <artifactId>cglib</artifactId>
+ <version>2.2</version>
+ </dependency>
<dependency>
<groupId>com.google.inject.extensions</groupId>
@@ -377,19 +382,19 @@
<dependency>
<groupId>com.sun.jersey.contribs</groupId>
<artifactId>jersey-guice</artifactId>
- <version>1.8</version>
+ <version>1.17.1</version>
</dependency>
<dependency>
<groupId>com.sun.jersey.jersey-test-framework</groupId>
<artifactId>jersey-test-framework-core</artifactId>
- <version>1.8</version>
+ <version>1.17.1</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.sun.jersey.jersey-test-framework</groupId>
<artifactId>jersey-test-framework-grizzly2</artifactId>
- <version>1.8</version>
+ <version>1.17.1</version>
</dependency>
<dependency>
@@ -405,34 +410,9 @@
</dependency>
<dependency>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>jetty-servlet-tester</artifactId>
- <version>6.1.26</version>
- </dependency>
- <dependency>
- <groupId>tomcat</groupId>
- <artifactId>jasper-compiler</artifactId>
- <version>5.5.23</version>
- <exclusions>
- <exclusion>
- <groupId>javax.servlet</groupId>
- <artifactId>jsp-api</artifactId>
- </exclusion>
- <exclusion>
- <groupId>ant</groupId>
- <artifactId>ant</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>tomcat</groupId>
- <artifactId>jasper-runtime</artifactId>
- <version>5.5.23</version>
- </dependency>
- <dependency>
- <groupId>javax.servlet.jsp</groupId>
- <artifactId>jsp-api</artifactId>
- <version>2.1</version>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>test-jetty-servlet</artifactId>
+ <version>9.0.4.v20130625</version>
</dependency>
<dependency>
<groupId>commons-el</groupId>
@@ -440,6 +420,11 @@
<version>1.0</version>
</dependency>
<dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-jspc-maven-plugin</artifactId>
+ <version>9.0.4.v20130625</version>
+ </dependency>
+ <dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<version>1.1.1</version>
@@ -493,7 +478,7 @@
<dependency>
<groupId>net.java.dev.jets3t</groupId>
<artifactId>jets3t</artifactId>
- <version>0.6.1</version>
+ <version>0.9.0</version>
</dependency>
<dependency>
<groupId>org.apache.mina</groupId>
@@ -518,7 +503,7 @@
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
- <version>4.8.2</version>
+ <version>4.10</version>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
@@ -608,7 +593,7 @@
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
- <version>2.4.0a</version>
+ <version>2.5.0</version>
</dependency>
<dependency>
<groupId>commons-daemon</groupId>
@@ -654,7 +639,7 @@
<dependency>
<groupId>org.apache.bookkeeper</groupId>
<artifactId>bookkeeper-server</artifactId>
- <version>4.0.0</version>
+ <version>4.2.1</version>
<scope>compile</scope>
</dependency>
<dependency>
@@ -747,9 +732,9 @@
<version>1.5.3</version>
</plugin>
<plugin>
- <groupId>org.codehaus.mojo.jspc</groupId>
- <artifactId>jspc-maven-plugin</artifactId>
- <version>2.0-alpha-3</version>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-jspc-maven-plugin</artifactId>
+ <version>9.0.4.v20130625</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
diff --git a/hadoop-tools/hadoop-extras/pom.xml b/hadoop-tools/hadoop-extras/pom.xml
index c6406aa..8e5f7ee 100644
--- a/hadoop-tools/hadoop-extras/pom.xml
+++ b/hadoop-tools/hadoop-extras/pom.xml
@@ -89,6 +89,11 @@
<type>test-jar</type>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>cglib</groupId>
+ <artifactId>cglib</artifactId>
+ <scope>test</scope>
+ </dependency>
</dependencies>
<build>
diff --git a/hadoop-tools/hadoop-streaming/pom.xml b/hadoop-tools/hadoop-streaming/pom.xml
index d6a0337..f43c1cd 100644
--- a/hadoop-tools/hadoop-streaming/pom.xml
+++ b/hadoop-tools/hadoop-streaming/pom.xml
@@ -95,6 +95,11 @@
<type>test-jar</type>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>cglib</groupId>
+ <artifactId>cglib</artifactId>
+ <scope>test</scope>
+ </dependency>
</dependencies>
<build>
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java
index 425d45b..13f2d8b 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java
@@ -195,7 +195,7 @@ public void setup() {
}
HttpServer server =
new HttpServer(name, bindAddress, port, findPort, conf,
- new AdminACLsManager(conf).getAdminAcl(), null, webapp.getServePathSpecs());
+ new AdminACLsManager(conf).getAdminAcl(), null, webapp.getServePathSpecs(), null);
for(ServletStruct struct: servlets) {
server.addServlet(struct.name, struct.spec, struct.clazz);
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/example/MyApp.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/example/MyApp.java
index fbbf4f8..c502e18 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/example/MyApp.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/example/MyApp.java
@@ -32,7 +32,7 @@
public class MyApp {
// This is an app API
- public String anyAPI() { return "anything ☁, really!"; }
+ public String anyAPI() { return "anything, really!"; }
// Note this is static so it can be in any files.
public static class MyController extends Controller {
@@ -46,7 +46,7 @@
@Override
public void index() {
- set("anything", "something ☯");
+ set("anything", "something");
}
public void anythingYouWant() {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java
index 5fdd957..47af85b 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java
@@ -57,7 +57,7 @@
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.PRE;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
-import org.mortbay.log.Log;
+import org.eclipse.jetty.util.log.Log;
import com.google.inject.Inject;
@@ -308,7 +308,7 @@ private void printLogs(Block html, ContainerId containerId,
try {
logDir = new URI(logDir).getPath();
} catch (URISyntaxException e) {
- Log.warn(e.getMessage());
+ Log.getRootLogger().warn(e.getMessage());
}
String appIdStr = ConverterUtils.toString(containerId
.getApplicationAttemptId().getApplicationId());
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalResourcesTrackerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalResourcesTrackerImpl.java
index 0e0a472..762e018 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalResourcesTrackerImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalResourcesTrackerImpl.java
@@ -50,7 +50,7 @@
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceRequestEvent;
import org.apache.hadoop.yarn.util.BuilderUtils;
import org.junit.Test;
-import org.mortbay.log.Log;
+import org.eclipse.jetty.util.log.Log;
public class TestLocalResourcesTrackerImpl {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
index 168f619..81ff171 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
@@ -84,7 +84,7 @@
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
-import org.mortbay.util.MultiException;
+import org.eclipse.jetty.util.MultiException;
//@Ignore
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesApps.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesApps.java
index 39764e6..9e4427a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesApps.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesApps.java
@@ -174,7 +174,7 @@ public void testNodeAppsNone() throws JSONException, Exception {
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
- assertEquals("apps isn't NULL", JSONObject.NULL, json.get("apps"));
+ assertEquals("apps isn't None",0,json.getJSONObject("apps").length());
}
private HashMap<String, String> addAppContainers(Application app) {
@@ -281,7 +281,7 @@ public void testNodeAppsUserNone() throws JSONException, Exception {
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
- assertEquals("apps is not null", JSONObject.NULL, json.get("apps"));
+ assertEquals("apps is not None", 0, json.getJSONObject("apps").length());
}
@Test
@@ -363,7 +363,7 @@ public void testNodeAppsStateNone() throws JSONException, Exception {
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
- assertEquals("apps is not null", JSONObject.NULL, json.get("apps"));
+ assertEquals("apps is not None", 0, json.getJSONObject("apps").length());
}
@Test
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesContainers.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesContainers.java
index 1f678d4..ba27fdb 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesContainers.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesContainers.java
@@ -175,7 +175,7 @@ public void testNodeContainersNone() throws JSONException, Exception {
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
- assertEquals("apps isn't NULL", JSONObject.NULL, json.get("containers"));
+ assertEquals("apps isn't None", 0, json.getJSONObject("containers").length());
}
private HashMap<String, String> addAppContainers(Application app) {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMNMInfo.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMNMInfo.java
index 0db42e4..988c21c 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMNMInfo.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMNMInfo.java
@@ -33,7 +33,7 @@
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerNodeReport;
-import org.mortbay.util.ajax.JSON;
+import org.eclipse.jetty.util.ajax.JSON;
/**
* JMX bean listing statuses of all node managers.
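As with MultiException, the JSON helper moved packages without changing its static API, so RMNMInfo only needs the new import. A minimal sketch of how the Jetty utility serializes a node list into the kind of string a JMX attribute can expose; the map keys here are illustrative, not the bean's actual fields:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import org.eclipse.jetty.util.ajax.JSON;

    public class JsonDemo {
        public static void main(String[] args) {
            List<Map<String, Object>> nodes = new ArrayList<Map<String, Object>>();
            Map<String, Object> node = new HashMap<String, Object>();
            node.put("HostName", "node-1.example.com"); // hypothetical keys
            node.put("NumContainers", 3);
            nodes.add(node);
            // Same static call as before the migration, new package:
            System.out.println(JSON.toString(nodes));
            // e.g. [{"HostName":"node-1.example.com","NumContainers":3}]
        }
    }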
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesApps.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesApps.java
index b81237d..183af03 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesApps.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesApps.java
@@ -252,7 +252,7 @@ public void testAppsQueryStateNone() throws JSONException, Exception {
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
- assertEquals("apps is not null", JSONObject.NULL, json.get("apps"));
+ assertEquals("apps is not None", 0, json.getJSONObject("apps").length());
rm.stop();
}
@@ -331,7 +331,7 @@ public void testAppsQueryFinalStatusNone() throws JSONException, Exception {
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
- assertEquals("apps is not null", JSONObject.NULL, json.get("apps"));
+ assertEquals("apps is not None", 0, json.getJSONObject("apps").length());
rm.stop();
}
@@ -507,7 +507,7 @@ public void testAppsQueryStartEnd() throws JSONException, Exception {
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
- assertEquals("apps is not null", JSONObject.NULL, json.get("apps"));
+ assertEquals("apps is not None", 0, json.getJSONObject("apps").length());
rm.stop();
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesNodes.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesNodes.java
index 533ea11..f464b97 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesNodes.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesNodes.java
@@ -201,7 +201,7 @@ public void testNodesQueryStateNone() throws JSONException, Exception {
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
- assertEquals("nodes is not null", JSONObject.NULL, json.get("nodes"));
+ assertEquals("nodes is not None", 0, json.getJSONObject("nodes").length());
}
@Test
@@ -371,7 +371,7 @@ public void testNodesQueryHealthyAndState() throws JSONException, Exception {
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
- assertEquals("nodes is not null", JSONObject.NULL, json.get("nodes"));
+ assertEquals("nodes is not None", 0, json.getJSONObject("nodes").length());
}
@Test
@@ -388,7 +388,7 @@ public void testNodesQueryHealthyFalse() throws JSONException, Exception {
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
- assertEquals("nodes is not null", JSONObject.NULL, json.get("nodes"));
+ assertEquals("nodes is not None", 0, json.getJSONObject("nodes").length());
}
@Test
diff --git a/hadoop-yarn-project/hadoop-yarn/pom.xml b/hadoop-yarn-project/hadoop-yarn/pom.xml
index dc5a574..41e936f 100644
--- a/hadoop-yarn-project/hadoop-yarn/pom.xml
+++ b/hadoop-yarn-project/hadoop-yarn/pom.xml
@@ -43,16 +43,8 @@
<artifactId>commons-el</artifactId>
</exclusion>
<exclusion>
- <groupId>tomcat</groupId>
- <artifactId>jasper-runtime</artifactId>
- </exclusion>
- <exclusion>
- <groupId>tomcat</groupId>
- <artifactId>jasper-compiler</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>jsp-2.1-jetty</artifactId>
+ <groupId>org.apache.tomcat</groupId>
+ <artifactId>tomcat-jasper</artifactId>
</exclusion>
</exclusions>
</dependency>
@@ -111,6 +103,11 @@
<artifactId>guice</artifactId>
</dependency>
<dependency>
+ <groupId>cglib</groupId>
+ <artifactId>cglib</artifactId>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
<groupId>com.sun.jersey.jersey-test-framework</groupId>
<artifactId>jersey-test-framework-core</artifactId>
<scope>test</scope>
diff --git a/hadoop-yarn-project/pom.xml b/hadoop-yarn-project/pom.xml
index d525d5a..79e5d54 100644
--- a/hadoop-yarn-project/pom.xml
+++ b/hadoop-yarn-project/pom.xml
@@ -51,8 +51,8 @@
<artifactId>avro</artifactId>
<exclusions>
<exclusion>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>jetty</artifactId>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-server</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.ant</groupId>
@@ -86,16 +86,8 @@
<artifactId>commons-el</artifactId>
</exclusion>
<exclusion>
- <groupId>tomcat</groupId>
- <artifactId>jasper-runtime</artifactId>
- </exclusion>
- <exclusion>
- <groupId>tomcat</groupId>
- <artifactId>jasper-compiler</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>jsp-2.1-jetty</artifactId>
+ <groupId>org.apache.tomcat</groupId>
+ <artifactId>tomcat-jasper</artifactId>
</exclusion>
</exclusions>
</dependency>
@@ -133,6 +125,10 @@
<artifactId>guice</artifactId>
</dependency>
<dependency>
+ <groupId>cglib</groupId>
+ <artifactId>cglib</artifactId>
+ </dependency>
+ <dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-server</artifactId>
</dependency>