Upgrade to version 2.7.6

Mike Miller 2018-06-27 13:11:44 -04:00
parent 7229ada64b
commit cd7389bf2c
5 changed files with 174 additions and 85 deletions

.gitignore

@@ -1,3 +1,3 @@
/tarballs/
/clog
-/hadoop-2.7.3-src.tar.gz
+/hadoop-2.7.6-src.tar.gz

@@ -115,7 +115,7 @@ diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project
index 7cf67a3..ef2733f 100644
--- a/hadoop-common-project/hadoop-common/pom.xml
+++ b/hadoop-common-project/hadoop-common/pom.xml
@@ -89,25 +89,34 @@
@@ -89,29 +89,34 @@
<scope>compile</scope>
</dependency>
<dependency>
@@ -140,25 +140,27 @@ index 7cf67a3..ef2733f 100644
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-servlet</artifactId>
+ <version>8.1</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
<scope>compile</scope>
</dependency>
-
<dependency>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>jetty-sslengine</artifactId>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-webapp</artifactId>
+ <version>8.1</version>
<scope>compile</scope>
</dependency>
+
<dependency>
- <groupId>javax.servlet.jsp</groupId>
- <artifactId>jsp-api</artifactId>
- <scope>runtime</scope>
+ <groupId>org.apache.tomcat</groupId>
+ <artifactId>tomcat-el-api</artifactId>
+ <version>7.0.37</version>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
@@ -125,6 +134,11 @@
<artifactId>jersey-server</artifactId>
<scope>compile</scope>
@@ -493,7 +495,15 @@ index 2f28d08..3ac7086 100644
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;
@@ -69,29 +70,30 @@
@@ -59,7 +60,6 @@ import org.apache.hadoop.security.authentication.util.FileSignerSecretProvider;
import org.apache.hadoop.security.authentication.util.RandomSignerSecretProvider;
import org.apache.hadoop.security.authentication.util.SignerSecretProvider;
import org.apache.hadoop.security.authentication.util.ZKSignerSecretProvider;
-import org.apache.hadoop.security.ssl.SslSelectChannelConnectorSecure;
import org.apache.hadoop.jmx.JMXJsonServlet;
import org.apache.hadoop.log.LogLevel;
import org.apache.hadoop.metrics.MetricsServlet;
@@ -70,29 +71,31 @@
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.Shell;
@@ -509,7 +519,7 @@ index 2f28d08..3ac7086 100644
-import org.mortbay.jetty.handler.HandlerCollection;
-import org.mortbay.jetty.handler.RequestLogHandler;
-import org.mortbay.jetty.nio.SelectChannelConnector;
-import org.mortbay.jetty.security.SslSocketConnector;
-import org.mortbay.jetty.security.SslSelectChannelConnector;
-import org.mortbay.jetty.servlet.AbstractSessionManager;
-import org.mortbay.jetty.servlet.Context;
-import org.mortbay.jetty.servlet.DefaultServlet;
@@ -532,6 +542,7 @@ index 2f28d08..3ac7086 100644
+import org.eclipse.jetty.server.handler.HandlerCollection;
+import org.eclipse.jetty.server.handler.RequestLogHandler;
+import org.eclipse.jetty.server.nio.SelectChannelConnector;
+import org.eclipse.jetty.server.ssl.SslSelectChannelConnector;
+import org.eclipse.jetty.server.session.AbstractSessionManager;
+import org.eclipse.jetty.server.ssl.SslSocketConnector;
+import org.eclipse.jetty.servlet.DefaultServlet;
@@ -558,40 +569,42 @@ index 2f28d08..3ac7086 100644
protected final List<String> filterNames = new ArrayList<>();
static final String STATE_DESCRIPTION_ALIVE = " - alive";
static final String STATE_DESCRIPTION_NOT_LIVE = " - not live";
@@ -299,22 +301,23 @@ public HttpServer2 build() throws IOException {
if ("http".equals(scheme)) {
listener = HttpServer2.createDefaultChannelConnector();
} else if ("https".equals(scheme)) {
- SslSocketConnector c = new SslSocketConnectorSecure();
- c.setHeaderBufferSize(1024*64);
- c.setNeedClientAuth(needsClientAuth);
- c.setKeyPassword(keyPassword);
+ // Jetty 8+ moved JKS config to SslContextFactory
+ SslContextFactory scf = new SslContextFactory();
+ scf.setNeedClientAuth(needsClientAuth);
+ scf.setKeyManagerPassword(keyPassword);
@@ -320,23 +322,24 @@ public final class HttpServer2 implements FilterContainer {
}
if (keyStore != null) {
- c.setKeystore(keyStore);
- c.setKeystoreType(keyStoreType);
- c.setPassword(keyStorePassword);
+ scf.setKeyStorePath(keyStore);
+ scf.setKeyStoreType(keyStoreType);
+ scf.setKeyStorePassword(keyStorePassword);
}
private Connector createHttpsChannelConnector() {
- SslSelectChannelConnector c = new SslSelectChannelConnectorSecure();
- configureChannelConnector(c);
-
- c.setNeedClientAuth(needsClientAuth);
- c.setKeyPassword(keyPassword);
+ // Jetty 8+ moved JKS config to SslContextFactory
+ SslContextFactory scf = new SslContextFactory();
+ scf.setNeedClientAuth(needsClientAuth);
+ scf.setKeyManagerPassword(keyPassword);
if (trustStore != null) {
- c.setTruststore(trustStore);
- c.setTruststoreType(trustStoreType);
- c.setTrustPassword(trustStorePassword);
+ scf.setTrustStore(trustStore);
+ scf.setTrustStoreType(trustStoreType);
+ scf.setTrustStorePassword(trustStorePassword);
}
+ SslSocketConnector c = new SslSocketConnector(scf);
listener = c;
if (keyStore != null) {
- c.setKeystore(keyStore);
- c.setKeystoreType(keyStoreType);
- c.setPassword(keyStorePassword);
+ scf.setKeyStorePath(keyStore);
+ scf.setKeyStoreType(keyStoreType);
+ scf.setKeyStorePassword(keyStorePassword);
}
} else {
if (trustStore != null) {
- c.setTruststore(trustStore);
- c.setTruststoreType(trustStoreType);
- c.setTrustPassword(trustStorePassword);
+ scf.setTrustStore(trustStore);
+ scf.setTrustStoreType(trustStoreType);
+ scf.setTrustStorePassword(trustStorePassword);
}
+ SslSelectChannelConnector c = new SslSelectChannelConnector(scf);
+ configureChannelConnector(c);
if(null != excludeCiphers && !excludeCiphers.isEmpty()) {
c.setExcludeCipherSuites(excludeCiphers.split(","));
@@ -369,7 +373,8 @@ private void initializeWebServer(String name, String hostName,
if (sm instanceof AbstractSessionManager) {
AbstractSessionManager asm = (AbstractSessionManager)sm;
@@ -619,16 +632,16 @@ index 2f28d08..3ac7086 100644
final FilterInitializer[] initializers = getFilterInitializers(conf);
if (initializers != null) {
conf = new Configuration(conf);
@@ -508,7 +516,8 @@ public static Connector createDefaultChannelConnector() {
@@ -522,7 +528,8 @@ public final class HttpServer2 implements FilterContainer {
// the same port with indeterminate routing of incoming requests to them
ret.setReuseAddress(false);
c.setReuseAddress(false);
}
- ret.setHeaderBufferSize(1024*64);
+ ret.setRequestHeaderSize(1024*64);
+ ret.setResponseHeaderSize(1024*64);
return ret;
- c.setHeaderBufferSize(1024*64);
+ c.setRequestHeaderSize(1024*64);
+ c.setResponseHeaderSize(1024*64);
}
@InterfaceAudience.Private
@@ -541,7 +550,7 @@ protected void addDefaultApps(ContextHandlerCollection parent,
// set up the context for "/logs/" if "hadoop.log.dir" property is defined.
String logDir = System.getProperty("hadoop.log.dir");
@@ -1354,16 +1367,16 @@ diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/had
index b0f14f4..fb81ab2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java
@@ -28,9 +28,9 @@
@@ -29,9 +29,9 @@ import org.junit.Test;
import org.junit.rules.MethodRule;
import org.junit.runners.model.FrameworkMethod;
import org.junit.runners.model.Statement;
-import org.mortbay.jetty.Connector;
-import org.mortbay.jetty.Server;
-import org.mortbay.jetty.security.SslSocketConnector;
-import org.mortbay.jetty.security.SslSelectChannelConnector;
+import org.eclipse.jetty.server.Connector;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ssl.SslSocketConnector;
+import org.eclipse.jetty.server.ssl.SslSelectChannelConnector;
public class TestJettyHelper implements MethodRule {
private boolean ssl;
@@ -1488,9 +1501,9 @@ diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdf
index db8f92e..79d9003 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSNamesystemMBean.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSNamesystemMBean.java
@@ -28,7 +28,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -35,7 +35,7 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.metrics2.impl.ConfigBuilder;
import org.apache.hadoop.metrics2.impl.TestMetricsConfig;
import org.junit.Test;
-import org.mortbay.util.ajax.JSON;
+import org.eclipse.jetty.util.ajax.JSON;
@@ -1888,12 +1901,12 @@ index b315e2b..e9b072d 100644
<curator.version>2.7.1</curator.version>
<findbugs.version>3.0.0</findbugs.version>
- <tomcat.version>6.0.44</tomcat.version>
- <tomcat.version>6.0.48</tomcat.version>
+ <tomcat.version>7.0.37</tomcat.version>
<!-- define the Java language version used by the compiler -->
<javac.version>1.7</javac.version>
@@ -455,23 +455,17 @@
@@ -455,28 +455,22 @@
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
@@ -1911,7 +1924,7 @@ index b315e2b..e9b072d 100644
- </exclusion>
- </exclusions>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-server</artifactId>
+ <version>8</version>
</dependency>
<dependency>
@@ -1919,6 +1932,13 @@ index b315e2b..e9b072d 100644
+ <groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
- <version>6.1.26</version>
+ <version>8</version>
</dependency>
<dependency>
- <groupId>org.mortbay.jetty</groupId>
+ <groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-sslengine</artifactId>
- <version>6.1.26</version>
+ <version>8</version>
</dependency>
<dependency>
@@ -2332,19 +2352,6 @@ index 1be0115..420a41c 100644
context.setContextPath("/foo");
server.setHandler(context);
context.addServlet(new ServletHolder(TestServlet.class), "/bar");
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SslSocketConnectorSecure.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SslSocketConnectorSecure.java
index 52ab7ad..6b07871 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SslSocketConnectorSecure.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SslSocketConnectorSecure.java
@@ -18,7 +18,7 @@
package org.apache.hadoop.security.ssl;
-import org.mortbay.jetty.security.SslSocketConnector;
+import org.eclipse.jetty.server.ssl.SslSocketConnector;
import javax.net.ssl.SSLServerSocket;
import java.io.IOException;
diff --git a/hadoop-common-project/hadoop-kms/pom.xml b/hadoop-common-project/hadoop-kms/pom.xml
index c479b67..a065485 100644
--- a/hadoop-common-project/hadoop-kms/pom.xml
@@ -2462,4 +2469,29 @@ index 7fb900d..27582fb 100644
+import org.eclipse.jetty.webapp.WebAppContext;
import com.google.common.annotations.VisibleForTesting;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SslSelectChannelConnectorSecure.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SslSelectChannelConnectorSecure.java
index 7ff2292..b6c9f6f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SslSelectChannelConnectorSecure.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SslSelectChannelConnectorSecure.java
@@ -26,7 +26,8 @@ import javax.net.ssl.SSLEngine;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
-import org.mortbay.jetty.security.SslSelectChannelConnector;
+import org.eclipse.jetty.server.ssl.SslSelectChannelConnector;
+import java.nio.channels.SocketChannel;
/**
* This subclass of the Jetty SslSelectChannelConnector exists solely to
@@ -47,8 +48,8 @@ public class SslSelectChannelConnectorSecure extends SslSelectChannelConnector {
* Disable SSLv3 protocol.
*/
@Override
- protected SSLEngine createSSLEngine() throws IOException {
- SSLEngine engine = super.createSSLEngine();
+ protected SSLEngine createSSLEngine(SocketChannel channel) throws IOException {
+ SSLEngine engine = super.createSSLEngine(channel);
ArrayList<String> nonSSLProtocols = new ArrayList<String>();
for (String p : engine.getEnabledProtocols()) {
if (!p.contains("SSLv3")) {
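
The patch above carries the bulk of the Jetty 6 (org.mortbay) to Jetty 8 (org.eclipse.jetty) migration: the connector-level keystore setters go away and, as the patch's own comment notes, the JKS configuration moves onto SslContextFactory, which the connector then wraps. A condensed, hedged sketch of that pattern follows; the class, method, and parameter names are illustrative, not Hadoop's actual HttpServer2 code, and it assumes Jetty 8.1's org.eclipse.jetty.util.ssl.SslContextFactory.

```java
// Hedged sketch only: names and parameters are illustrative, not Hadoop's HttpServer2.
import org.eclipse.jetty.server.ssl.SslSelectChannelConnector;
import org.eclipse.jetty.util.ssl.SslContextFactory;

public class HttpsConnectorSketch {

  // Mirrors the shape of the patched createHttpsChannelConnector():
  // keystore/truststore settings live on SslContextFactory,
  // and the connector is constructed around that factory.
  static SslSelectChannelConnector buildHttpsConnector(
      String keyStore, String keyStorePassword, String keyPassword,
      String trustStore, String trustStorePassword,
      boolean needsClientAuth, String excludeCiphers) {
    SslContextFactory scf = new SslContextFactory();
    scf.setNeedClientAuth(needsClientAuth);
    scf.setKeyManagerPassword(keyPassword);
    if (keyStore != null) {
      scf.setKeyStorePath(keyStore);
      scf.setKeyStoreType("jks");
      scf.setKeyStorePassword(keyStorePassword);
    }
    if (trustStore != null) {
      scf.setTrustStore(trustStore);
      scf.setTrustStoreType("jks");
      scf.setTrustStorePassword(trustStorePassword);
    }
    SslSelectChannelConnector c = new SslSelectChannelConnector(scf);
    if (excludeCiphers != null && !excludeCiphers.isEmpty()) {
      c.setExcludeCipherSuites(excludeCiphers.split(","));
    }
    // Jetty 8 splits the old setHeaderBufferSize(64K) into request and response sizes.
    c.setRequestHeaderSize(1024 * 64);
    c.setResponseHeaderSize(1024 * 64);
    return c;
  }

  public static void main(String[] args) {
    // Placeholder paths and passwords; real keystore files are needed before starting a Server.
    SslSelectChannelConnector c = buildHttpsConnector(
        "/etc/hadoop/ssl/server.jks", "storepass", "keypass",
        null, null, false, "SSL_RSA_WITH_DES_CBC_SHA");
    c.setPort(8443);
    System.out.println("configured " + c);
  }
}
```

The same header-size split is why the patch replaces the single setHeaderBufferSize(1024*64) call in createDefaultChannelConnector() with separate setRequestHeaderSize and setResponseHeaderSize calls.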

@@ -7,7 +7,7 @@ index b315e2b..9ad8bcd 100644
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
- <version>11.0.2</version>
+ <version>18.0</version>
+ <version>20.0</version>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
@@ -24,3 +24,58 @@ index d55c80b..4505aa9 100644
private String name = null;
private boolean dump = false;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
index 8656ae9..f78f6be 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
@@ -1160,7 +1160,7 @@ public class DataStorage extends Storage {
}
linkWorkers.shutdown();
for (Future<Void> f : futures) {
- Futures.get(f, IOException.class);
+ Futures.getChecked(f, IOException.class);
}
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSInotifyEventInputStream.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSInotifyEventInputStream.java
index 1f9e3e9..961b158 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSInotifyEventInputStream.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSInotifyEventInputStream.java
@@ -18,7 +18,6 @@
package org.apache.hadoop.hdfs;
-import com.google.common.collect.Iterators;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hdfs.inotify.EventBatch;
@@ -33,6 +32,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
+import java.util.Collections;
import java.util.Iterator;
import java.util.Random;
import java.util.concurrent.TimeUnit;
@@ -77,7 +77,7 @@ public class DFSInotifyEventInputStream {
long lastReadTxid) throws IOException {
this.traceSampler = traceSampler;
this.namenode = namenode;
- this.it = Iterators.emptyIterator();
+ this.it = Collections.emptyIterator();
this.lastReadTxid = lastReadTxid;
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
index 2ee2ba7..d60b277 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
@@ -1052,7 +1052,7 @@ public class ClientRMService extends AbstractService implements
new RMAppMoveEvent(applicationId, request.getTargetQueue(), future));
try {
- Futures.get(future, YarnException.class);
+ Futures.getChecked(future, YarnException.class);
} catch (YarnException ex) {
RMAuditLogger.logFailure(callerUGI.getShortUserName(),
AuditConstants.MOVE_APP_REQUEST, "UNKNOWN", "ClientRMService",
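
The guava patch above matches Guava 20's API (see the guava20 BuildRequires in the spec below): Futures.get(Future, Class) is swapped for Futures.getChecked(Future, Class), and Iterators.emptyIterator() for the JDK's Collections.emptyIterator(). A minimal, self-contained illustration of those two moves; the executor and task here are placeholders, not Hadoop code.

```java
// Illustrative only; the executor and task below are placeholders, not Hadoop code.
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;

import java.io.IOException;
import java.util.Collections;
import java.util.Iterator;
import java.util.concurrent.Callable;
import java.util.concurrent.Executors;

public class Guava20MigrationSketch {
  public static void main(String[] args) throws IOException {
    ListeningExecutorService pool =
        MoreExecutors.listeningDecorator(Executors.newSingleThreadExecutor());
    ListenableFuture<String> f = pool.submit(new Callable<String>() {
      @Override
      public String call() {
        return "done";
      }
    });

    // Old: Futures.get(f, IOException.class)
    // New: getChecked wraps any failure cause in the given checked exception type.
    String result = Futures.getChecked(f, IOException.class);
    System.out.println(result);

    // Old: Iterators.emptyIterator()
    // New: the JDK 7+ equivalent needs no Guava at all.
    Iterator<String> empty = Collections.emptyIterator();
    System.out.println(empty.hasNext());

    pool.shutdownNow();
  }
}
```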

@@ -10,8 +10,8 @@
%global __provides_exclude_from ^%{_libdir}/%{name}/.*$
Name: hadoop
-Version: 2.7.3
-Release: 10%{?dist}
+Version: 2.7.6
+Release: 1%{?dist}
Summary: A software platform for processing vast amounts of data
# The BSD license file is missing
# https://issues.apache.org/jira/browse/HADOOP-9849
@@ -43,7 +43,7 @@ Patch2: %{name}-jni-library-loading.patch
Patch4: %{name}-no-download-tomcat.patch
# Use dlopen to find libjvm.so
Patch5: %{name}-dlopen-libjvm.patch
-# Update to Guava 18.0
+# Update to Guava 20
Patch7: %{name}-guava.patch
# Update to Netty 3.6.6-Final
Patch8: %{name}-netty-3-Final.patch
@@ -58,10 +58,8 @@ Patch12: %{name}-armhfp.patch
Patch13: hadoop-jersey1.patch
# fix java8 doclint
Patch14: hadoop-2.4.1-disable-doclint.patch
-%if 0%{?fedora} > 25
# Fix Protobuf compiler errors after updating to 3.1.0
Patch19: protobuf3.patch
-%endif
# Patch openssl 1.0.2 to use 1.1.0
Patch21: %{name}-openssl.patch
# fix exception no longer thrown in aws
@@ -108,7 +106,7 @@ BuildRequires: glassfish-jsp
BuildRequires: glassfish-jsp-api
BuildRequires: google-guice
BuildRequires: grizzly
-BuildRequires: guava
+BuildRequires: guava20
BuildRequires: guice-servlet
BuildRequires: hamcrest
BuildRequires: hawtjni
@@ -426,6 +424,9 @@ This package contains files needed to run Apache Hadoop YARN in secure mode.
%autosetup -p1 -n %{name}-%{version}-src
%pom_xpath_set "pom:properties/pom:protobuf.version" 3.5.0 hadoop-project
# remove yarn-server-nodemanager native build for now (possible bug with cmake macro and build flags)
%pom_remove_plugin :maven-antrun-plugin hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager
%pom_xpath_inject "pom:plugin[pom:artifactId='maven-jar-plugin']/pom:executions/pom:execution[pom:phase='test-compile']" "<id>default-jar</id>" hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell
# Remove the maven-site-plugin. It's not needed
@@ -500,12 +501,7 @@ rm -f hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/T
rm -f hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMSWithZK.java
rm -f hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/MiniKMS.java
rm -f hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
rm -f hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestDelegationTokensWithHA.java
rm -f hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/TestDelegationTokenRemoteFetcher.java
rm -f hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java
rm -f hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestDelegationTokensWithHA.java
rm -f hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/TestSaslDataTransfer.java
rm -f hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancerWithSaslDataTransfer.java
rm -rf hadoop-hdfs-project/hadoop-hdfs/src/test
rm -rf hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test
rm -rf hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test
rm -rf hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test
@@ -524,6 +520,11 @@ rm -rf hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test
rm -rf hadoop-tools/hadoop-streaming/src/test
rm -rf hadoop-tools/hadoop-gridmix/src/test/java
rm -rf hadoop-tools/hadoop-extras/src/test
rm -rf hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/src/test
rm -rf hadoop-hdfs-project/hadoop-hdfs-nfs/src/test
rm -rf hadoop-tools/hadoop-distcp/src/test
rm -rf hadoop-tools/hadoop-archives/src/test
rm -rf hadoop-tools/hadoop-datajoin/src/test
# Remove dist plugin. It's not needed and has issues
%pom_remove_plugin :maven-antrun-plugin hadoop-common-project/hadoop-kms
@@ -1127,10 +1128,11 @@ fi
%files yarn-security
%config(noreplace) %{_sysconfdir}/%{name}/container-executor.cfg
# Permissions set per upstream guidelines: https://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-common/ClusterSetup.html#Configuration_in_Secure_Mode
%attr(6050,root,yarn) %{_bindir}/container-executor
%changelog
+* Wed Jun 27 2018 Mike Miller <mmiller@apache.org> - 2.7.6-1
+- Upgrade to version 2.7.6
* Tue May 29 2018 Rafael dos Santos <rdossant@redhat.com> - 2.7.3-10
- Use standard Fedora build/linker flags (rhbz#1540172)

@@ -1 +1 @@
-SHA512 (hadoop-2.7.3-src.tar.gz) = 8451f89d3cbb672888abc67c76a53b2d50f44b8878127c3e361cb354cd1b5a3a2bc7d531c1ba67e9bc3d17e5c6aa496d11969484c12c86b56e8823cd1ab6482a
+SHA512 (hadoop-2.7.6-src.tar.gz) = 8a87bcbd58bd47c996ba6c00739d3261003883fe747d00ed3da8196ae9c625dc6d53e8a25e78eb6bc9fb0b8c975f834a4dca7198e1beb5e4773008eb4726f168