Use protobuf 3.1.0; fixes FTBFS (bz#1396787)

This commit is contained in:
Christopher Tubbs 2016-12-20 16:32:34 -05:00
parent 4b8be7b521
commit 31f5543561
2 changed files with 43 additions and 1 deletions

View File

@@ -14,7 +14,7 @@
Name: hadoop
Version: 2.4.1
Release: 24%{?dist}
Release: 25%{?dist}
Summary: A software platform for processing vast amounts of data
# The BSD license file is missing
# https://issues.apache.org/jira/browse/HADOOP-9849
@@ -71,6 +71,10 @@ Patch16: hadoop-2.4.1-servlet-3.1-api.patch
Patch17: hadoop-2.4.1-new-bookkeeper.patch
# Fix POM warnings which become errors in newest Maven
Patch18: fix-pom-errors.patch
%if 0%{?fedora} > 25
# Fix Protobuf compiler errors after updating to 3.1.0
Patch19: protobuf3.patch
%endif
# This is not a real BR, but is here because of rawhide shift to eclipse
# aether packages which caused a dependency of a dependency to not get
@@ -426,7 +430,11 @@ This package contains files needed to run Apache Hadoop YARN in secure mode.
%prep
%autosetup -p1 -n %{name}-common-%{commit}
%if 0%{?fedora} > 25
%pom_xpath_set "pom:properties/pom:protobuf.version" 3.1.0 hadoop-project
%else
%pom_xpath_set "pom:properties/pom:protobuf.version" 2.6.1 hadoop-project
%endif
%pom_xpath_inject "pom:plugin[pom:artifactId='maven-jar-plugin']/pom:executions/pom:execution[pom:phase='test-compile']" "<id>default-jar</id>" hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell
# Remove the maven-site-plugin. It's not needed
@@ -1029,6 +1037,9 @@ fi
%attr(6050,root,yarn) %{_bindir}/container-executor
%changelog
* Tue Dec 20 2016 Christopher Tubbs <ctubbsii@fedoraproject.org> - 2.4.1-25
- Use protobuf 3.1.0; fixes FTBFS (bz#1396787)
* Fri Oct 28 2016 Christopher Tubbs <ctubbsii@fedoraproject.org> - 2.4.1-24
- build libhdfs for all architectures (bz#1328076)

31
protobuf3.patch Normal file
View File

@@ -0,0 +1,31 @@
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java 2016-12-20 15:18:20.683682746 -0500
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java 2016-12-20 15:18:10.496932909 -0500
@@ -53,7 +53,7 @@
import com.google.protobuf.BlockingService;
import com.google.protobuf.CodedOutputStream;
import com.google.protobuf.Descriptors.MethodDescriptor;
-import com.google.protobuf.GeneratedMessage;
+import com.google.protobuf.GeneratedMessageV3;
import com.google.protobuf.Message;
import com.google.protobuf.ServiceException;
import com.google.protobuf.TextFormat;
@@ -281,7 +281,7 @@
* Protobuf. Several methods on {@link org.apache.hadoop.ipc.Server and RPC}
* use type Writable as a wrapper to work across multiple RpcEngine kinds.
*/
- private static abstract class RpcMessageWithHeader<T extends GeneratedMessage>
+ private static abstract class RpcMessageWithHeader<T extends GeneratedMessageV3>
implements RpcWrapper {
T requestHeader;
Message theRequest; // for clientSide, the request is here
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java
@@ -584,7 +584,7 @@ private SectionName(String name) {
}
}
- private static int getOndiskTrunkSize(com.google.protobuf.GeneratedMessage s) {
+ private static int getOndiskTrunkSize(com.google.protobuf.GeneratedMessageV3 s) {
return CodedOutputStream.computeRawVarint32Size(s.getSerializedSize())
+ s.getSerializedSize();
}