Mock Version: 1.4.9
Mock Version: 1.4.9
ENTER ['do'](['bash', '--login', '-c', '/usr/bin/rpmbuild -bs --target riscv64 --nodeps /builddir/build/SPECS/hadoop.spec'], chrootPath='/var/lib/mock/f29-build-29159-15703/root'env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'en_US.UTF-8', 'http_proxy': 'http://192.168.0.254:3128', 'https_proxy': 'http://192.168.0.254:3128'}shell=Falselogger=timeout=345600uid=998gid=135user='mockbuild'nspawn_args=[]unshare_net=FalseprintOutput=False)
Executing command: ['bash', '--login', '-c', '/usr/bin/rpmbuild -bs --target riscv64 --nodeps /builddir/build/SPECS/hadoop.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'en_US.UTF-8', 'http_proxy': 'http://192.168.0.254:3128', 'https_proxy': 'http://192.168.0.254:3128'} and shell False
Building target platforms: riscv64
Building for target riscv64
Wrote: /builddir/build/SRPMS/hadoop-2.7.6-5.fc29.src.rpm
Child return code was: 0
ENTER ['do'](['bash', '--login', '-c', '/usr/bin/rpmbuild -bb --target riscv64 --nodeps /builddir/build/SPECS/hadoop.spec'], chrootPath='/var/lib/mock/f29-build-29159-15703/root'env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'en_US.UTF-8', 'http_proxy': 'http://192.168.0.254:3128', 'https_proxy': 'http://192.168.0.254:3128'}shell=Falselogger=timeout=345600uid=998gid=135user='mockbuild'nspawn_args=[]unshare_net=FalseprintOutput=False)
Executing command: ['bash', '--login', '-c', '/usr/bin/rpmbuild -bb --target riscv64 --nodeps /builddir/build/SPECS/hadoop.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'en_US.UTF-8', 'http_proxy': 'http://192.168.0.254:3128', 'https_proxy': 'http://192.168.0.254:3128'} and shell False
Building target platforms: riscv64
Building for target riscv64
Executing(%prep): /bin/sh -e /var/tmp/rpm-tmp.x3Q5Ee
+ umask 022
+ cd /builddir/build/BUILD
+ cd /builddir/build/BUILD
+ rm -rf hadoop-2.7.6-src
+ /usr/bin/gzip -dc /builddir/build/SOURCES/hadoop-2.7.6-src.tar.gz
+ /usr/bin/tar -xof -
+ STATUS=0
+ '[' 0 -ne 0 ']'
+ cd hadoop-2.7.6-src
+ /usr/bin/chmod -Rf a+rX,u+w,g-w,o-w .
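The shell trace above is the expansion of the spec's %prep section, and the patch and POM-editing pipelines that follow are most likely expansions of rpm's %patch macro and of the javapackages-tools %pom_* macros, which call /usr/share/java-utils/pom_editor.py. A minimal sketch of the kind of %prep that would produce this trace is shown below; it is reconstructed from the log rather than taken from the actual hadoop.spec, and the patch numbers are illustrative only.

%prep
# Unpacks SOURCES/hadoop-2.7.6-src.tar.gz (gzip -dc | tar -xof -) and fixes
# permissions with chmod, producing the trace lines above.
%setup -q -n hadoop-%{version}-src
# Each patch application shows up in the trace as a "cat <patch> | patch -p1 ..." pipeline.
%patch0 -p1
%patch1 -p1
# The pom_editor.py invocations in the trace correspond to macros such as:
%pom_remove_plugin :maven-site-plugin
%pom_remove_dep :hadoop-minikdc hadoop-common-project/hadoop-common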
+ /usr/bin/cat /builddir/build/SOURCES/hadoop-fedora-integration.patch + /usr/bin/patch -p1 -s --fuzz=0 --no-backup-if-mismatch + /usr/bin/cat /builddir/build/SOURCES/hadoop-jni-library-loading.patch + /usr/bin/patch -p1 -s --fuzz=0 --no-backup-if-mismatch + /usr/bin/patch -p1 -s --fuzz=0 --no-backup-if-mismatch + /usr/bin/cat /builddir/build/SOURCES/hadoop-no-download-tomcat.patch + /usr/bin/cat /builddir/build/SOURCES/hadoop-dlopen-libjvm.patch + /usr/bin/patch -p1 -s --fuzz=0 --no-backup-if-mismatch + /usr/bin/cat /builddir/build/SOURCES/hadoop-guava.patch + /usr/bin/patch -p1 -s --fuzz=0 --no-backup-if-mismatch + /usr/bin/cat /builddir/build/SOURCES/hadoop-netty-3-Final.patch + /usr/bin/patch -p1 -s --fuzz=0 --no-backup-if-mismatch + /usr/bin/cat /builddir/build/SOURCES/hadoop-tools.jar.patch + /usr/bin/patch -p1 -s --fuzz=0 --no-backup-if-mismatch + /usr/bin/cat /builddir/build/SOURCES/hadoop-build.patch + /usr/bin/patch -p1 -s --fuzz=0 --no-backup-if-mismatch + /usr/bin/cat /builddir/build/SOURCES/hadoop-armhfp.patch + /usr/bin/patch -p1 -s --fuzz=0 --no-backup-if-mismatch + /usr/bin/cat /builddir/build/SOURCES/hadoop-jersey1.patch + /usr/bin/patch -p1 -s --fuzz=0 --no-backup-if-mismatch + /usr/bin/cat /builddir/build/SOURCES/hadoop-2.4.1-disable-doclint.patch + /usr/bin/patch -p1 -s --fuzz=0 --no-backup-if-mismatch + /usr/bin/patch -p1 -s --fuzz=0 --no-backup-if-mismatch + /usr/bin/cat /builddir/build/SOURCES/protobuf3.patch + /usr/bin/patch -p1 -s --fuzz=0 --no-backup-if-mismatch + /usr/bin/cat /builddir/build/SOURCES/hadoop-openssl.patch + /usr/bin/patch -p1 -s --fuzz=0 --no-backup-if-mismatch + /usr/bin/cat /builddir/build/SOURCES/hadoop-aws.patch + /usr/bin/cat /builddir/build/SOURCES/classpath.patch + /usr/bin/patch -p1 -s --fuzz=0 --no-backup-if-mismatch + /usr/bin/patch -p1 -s --fuzz=0 --no-backup-if-mismatch + /usr/bin/cat /builddir/build/SOURCES/fix-container-executor-cmake.patch + /usr/bin/cat /builddir/build/SOURCES/backport-CVE-2018-8009.patch + /usr/bin/patch -p1 -s --fuzz=0 --no-backup-if-mismatch + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_xpath_set pom:properties/pom:protobuf.version 3.5.0 hadoop-project + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_plugin :maven-antrun-plugin hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_xpath_inject 'pom:plugin[pom:artifactId='\''maven-jar-plugin'\'']/pom:executions/pom:execution[pom:phase='\''test-compile'\'']' 'default-jar' hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_plugin :maven-site-plugin + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_plugin :maven-site-plugin hadoop-common-project/hadoop-auth + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_plugin :maven-site-plugin hadoop-hdfs-project/hadoop-hdfs-httpfs + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_plugin :findbugs-maven-plugin hadoop-hdfs-project/hadoop-hdfs-httpfs + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_plugin :findbugs-maven-plugin hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_plugin :findbugs-maven-plugin hadoop-mapreduce-project/hadoop-mapreduce-client + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_plugin :findbugs-maven-plugin 
hadoop-mapreduce-project/hadoop-mapreduce-examples + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_plugin :findbugs-maven-plugin hadoop-mapreduce-project + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_plugin :findbugs-maven-plugin hadoop-project-dist + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_plugin :findbugs-maven-plugin hadoop-project + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_plugin :findbugs-maven-plugin hadoop-tools/hadoop-rumen + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_plugin :findbugs-maven-plugin hadoop-tools/hadoop-streaming + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_plugin :findbugs-maven-plugin hadoop-yarn-project/hadoop-yarn + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_plugin :findbugs-maven-plugin hadoop-yarn-project + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_plugin :maven-project-info-reports-plugin hadoop-common-project/hadoop-auth + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_plugin :maven-project-info-reports-plugin hadoop-hdfs-project/hadoop-hdfs-httpfs + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_plugin :maven-project-info-reports-plugin hadoop-project + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_plugin :maven-checkstyle-plugin hadoop-project-dist + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_plugin :maven-checkstyle-plugin hadoop-project + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_plugin :maven-checkstyle-plugin hadoop-tools/hadoop-distcp + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_disable_module hadoop-minikdc hadoop-common-project + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_dep :hadoop-minikdc hadoop-common-project/hadoop-common + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_dep :hadoop-minikdc hadoop-common-project/hadoop-auth + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_dep :hadoop-minikdc hadoop-project + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_dep :hadoop-minikdc hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_dep :hadoop-minikdc hadoop-common-project/hadoop-kms + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_dep :hadoop-minikdc hadoop-hdfs-project/hadoop-hdfs + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_dep :hadoop-minikdc hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_dep :hadoop-minikdc hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_dep :hadoop-minikdc hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice + rm -f hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/TestKerberosAuthenticator.java + rm -f hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java + rm -f hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAltKerberosAuthenticationHandler.java + rm -f 
hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java + rm -f hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java + rm -f hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferTestCase.java + rm -f hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZonesWithKMS.java + rm -f hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/TestSecureNNWithQJM.java + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_dep :tomcat-embed-core hadoop-project + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_dep :tomcat-embed-logging-juli hadoop-project + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_dep :tomcat-embed-core hadoop-common-project/hadoop-auth + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_dep :tomcat-embed-logging-juli hadoop-common-project/hadoop-auth + rm -f hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/AuthenticatorTestCase.java + rm -f hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/TestPseudoAuthenticator.java + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_xpath_remove 'pom:project/pom:dependencyManagement/pom:dependencies/pom:dependency[pom:artifactId='\''hadoop-auth'\'' and pom:type='\''test-jar'\'']' hadoop-project + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_xpath_remove 'pom:project/pom:dependencies/pom:dependency[pom:artifactId='\''hadoop-auth'\'' and pom:type='\''test-jar'\'']' hadoop-hdfs-project/hadoop-hdfs-httpfs + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_xpath_remove 'pom:project/pom:dependencies/pom:dependency[pom:artifactId='\''hadoop-auth'\'' and pom:type='\''test-jar'\'']' hadoop-common-project/hadoop-common + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_xpath_remove 'pom:project/pom:dependencies/pom:dependency[pom:artifactId='\''hadoop-auth'\'' and pom:type='\''test-jar'\'']' hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_xpath_remove 'pom:project/pom:dependencies/pom:dependency[pom:artifactId='\''hadoop-auth'\'' and pom:type='\''test-jar'\'']' hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice + rm -f hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java + rm -f hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUGILoginFromKeytab.java + rm -f hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/web/TestWebDelegationToken.java + rm -f hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/curator/TestChildReaper.java + rm -f hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java + rm -f hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestZKDelegationTokenSecretManager.java + rm -f hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpCookieFlag.java + rm -f hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMSWithZK.java + rm -f 
hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/MiniKMS.java + rm -f hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java + rm -rf hadoop-hdfs-project/hadoop-hdfs/src/test + rm -rf hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test + rm -rf hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test + rm -rf hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test + rm -rf hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager + rm -f hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/test/YarnTestDriver.java + rm -rf hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server + rm -rf hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client + rm -rf hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test + rm -rf hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/src/test + rm -rf hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test + rm -f hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleHandler.java + rm -rf hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test + rm -rf hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test + rm -rf hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test + rm -rf hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test + rm -rf hadoop-tools/hadoop-streaming/src/test + rm -rf hadoop-tools/hadoop-gridmix/src/test/java + rm -rf hadoop-tools/hadoop-extras/src/test + rm -rf hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/src/test + rm -rf hadoop-hdfs-project/hadoop-hdfs-nfs/src/test + rm -rf hadoop-tools/hadoop-distcp/src/test + rm -rf hadoop-tools/hadoop-archives/src/test + rm -rf hadoop-tools/hadoop-datajoin/src/test + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_plugin :maven-antrun-plugin hadoop-common-project/hadoop-kms + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_plugin :maven-antrun-plugin hadoop-dist + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_plugin :maven-jar-plugin hadoop-common-project/hadoop-auth + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_xpath_set 'pom:project/pom:dependencyManagement/pom:dependencies/pom:dependency[pom:artifactId='\''apacheds-kerberos-codec'\'']/pom:version' 2.0.0-M21 hadoop-project + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_disable_module hadoop-pipes hadoop-tools + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_dep :hadoop-pipes hadoop-tools/hadoop-tools-dist + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_add_dep org.iq80.leveldb:leveldb hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_add_dep org.fusesource.hawtjni:hawtjni-runtime hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_xpath_set 
'pom:project/pom:dependencies/pom:dependency[pom:artifactId='\''hadoop-common'\'' and pom:type='\''test-jar'\'']/pom:scope' test hadoop-tools/hadoop-openstack + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_xpath_set 'pom:project/pom:dependencyManagement/pom:dependencies/pom:dependency[pom:artifactId='\''asm'\'']/pom:version' 5.0.2 hadoop-project + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_xpath_set 'pom:project/pom:dependencyManagement/pom:dependencies/pom:dependency[pom:artifactId='\''asm'\'']/pom:groupId' org.ow2.asm hadoop-project + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_add_dep org.iq80.leveldb:leveldb hadoop-hdfs-project/hadoop-hdfs + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_add_dep org.iq80.leveldb:leveldb hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_plugin :maven-jar-plugin hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_plugin :maven-antrun-plugin hadoop-tools/hadoop-streaming + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_disable_module hadoop-azure hadoop-tools + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_dep :hadoop-azure hadoop-tools/hadoop-tools-dist + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_disable_module hadoop-kms hadoop-common-project + /usr/bin/python3 /usr/share/java-utils/pom_editor.py pom_remove_dep :hadoop-kms hadoop-hdfs-project/hadoop-hdfs + /usr/bin/python3 /usr/share/java-utils/mvn_package.py :hadoop-auth-examples __noinstall + /usr/bin/python3 /usr/share/java-utils/mvn_package.py :hadoop-hdfs-httpfs __noinstall + /usr/bin/python3 /usr/share/java-utils/mvn_package.py :hadoop-assemblies __noinstall + /usr/bin/python3 /usr/share/java-utils/mvn_package.py :hadoop-project-dist __noinstall + /usr/bin/python3 /usr/share/java-utils/mvn_package.py :::tests: hadoop-tests + /usr/bin/python3 /usr/share/java-utils/mvn_package.py ':hadoop-*-tests::{}:' hadoop-tests + /usr/bin/python3 /usr/share/java-utils/mvn_package.py ':hadoop-client*::{}:' hadoop-client + /usr/bin/python3 /usr/share/java-utils/mvn_package.py ':hadoop-hdfs*::{}:' hadoop-hdfs + /usr/bin/python3 /usr/share/java-utils/mvn_package.py ':hadoop-mapreduce-examples*::{}:' hadoop-mapreduce-examples + /usr/bin/python3 /usr/share/java-utils/mvn_package.py ':hadoop-mapreduce*::{}:' hadoop-mapreduce + /usr/bin/python3 /usr/share/java-utils/mvn_package.py ':hadoop-archives::{}:' hadoop-mapreduce + /usr/bin/python3 /usr/share/java-utils/mvn_package.py ':hadoop-datajoin::{}:' hadoop-mapreduce + /usr/bin/python3 /usr/share/java-utils/mvn_package.py ':hadoop-distcp::{}:' hadoop-mapreduce + /usr/bin/python3 /usr/share/java-utils/mvn_package.py ':hadoop-extras::{}:' hadoop-mapreduce + /usr/bin/python3 /usr/share/java-utils/mvn_package.py ':hadoop-gridmix::{}:' hadoop-mapreduce + /usr/bin/python3 /usr/share/java-utils/mvn_package.py ':hadoop-openstack::{}:' hadoop-mapreduce + /usr/bin/python3 /usr/share/java-utils/mvn_package.py ':hadoop-rumen::{}:' hadoop-mapreduce + /usr/bin/python3 /usr/share/java-utils/mvn_package.py ':hadoop-sls::{}:' hadoop-mapreduce + /usr/bin/python3 /usr/share/java-utils/mvn_package.py ':hadoop-streaming::{}:' hadoop-mapreduce + /usr/bin/python3 /usr/share/java-utils/mvn_package.py ':hadoop-tools*::{}:' hadoop-mapreduce + /usr/bin/python3 /usr/share/java-utils/mvn_package.py 
':hadoop-maven-plugins::{}:' hadoop-maven-plugin + /usr/bin/python3 /usr/share/java-utils/mvn_package.py ':hadoop-minicluster::{}:' hadoop-tests + /usr/bin/python3 /usr/share/java-utils/mvn_package.py ':hadoop-yarn*::{}:' hadoop-yarn + /usr/bin/python3 /usr/share/java-utils/mvn_file.py :hadoop-common::tests: hadoop/hadoop-common Executing(%build): /bin/sh -e /var/tmp/rpm-tmp.SdyeX5 + exit 0 + umask 022 + cd /builddir/build/BUILD + cd hadoop-2.7.6-src + CFLAGS='-O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection' + export CFLAGS + CXXFLAGS='-O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection' + export CXXFLAGS + FFLAGS='-O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -I/usr/lib64/gfortran/modules' + export FFLAGS + FCFLAGS='-O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -I/usr/lib64/gfortran/modules' + export FCFLAGS + LDFLAGS='-Wl,-z,relro -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld' + export LDFLAGS + /usr/bin/python3 /usr/share/java-utils/mvn_build.py -j -- -Drequire.snappy=true -Dcontainer-executor.conf.dir=/etc/hadoop -Pdist,native -DskipTests -DskipTest -DskipIT -Dmaven.javadoc.skip=true BUILDSTDERR: Executing: xmvn --batch-mode --offline -Drequire.snappy=true -Dcontainer-executor.conf.dir=/etc/hadoop -Pdist,native -DskipTests -DskipTest -DskipIT -Dmaven.javadoc.skip=true verify org.fedoraproject.xmvn:xmvn-mojo:install org.fedoraproject.xmvn:xmvn-mojo:builddep BUILDSTDERR: ['xmvn', '--batch-mode', '--offline', '-Drequire.snappy=true', '-Dcontainer-executor.conf.dir=/etc/hadoop', '-Pdist,native', '-DskipTests', '-DskipTest', '-DskipIT', '-Dmaven.javadoc.skip=true', 'verify', 'org.fedoraproject.xmvn:xmvn-mojo:install', 'org.fedoraproject.xmvn:xmvn-mojo:builddep'] [INFO] Scanning for projects... [WARNING] [WARNING] Some problems were encountered while building the effective model for org.apache.hadoop:hadoop-auth:jar:2.7.6 [WARNING] 'build.plugins.plugin.version' for org.codehaus.mojo:findbugs-maven-plugin is missing. @ line 141, column 15 [WARNING] [WARNING] Some problems were encountered while building the effective model for org.apache.hadoop:hadoop-nfs:jar:2.7.6 [WARNING] 'build.plugins.plugin.version' for org.codehaus.mojo:findbugs-maven-plugin is missing. @ line 98, column 15 [WARNING] [WARNING] Some problems were encountered while building the effective model for org.apache.hadoop:hadoop-gridmix:jar:2.7.6 [WARNING] 'build.plugins.plugin.version' for org.codehaus.mojo:findbugs-maven-plugin is missing. 
@ line 108, column 16 [WARNING] [WARNING] Some problems were encountered while building the effective model for org.apache.hadoop:hadoop-openstack:jar:2.7.6 [WARNING] 'build.plugins.plugin.version' for org.apache.maven.plugins:maven-project-info-reports-plugin is missing. @ line 76, column 15 [WARNING] 'build.plugins.plugin.version' for org.codehaus.mojo:findbugs-maven-plugin is missing. @ line 65, column 15 [WARNING] [WARNING] Some problems were encountered while building the effective model for org.apache.hadoop:hadoop-sls:jar:2.7.6 [WARNING] 'build.plugins.plugin.version' for org.codehaus.mojo:findbugs-maven-plugin is missing. @ line 85, column 15 [WARNING] [WARNING] Some problems were encountered while building the effective model for org.apache.hadoop:hadoop-aws:jar:2.7.6 [WARNING] 'build.plugins.plugin.version' for org.apache.maven.plugins:maven-project-info-reports-plugin is missing. @ line 78, column 15 [WARNING] 'build.plugins.plugin.version' for org.codehaus.mojo:findbugs-maven-plugin is missing. @ line 67, column 15 [WARNING] [WARNING] Some problems were encountered while building the effective model for org.apache.hadoop:hadoop-build-tools:jar:2.7.6 [WARNING] 'build.plugins.plugin.version' for org.apache.maven.plugins:maven-remote-resources-plugin is missing. @ org.apache.hadoop:hadoop-build-tools:[unknown-version], /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-build-tools/pom.xml, line 80, column 15 [WARNING] 'build.plugins.plugin.version' for org.apache.maven.plugins:maven-site-plugin is missing. @ org.apache.hadoop:hadoop-build-tools:[unknown-version], /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-build-tools/pom.xml, line 46, column 15 [WARNING] 'build.plugins.plugin.version' for org.apache.maven.plugins:maven-javadoc-plugin is missing. @ org.apache.hadoop:hadoop-main:2.7.6, /builddir/build/BUILD/hadoop-2.7.6-src/pom.xml, line 368, column 19 [WARNING] 'build.plugins.plugin.version' for org.apache.maven.plugins:maven-resources-plugin is missing. @ org.apache.hadoop:hadoop-build-tools:[unknown-version], /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-build-tools/pom.xml, line 54, column 15 [WARNING] [WARNING] Some problems were encountered while building the effective model for org.apache.hadoop:hadoop-main:pom:2.7.6 [WARNING] 'build.plugins.plugin.version' for org.apache.maven.plugins:maven-javadoc-plugin is missing. @ line 368, column 19 [WARNING] [WARNING] It is highly recommended to fix these problems because they threaten the stability of your build. [WARNING] [WARNING] For this reason, future Maven versions might no longer support building such malformed projects. 
[WARNING] [INFO] ------------------------------------------------------------------------ [INFO] Reactor Build Order: [INFO] [INFO] Apache Hadoop Main [pom] [INFO] Apache Hadoop Build Tools [jar] [INFO] Apache Hadoop Project POM [pom] [INFO] Apache Hadoop Annotations [jar] [INFO] Apache Hadoop Assemblies [jar] [INFO] Apache Hadoop Project Dist POM [pom] [INFO] Apache Hadoop Maven Plugins [maven-plugin] [INFO] Apache Hadoop Auth [jar] [INFO] Apache Hadoop Auth Examples [war] [INFO] Apache Hadoop Common [jar] [INFO] Apache Hadoop NFS [jar] [INFO] Apache Hadoop Common Project [pom] [INFO] Apache Hadoop HDFS [jar] [INFO] Apache Hadoop HttpFS [war] [INFO] Apache Hadoop HDFS BookKeeper Journal [jar] [INFO] Apache Hadoop HDFS-NFS [jar] [INFO] Apache Hadoop HDFS Project [pom] [INFO] hadoop-yarn [pom] [INFO] hadoop-yarn-api [jar] [INFO] hadoop-yarn-common [jar] [INFO] hadoop-yarn-server [pom] [INFO] hadoop-yarn-server-common [jar] [INFO] hadoop-yarn-server-nodemanager [jar] [INFO] hadoop-yarn-server-web-proxy [jar] [INFO] hadoop-yarn-server-applicationhistoryservice [jar] [INFO] hadoop-yarn-server-resourcemanager [jar] [INFO] hadoop-yarn-server-tests [jar] [INFO] hadoop-yarn-client [jar] [INFO] hadoop-yarn-server-sharedcachemanager [jar] [INFO] hadoop-yarn-applications [pom] [INFO] hadoop-yarn-applications-distributedshell [jar] [INFO] hadoop-yarn-applications-unmanaged-am-launcher [jar] [INFO] hadoop-yarn-site [pom] [INFO] hadoop-yarn-registry [jar] [INFO] hadoop-yarn-project [pom] [INFO] hadoop-mapreduce-client [pom] [INFO] hadoop-mapreduce-client-core [jar] [INFO] hadoop-mapreduce-client-common [jar] [INFO] hadoop-mapreduce-client-shuffle [jar] [INFO] hadoop-mapreduce-client-app [jar] [INFO] hadoop-mapreduce-client-hs [jar] [INFO] hadoop-mapreduce-client-jobclient [jar] [INFO] hadoop-mapreduce-client-hs-plugins [jar] [INFO] Apache Hadoop MapReduce Examples [jar] [INFO] hadoop-mapreduce [pom] [INFO] Apache Hadoop MapReduce Streaming [jar] [INFO] Apache Hadoop Distributed Copy [jar] [INFO] Apache Hadoop Archives [jar] [INFO] Apache Hadoop Rumen [jar] [INFO] Apache Hadoop Gridmix [jar] [INFO] Apache Hadoop Data Join [jar] [INFO] Apache Hadoop Ant Tasks [jar] [INFO] Apache Hadoop Extras [jar] [INFO] Apache Hadoop OpenStack support [jar] [INFO] Apache Hadoop Amazon Web Services support [jar] [INFO] Apache Hadoop Client [jar] [INFO] Apache Hadoop Mini-Cluster [jar] [INFO] Apache Hadoop Scheduler Load Simulator [jar] [INFO] Apache Hadoop Tools Dist [jar] [INFO] Apache Hadoop Tools [pom] [INFO] Apache Hadoop Distribution [jar] [INFO] [INFO] -------------------< org.apache.hadoop:hadoop-main >-------------------- [INFO] Building Apache Hadoop Main 2.7.6 [1/61] [INFO] --------------------------------[ pom ]--------------------------------- [INFO] [INFO] --- maven-enforcer-plugin:1.4.1:enforce (default) @ hadoop-main --- [INFO] [INFO] ----------------< org.apache.hadoop:hadoop-build-tools >---------------- [INFO] Building Apache Hadoop Build Tools 2.7.6 [2/61] [INFO] --------------------------------[ jar ]--------------------------------- [INFO] [INFO] --- maven-resources-plugin:3.1.0:copy-resources (copy-resources) @ hadoop-build-tools --- [INFO] Using 'UTF-8' encoding to copy filtered resources. [INFO] Copying 2 resources [INFO] [INFO] --- maven-antrun-plugin:1.8:run (dummy) @ hadoop-build-tools --- [INFO] No Ant target defined - SKIPPED [INFO] [INFO] --- maven-resources-plugin:3.1.0:resources (default-resources) @ hadoop-build-tools --- [INFO] Using 'UTF-8' encoding to copy filtered resources. 
[INFO] Copying 2 resources to META-INF [INFO] skip non existing resourceDirectory /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-build-tools/src/main/resources [INFO] [INFO] --- maven-remote-resources-plugin:1.5:bundle (default) @ hadoop-build-tools --- [INFO] [INFO] --- maven-compiler-plugin:3.8.0:compile (default-compile) @ hadoop-build-tools --- [INFO] No sources to compile [INFO] [INFO] --- maven-resources-plugin:3.1.0:testResources (default-testResources) @ hadoop-build-tools --- [INFO] Using 'UTF-8' encoding to copy filtered resources. [INFO] skip non existing resourceDirectory /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-build-tools/src/test/resources [INFO] [INFO] --- maven-compiler-plugin:3.8.0:testCompile (default-testCompile) @ hadoop-build-tools --- [INFO] No sources to compile [INFO] [INFO] --- maven-surefire-plugin:2.22.0:test (default-test) @ hadoop-build-tools --- [INFO] Tests are skipped. [INFO] [INFO] --- maven-jar-plugin:3.1.0:jar (default-jar) @ hadoop-build-tools --- [INFO] Building jar: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-build-tools/target/hadoop-build-tools-2.7.6.jar [INFO] [INFO] ------------------< org.apache.hadoop:hadoop-project >------------------ [INFO] Building Apache Hadoop Project POM 2.7.6 [3/61] [INFO] --------------------------------[ pom ]--------------------------------- [INFO] [INFO] --- maven-enforcer-plugin:1.4.1:enforce (default) @ hadoop-project --- [INFO] [INFO] --- maven-antrun-plugin:1.8:run (create-testdirs) @ hadoop-project --- [INFO] Executing tasks main: [mkdir] Created dir: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-project/target/test-dir [INFO] Executed tasks [INFO] [INFO] --- maven-remote-resources-plugin:1.5:process (default) @ hadoop-project --- [INFO] [INFO] --- maven-source-plugin:3.0.1:jar-no-fork (hadoop-java-sources) @ hadoop-project --- [INFO] [INFO] --- maven-source-plugin:3.0.1:test-jar-no-fork (hadoop-java-sources) @ hadoop-project --- [INFO] [INFO] --- maven-enforcer-plugin:1.4.1:enforce (dist-enforce) @ hadoop-project --- [INFO] [INFO] --- maven-javadoc-plugin:3.0.1:jar (module-javadocs) @ hadoop-project --- [INFO] Skipping javadoc generation [INFO] [INFO] --- maven-enforcer-plugin:1.4.1:enforce (depcheck) @ hadoop-project --- [INFO] [INFO] ----------------< org.apache.hadoop:hadoop-annotations >---------------- [INFO] Building Apache Hadoop Annotations 2.7.6 [4/61] [INFO] --------------------------------[ jar ]--------------------------------- [WARNING] The POM for com.sun:tools:jar:SYSTEM is missing, no dependency information available [INFO] [INFO] --- maven-antrun-plugin:1.8:run (create-testdirs) @ hadoop-annotations --- [INFO] Executing tasks main: [mkdir] Created dir: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-annotations/target/test-dir [INFO] Executed tasks [INFO] [INFO] --- maven-remote-resources-plugin:1.5:process (default) @ hadoop-annotations --- [WARNING] Missing POM for com.sun:tools:jar:SYSTEM [INFO] [INFO] --- maven-resources-plugin:3.1.0:resources (default-resources) @ hadoop-annotations --- [INFO] Using 'UTF-8' encoding to copy filtered resources. 
[INFO] skip non existing resourceDirectory /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-annotations/src/main/resources [INFO] Copying 2 resources [INFO] [INFO] --- maven-compiler-plugin:3.8.0:compile (default-compile) @ hadoop-annotations --- [INFO] Compiling 8 source files to /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-annotations/target/classes [INFO] [INFO] --- maven-resources-plugin:3.1.0:testResources (default-testResources) @ hadoop-annotations --- [INFO] Using 'UTF-8' encoding to copy filtered resources. [INFO] skip non existing resourceDirectory /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-annotations/src/test/resources [INFO] Copying 2 resources [INFO] [INFO] --- maven-compiler-plugin:3.8.0:testCompile (default-testCompile) @ hadoop-annotations --- [INFO] No sources to compile [INFO] [INFO] --- maven-surefire-plugin:2.22.0:test (default-test) @ hadoop-annotations --- [INFO] Tests are skipped. [INFO] [INFO] --- maven-jar-plugin:3.1.0:jar (default-jar) @ hadoop-annotations --- [INFO] Building jar: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-annotations/target/hadoop-annotations-2.7.6.jar [INFO] [INFO] --- maven-source-plugin:3.0.1:jar-no-fork (hadoop-java-sources) @ hadoop-annotations --- [INFO] Building jar: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-annotations/target/hadoop-annotations-2.7.6-sources.jar [INFO] [INFO] --- maven-source-plugin:3.0.1:test-jar-no-fork (hadoop-java-sources) @ hadoop-annotations --- [INFO] Building jar: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-annotations/target/hadoop-annotations-2.7.6-test-sources.jar [INFO] [INFO] --- maven-enforcer-plugin:1.4.1:enforce (dist-enforce) @ hadoop-annotations --- [INFO] [INFO] --- maven-javadoc-plugin:3.0.1:jar (module-javadocs) @ hadoop-annotations --- [INFO] Skipping javadoc generation [INFO] [INFO] --- maven-enforcer-plugin:1.4.1:enforce (depcheck) @ hadoop-annotations --- [INFO] [INFO] ----------------< org.apache.hadoop:hadoop-assemblies >----------------- [INFO] Building Apache Hadoop Assemblies 2.7.6 [5/61] [INFO] --------------------------------[ jar ]--------------------------------- [INFO] [INFO] --- maven-enforcer-plugin:1.4.1:enforce (default) @ hadoop-assemblies --- [INFO] [INFO] --- maven-antrun-plugin:1.8:run (create-testdirs) @ hadoop-assemblies --- [INFO] Executing tasks main: [mkdir] Created dir: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-assemblies/target/test-dir [INFO] Executed tasks [INFO] [INFO] --- maven-remote-resources-plugin:1.5:process (default) @ hadoop-assemblies --- [INFO] [INFO] --- maven-resources-plugin:3.1.0:resources (default-resources) @ hadoop-assemblies --- [INFO] Using 'UTF-8' encoding to copy filtered resources. [INFO] Copying 10 resources [INFO] Copying 2 resources [INFO] [INFO] --- maven-compiler-plugin:3.8.0:compile (default-compile) @ hadoop-assemblies --- [INFO] No sources to compile [INFO] [INFO] --- maven-resources-plugin:3.1.0:testResources (default-testResources) @ hadoop-assemblies --- [INFO] Using 'UTF-8' encoding to copy filtered resources. 
[INFO] skip non existing resourceDirectory /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-assemblies/src/test/resources [INFO] Copying 2 resources [INFO] [INFO] --- maven-compiler-plugin:3.8.0:testCompile (default-testCompile) @ hadoop-assemblies --- [INFO] No sources to compile [INFO] [INFO] --- maven-surefire-plugin:2.22.0:test (default-test) @ hadoop-assemblies --- [INFO] Tests are skipped. [INFO] [INFO] --- maven-jar-plugin:3.1.0:jar (default-jar) @ hadoop-assemblies --- [INFO] Building jar: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-assemblies/target/hadoop-assemblies-2.7.6.jar [INFO] [INFO] --- maven-source-plugin:3.0.1:jar-no-fork (hadoop-java-sources) @ hadoop-assemblies --- [INFO] Building jar: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-assemblies/target/hadoop-assemblies-2.7.6-sources.jar [INFO] [INFO] --- maven-source-plugin:3.0.1:test-jar-no-fork (hadoop-java-sources) @ hadoop-assemblies --- [INFO] Building jar: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-assemblies/target/hadoop-assemblies-2.7.6-test-sources.jar [INFO] [INFO] --- maven-enforcer-plugin:1.4.1:enforce (dist-enforce) @ hadoop-assemblies --- [INFO] [INFO] --- maven-javadoc-plugin:3.0.1:jar (module-javadocs) @ hadoop-assemblies --- [INFO] Skipping javadoc generation [INFO] [INFO] --- maven-enforcer-plugin:1.4.1:enforce (depcheck) @ hadoop-assemblies --- [INFO] [INFO] ---------------< org.apache.hadoop:hadoop-project-dist >---------------- [INFO] Building Apache Hadoop Project Dist POM 2.7.6 [6/61] [INFO] --------------------------------[ pom ]--------------------------------- [INFO] [INFO] --- maven-antrun-plugin:1.8:run (create-testdirs) @ hadoop-project-dist --- [INFO] Executing tasks main: [mkdir] Created dir: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-project-dist/target/test-dir [mkdir] Created dir: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-project-dist/target/test/data [INFO] Executed tasks [INFO] [INFO] --- maven-remote-resources-plugin:1.5:process (default) @ hadoop-project-dist --- [WARNING] Missing POM for com.sun:tools:jar:SYSTEM [INFO] [INFO] --- maven-jar-plugin:3.1.0:test-jar (prepare-test-jar) @ hadoop-project-dist --- [WARNING] JAR will be empty - no content was marked for inclusion! 
[INFO] Building jar: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-project-dist/target/hadoop-project-dist-2.7.6-tests.jar [INFO] [INFO] >>> maven-source-plugin:3.0.1:jar (default) > generate-sources @ hadoop-project-dist >>> [INFO] [INFO] --- maven-antrun-plugin:1.8:run (create-testdirs) @ hadoop-project-dist --- [INFO] Executing tasks main: [INFO] Executed tasks [INFO] [INFO] <<< maven-source-plugin:3.0.1:jar (default) < generate-sources @ hadoop-project-dist <<< [INFO] [INFO] [INFO] --- maven-source-plugin:3.0.1:jar (default) @ hadoop-project-dist --- [INFO] [INFO] >>> maven-source-plugin:3.0.1:test-jar (default) > generate-sources @ hadoop-project-dist >>> [INFO] [INFO] --- maven-antrun-plugin:1.8:run (create-testdirs) @ hadoop-project-dist --- [INFO] Executing tasks main: [INFO] Executed tasks [INFO] [INFO] <<< maven-source-plugin:3.0.1:test-jar (default) < generate-sources @ hadoop-project-dist <<< [INFO] [INFO] [INFO] --- maven-source-plugin:3.0.1:test-jar (default) @ hadoop-project-dist --- [INFO] [INFO] --- maven-antrun-plugin:1.8:run (pre-dist) @ hadoop-project-dist --- [INFO] Executing tasks main: [INFO] Executed tasks [INFO] [INFO] --- maven-assembly-plugin:3.1.0:single (dist) @ hadoop-project-dist --- [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /bin [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /etc/hadoop [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /libexec [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /sbin [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /sbin [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /share/doc/hadoop/${hadoop.component} [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /share/hadoop/${hadoop.component}/webapps [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /share/hadoop/${hadoop.component}/templates [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /share/hadoop/${hadoop.component}/templates/conf [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /share/hadoop/${hadoop.component} [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /share/hadoop/${hadoop.component}/sources [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /share/hadoop/${hadoop.component}/jdiff [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /share/hadoop/${hadoop.component}/jdiff [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /share/doc/hadoop/${hadoop.component} [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /include [WARNING] The following patterns were never triggered in this artifact exclusion filter: o 
'org.apache.ant:*:jar' o 'jdiff:jdiff:jar' [INFO] Copying files to /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-project-dist/target/hadoop-project-dist-2.7.6 [INFO] [INFO] --- maven-source-plugin:3.0.1:jar-no-fork (hadoop-java-sources) @ hadoop-project-dist --- [INFO] [INFO] --- maven-source-plugin:3.0.1:test-jar-no-fork (hadoop-java-sources) @ hadoop-project-dist --- [INFO] [INFO] --- maven-enforcer-plugin:1.4.1:enforce (dist-enforce) @ hadoop-project-dist --- [INFO] [INFO] --- maven-antrun-plugin:1.8:run (tar) @ hadoop-project-dist --- [INFO] Executing tasks main: [INFO] Executed tasks [INFO] [INFO] --- maven-javadoc-plugin:3.0.1:jar (module-javadocs) @ hadoop-project-dist --- [INFO] Skipping javadoc generation [INFO] [INFO] --- maven-enforcer-plugin:1.4.1:enforce (depcheck) @ hadoop-project-dist --- [INFO] [INFO] ---------------< org.apache.hadoop:hadoop-maven-plugins >--------------- [INFO] Building Apache Hadoop Maven Plugins 2.7.6 [7/61] [INFO] ----------------------------[ maven-plugin ]---------------------------- [INFO] [INFO] --- maven-antrun-plugin:1.8:run (create-testdirs) @ hadoop-maven-plugins --- [INFO] Executing tasks main: [mkdir] Created dir: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-maven-plugins/target/test-dir [INFO] Executed tasks [INFO] [INFO] --- maven-remote-resources-plugin:1.5:process (default) @ hadoop-maven-plugins --- [INFO] [INFO] --- maven-resources-plugin:3.1.0:resources (default-resources) @ hadoop-maven-plugins --- [INFO] Using 'UTF-8' encoding to copy filtered resources. [INFO] skip non existing resourceDirectory /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-maven-plugins/src/main/resources [INFO] Copying 2 resources [INFO] [INFO] --- maven-compiler-plugin:3.8.0:compile (default-compile) @ hadoop-maven-plugins --- [INFO] Compiling 4 source files to /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-maven-plugins/target/classes [INFO] [INFO] --- maven-plugin-plugin:3.5.1:descriptor (default-descriptor) @ hadoop-maven-plugins --- [INFO] Using 'UTF-8' encoding to read mojo source files. [INFO] java-javadoc mojo extractor found 0 mojo descriptor. [INFO] java-annotations mojo extractor found 2 mojo descriptors. [INFO] [INFO] --- maven-resources-plugin:3.1.0:testResources (default-testResources) @ hadoop-maven-plugins --- [INFO] Using 'UTF-8' encoding to copy filtered resources. [INFO] skip non existing resourceDirectory /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-maven-plugins/src/test/resources [INFO] Copying 2 resources [INFO] [INFO] --- maven-compiler-plugin:3.8.0:testCompile (default-testCompile) @ hadoop-maven-plugins --- [INFO] No sources to compile [INFO] [INFO] --- maven-surefire-plugin:2.22.0:test (default-test) @ hadoop-maven-plugins --- [INFO] Tests are skipped. 
[INFO] [INFO] --- maven-jar-plugin:3.1.0:jar (default-jar) @ hadoop-maven-plugins --- [INFO] Building jar: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-maven-plugins/target/hadoop-maven-plugins-2.7.6.jar [INFO] [INFO] --- maven-plugin-plugin:3.5.1:addPluginArtifactMetadata (default-addPluginArtifactMetadata) @ hadoop-maven-plugins --- [INFO] [INFO] --- maven-source-plugin:3.0.1:jar-no-fork (hadoop-java-sources) @ hadoop-maven-plugins --- [INFO] Building jar: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-maven-plugins/target/hadoop-maven-plugins-2.7.6-sources.jar [INFO] [INFO] --- maven-source-plugin:3.0.1:test-jar-no-fork (hadoop-java-sources) @ hadoop-maven-plugins --- [INFO] Building jar: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-maven-plugins/target/hadoop-maven-plugins-2.7.6-test-sources.jar [INFO] [INFO] --- maven-enforcer-plugin:1.4.1:enforce (dist-enforce) @ hadoop-maven-plugins --- [INFO] [INFO] --- maven-javadoc-plugin:3.0.1:jar (module-javadocs) @ hadoop-maven-plugins --- [INFO] Skipping javadoc generation [INFO] [INFO] --- maven-enforcer-plugin:1.4.1:enforce (depcheck) @ hadoop-maven-plugins --- [INFO] [INFO] -------------------< org.apache.hadoop:hadoop-auth >-------------------- [INFO] Building Apache Hadoop Auth 2.7.6 [8/61] [INFO] --------------------------------[ jar ]--------------------------------- [WARNING] The POM for com.sun:tools:jar:any is missing, no dependency information available [INFO] [INFO] --- maven-antrun-plugin:1.8:run (create-testdirs) @ hadoop-auth --- [INFO] Executing tasks main: [mkdir] Created dir: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-auth/target/test-dir [INFO] Executed tasks [INFO] [INFO] --- maven-remote-resources-plugin:1.5:process (default) @ hadoop-auth --- [WARNING] Missing POM for com.sun:tools:jar:any [INFO] [INFO] --- maven-resources-plugin:3.1.0:resources (default-resources) @ hadoop-auth --- [INFO] Using 'UTF-8' encoding to copy filtered resources. [INFO] skip non existing resourceDirectory /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-auth/src/main/resources [INFO] Copying 2 resources [INFO] [INFO] --- maven-compiler-plugin:3.8.0:compile (default-compile) @ hadoop-auth --- [INFO] Compiling 23 source files to /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-auth/target/classes [INFO] [INFO] --- maven-resources-plugin:3.1.0:testResources (default-testResources) @ hadoop-auth --- [INFO] Using 'UTF-8' encoding to copy filtered resources. [INFO] skip non existing resourceDirectory /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-auth/src/test/resources [INFO] Copying 2 resources [INFO] [INFO] --- maven-compiler-plugin:3.8.0:testCompile (default-testCompile) @ hadoop-auth --- [INFO] Compiling 17 source files to /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-auth/target/test-classes [INFO] [INFO] --- maven-surefire-plugin:2.22.0:test (default-test) @ hadoop-auth --- [INFO] Tests are skipped. 
[INFO] [INFO] >>> maven-source-plugin:3.0.1:jar (default) > generate-sources @ hadoop-auth >>> [INFO] [INFO] --- maven-antrun-plugin:1.8:run (create-testdirs) @ hadoop-auth --- [INFO] Executing tasks main: [INFO] Executed tasks [INFO] [INFO] <<< maven-source-plugin:3.0.1:jar (default) < generate-sources @ hadoop-auth <<< [INFO] [INFO] [INFO] --- maven-source-plugin:3.0.1:jar (default) @ hadoop-auth --- [INFO] Building jar: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-auth/target/hadoop-auth-2.7.6-sources.jar [INFO] [INFO] --- maven-jar-plugin:3.1.0:jar (default-jar) @ hadoop-auth --- [INFO] Building jar: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-auth/target/hadoop-auth-2.7.6.jar [INFO] [INFO] --- maven-source-plugin:3.0.1:jar-no-fork (hadoop-java-sources) @ hadoop-auth --- [INFO] Building jar: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-auth/target/hadoop-auth-2.7.6-sources.jar [INFO] [INFO] --- maven-source-plugin:3.0.1:test-jar-no-fork (hadoop-java-sources) @ hadoop-auth --- [INFO] Building jar: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-auth/target/hadoop-auth-2.7.6-test-sources.jar [INFO] [INFO] --- maven-enforcer-plugin:1.4.1:enforce (dist-enforce) @ hadoop-auth --- [INFO] [INFO] --- maven-javadoc-plugin:3.0.1:jar (module-javadocs) @ hadoop-auth --- [INFO] Skipping javadoc generation [INFO] [INFO] --- maven-enforcer-plugin:1.4.1:enforce (depcheck) @ hadoop-auth --- [INFO] [INFO] ---------------< org.apache.hadoop:hadoop-auth-examples >--------------- [INFO] Building Apache Hadoop Auth Examples 2.7.6 [9/61] [INFO] --------------------------------[ war ]--------------------------------- [INFO] [INFO] --- maven-antrun-plugin:1.8:run (create-testdirs) @ hadoop-auth-examples --- [INFO] Executing tasks main: [mkdir] Created dir: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-auth-examples/target/test-dir [INFO] Executed tasks [INFO] [INFO] --- maven-remote-resources-plugin:1.5:process (default) @ hadoop-auth-examples --- [INFO] [INFO] --- maven-resources-plugin:3.1.0:resources (default-resources) @ hadoop-auth-examples --- [INFO] Using 'UTF-8' encoding to copy filtered resources. [INFO] Copying 1 resource [INFO] Copying 2 resources [INFO] [INFO] --- maven-compiler-plugin:3.8.0:compile (default-compile) @ hadoop-auth-examples --- [INFO] Compiling 3 source files to /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-auth-examples/target/classes [INFO] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/RequestLoggerFilter.java: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/RequestLoggerFilter.java uses or overrides a deprecated API. [INFO] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/RequestLoggerFilter.java: Recompile with -Xlint:deprecation for details. [INFO] [INFO] --- maven-resources-plugin:3.1.0:testResources (default-testResources) @ hadoop-auth-examples --- [INFO] Using 'UTF-8' encoding to copy filtered resources. 
[INFO] skip non existing resourceDirectory /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-auth-examples/src/test/resources [INFO] Copying 2 resources [INFO] [INFO] --- maven-compiler-plugin:3.8.0:testCompile (default-testCompile) @ hadoop-auth-examples --- [INFO] No sources to compile [INFO] [INFO] --- maven-surefire-plugin:2.22.0:test (default-test) @ hadoop-auth-examples --- [INFO] Tests are skipped. [INFO] [INFO] --- maven-war-plugin:3.2.2:war (default-war) @ hadoop-auth-examples --- [INFO] Packaging webapp [INFO] Assembling webapp [hadoop-auth-examples] in [/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-auth-examples/target/hadoop-auth-examples-2.7.6] [INFO] Processing war project [INFO] Copying webapp resources [/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-auth-examples/src/main/webapp] [INFO] Webapp assembled in [1544 msecs] [INFO] Building war: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-auth-examples/target/hadoop-auth-examples.war [INFO] [INFO] --- maven-source-plugin:3.0.1:jar-no-fork (hadoop-java-sources) @ hadoop-auth-examples --- [INFO] Building jar: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-auth-examples/target/hadoop-auth-examples-2.7.6-sources.jar [INFO] [INFO] --- maven-source-plugin:3.0.1:test-jar-no-fork (hadoop-java-sources) @ hadoop-auth-examples --- [INFO] Building jar: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-auth-examples/target/hadoop-auth-examples-2.7.6-test-sources.jar [INFO] [INFO] --- maven-enforcer-plugin:1.4.1:enforce (dist-enforce) @ hadoop-auth-examples --- [INFO] [INFO] --- maven-javadoc-plugin:3.0.1:jar (module-javadocs) @ hadoop-auth-examples --- [INFO] Skipping javadoc generation [INFO] [INFO] --- maven-enforcer-plugin:1.4.1:enforce (depcheck) @ hadoop-auth-examples --- [INFO] [INFO] ------------------< org.apache.hadoop:hadoop-common >------------------- [INFO] Building Apache Hadoop Common 2.7.6 [10/61] [INFO] --------------------------------[ jar ]--------------------------------- [WARNING] The POM for com.centerkey.utils:BareBonesBrowserLaunch:jar:3.1 is missing, no dependency information available [INFO] [INFO] --- maven-enforcer-plugin:1.4.1:enforce (enforce-os) @ hadoop-common --- [INFO] [INFO] --- maven-antrun-plugin:1.8:run (create-testdirs) @ hadoop-common --- [INFO] Executing tasks main: [mkdir] Created dir: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/test-dir [mkdir] Created dir: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/test/data [INFO] Executed tasks [INFO] [INFO] --- hadoop-maven-plugins:2.7.6:protoc (compile-protoc) @ hadoop-common --- [INFO] [INFO] --- maven-remote-resources-plugin:1.5:process (default) @ hadoop-common --- [WARNING] Missing POM for com.centerkey.utils:BareBonesBrowserLaunch:jar:3.1 [INFO] [INFO] --- hadoop-maven-plugins:2.7.6:version-info (version-info) @ hadoop-common --- [WARNING] [svn, info] failed: java.io.IOException: Cannot run program "svn": error=2, No such file or directory [WARNING] [git, branch] failed: java.io.IOException: Cannot run program "git": error=2, No such file or directory [INFO] SCM: NONE [INFO] Computed MD5: f4684ce95d4841c4e396dde84c577b [INFO] [INFO] --- maven-resources-plugin:3.1.0:resources (default-resources) @ hadoop-common --- [INFO] Using 'UTF-8' encoding to copy filtered resources. 
[INFO] Copying 7 resources [INFO] Copying 1 resource [INFO] Copying 2 resources [INFO] [INFO] --- maven-compiler-plugin:3.8.0:compile (default-compile) @ hadoop-common --- [INFO] Compiling 856 source files to /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/classes [WARNING] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java:[50,19] sun.net.dns.ResolverConfiguration is internal proprietary API and may be removed in a future release [WARNING] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java:[51,20] sun.net.util.IPAddressUtil is internal proprietary API and may be removed in a future release [WARNING] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java:[46,16] sun.misc.Unsafe is internal proprietary API and may be removed in a future release [WARNING] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SignalLogger.java:[21,16] sun.misc.Signal is internal proprietary API and may be removed in a future release [WARNING] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SignalLogger.java:[22,16] sun.misc.SignalHandler is internal proprietary API and may be removed in a future release [WARNING] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java:[25,16] sun.misc.Unsafe is internal proprietary API and may be removed in a future release [WARNING] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java:[325,28] com.sun.jndi.ldap.LdapCtxFactory is internal proprietary API and may be removed in a future release [WARNING] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java:[507,9] sun.net.dns.ResolverConfiguration is internal proprietary API and may be removed in a future release [WARNING] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java:[525,11] sun.net.util.IPAddressUtil is internal proprietary API and may be removed in a future release [WARNING] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java:[527,21] sun.net.util.IPAddressUtil is internal proprietary API and may be removed in a future release [WARNING] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java:[529,18] sun.net.util.IPAddressUtil is internal proprietary API and may be removed in a future release [WARNING] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java:[531,21] sun.net.util.IPAddressUtil is internal proprietary API and may be removed in a future release [WARNING] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java:[331,39] sun.nio.ch.DirectBuffer is internal proprietary API and may be removed in a future release [WARNING] 
/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java:[332,17] sun.misc.Cleaner is internal proprietary API and may be removed in a future release [WARNING] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java:[333,25] sun.nio.ch.DirectBuffer is internal proprietary API and may be removed in a future release [WARNING] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java:[687,17] sun.misc.Unsafe is internal proprietary API and may be removed in a future release [WARNING] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java:[689,7] sun.misc.Unsafe is internal proprietary API and may be removed in a future release [WARNING] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java:[689,24] sun.misc.Unsafe is internal proprietary API and may be removed in a future release [WARNING] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SignalLogger.java:[44,43] sun.misc.SignalHandler is internal proprietary API and may be removed in a future release [WARNING] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SignalLogger.java:[46,19] sun.misc.SignalHandler is internal proprietary API and may be removed in a future release [WARNING] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SignalLogger.java:[50,39] sun.misc.Signal is internal proprietary API and may be removed in a future release [WARNING] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SignalLogger.java:[50,21] sun.misc.Signal is internal proprietary API and may be removed in a future release [WARNING] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SignalLogger.java:[59,24] sun.misc.Signal is internal proprietary API and may be removed in a future release [WARNING] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java:[39,37] sun.nio.ch.DirectBuffer is internal proprietary API and may be removed in a future release [WARNING] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java:[40,21] sun.misc.Cleaner is internal proprietary API and may be removed in a future release [WARNING] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java:[41,23] sun.nio.ch.DirectBuffer is internal proprietary API and may be removed in a future release [WARNING] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java:[136,20] sun.misc.Unsafe is internal proprietary API and may be removed in a future release [WARNING] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java:[142,22] sun.misc.Unsafe is internal proprietary API and may be removed in a future release 
[WARNING] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java:[147,29] sun.misc.Unsafe is internal proprietary API and may be removed in a future release [INFO] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java: Some input files use or override a deprecated API. [INFO] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java: Recompile with -Xlint:deprecation for details. [INFO] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java: Some input files use unchecked or unsafe operations. [INFO] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java: Recompile with -Xlint:unchecked for details. [INFO] [INFO] --- native-maven-plugin:1.0-alpha-8:javah (default) @ hadoop-common --- [INFO] /bin/sh -c cd '/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common' && '/usr/lib/jvm/java-1.8.0-openjdk/bin/javah' '-d' '/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah' '-classpath' '/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/classes:/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-annotations/target/hadoop-annotations-2.7.6.jar:/usr/lib/jvm/java-1.8.0-openjdk-1.8.0.144-1.riscv64.7.b01.fc28.riscv64/lib/tools.jar:/usr/share/java/guava20/guava-20.0.jar:/usr/share/java/commons-cli.jar:/usr/share/java/commons-math3.jar:/usr/share/java/xmlenc.jar:/usr/share/java/jakarta-commons-httpclient.jar:/usr/share/java/commons-codec.jar:/usr/share/java/commons-io.jar:/usr/share/java/commons-net.jar:/usr/share/java/apache-commons-collections.jar:/usr/share/java/jetty8/jetty-server-8.1.jar:/usr/share/java/jboss-servlet-3.0-api/jboss-servlet-api_3.0_spec.jar:/usr/share/java/jetty8/jetty-continuation-8.1.jar:/usr/share/java/jetty8/jetty-http-8.1.jar:/usr/share/java/jetty8/jetty-io-8.1.jar:/usr/share/java/jetty8/jetty-servlet-8.1.jar:/usr/share/java/jetty8/jetty-security-8.1.jar:/usr/share/java/jetty8/jetty-webapp-8.1.jar:/usr/share/java/jetty8/jetty-xml-8.1.jar:/usr/share/java/jetty8/jetty-util-8.1.jar:/usr/share/java/jersey1/jersey-core-1.19.jar:/usr/share/java/jsr-311.jar:/usr/share/java/jersey1/jersey-json-1.19.jar:/usr/share/java/jettison/jettison.jar:/usr/share/java/glassfish-jaxb/jaxb-runtime.jar:/usr/share/java/glassfish-jaxb/jaxb-core.jar:/usr/share/java/jaxb-api.jar:/usr/share/java/glassfish-jaxb/txw2.jar:/usr/share/java/istack-commons-runtime.jar:/usr/share/java/stax-ex.jar:/usr/share/java/glassfish-fastinfoset.jar:/usr/share/java/jackson/jackson-jaxrs.jar:/usr/share/java/jackson/jackson-xc.jar:/usr/share/java/jersey1/jersey-server-1.19.jar:/usr/share/java/objectweb-asm/asm.jar:/usr/share/java/jersey1/jersey-servlet-1.19.jar:/usr/share/java/commons-logging.jar:/usr/share/java/log4j-1.2.17.jar:/usr/share/java/jets3t/jets3t.jar:/usr/share/java/BareBonesBrowserLaunch.jar:/usr/share/java/httpcomponents/httpcore.jar:/usr/share/java/httpcomponents/httpclient.jar:/usr/share/java/mx4j/mx4j.jar:/usr/share/java/javamail/javax.mail.jar:/usr/share/java/bcprov.jar:/usr/share/java/java-xmlbuilder.jar:/usr/share/java/java-base64.jar:/usr/share/java/apache-commons-lang.jar:/usr/share/java/commons-configuration.jar:/usr/share/java/slf4j/slf4j-api.jar:/usr
/share/java/jackson/jackson-core-asl.jar:/usr/share/java/jackson/jackson-mapper-asl.jar:/usr/share/java/avro/avro.jar:/usr/share/java/xz-java.jar:/usr/share/java/paranamer/paranamer.jar:/usr/lib/java/snappy-java/snappy-java.jar:/usr/share/java/protobuf/protobuf-java.jar:/usr/share/java/google-gson/gson.jar:/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-auth/target/hadoop-auth-2.7.6.jar:/usr/share/java/apacheds/apacheds-kerberos-codec.jar:/usr/share/java/apacheds/apacheds-i18n.jar:/usr/share/java/apacheds-ldap-api/api-asn1-api.jar:/usr/share/java/apacheds-ldap-api/api-util.jar:/usr/share/java/ehcache-core.jar:/usr/share/java/slf4j/slf4j-jdk14.jar:/usr/share/java/glassfish-servlet-api.jar:/usr/share/java/hibernate3/hibernate-core-3.jar:/usr/share/java/antlr.jar:/usr/share/java/dom4j/dom4j.jar:/usr/share/java/jaxen.jar:/usr/share/java/hibernate-commons-annotations/hibernate-commons-annotations.jar:/usr/share/java/jboss-logging-tools/jboss-logging-annotations.jar:/usr/share/java/jboss-logging/jboss-logging.jar:/usr/share/java/hibernate-jpa-2.0-api/hibernate-jpa-2.0-api.jar:/usr/share/java/jboss-transaction-1.1-api/jboss-transaction-api_1.1_spec.jar:/usr/share/java/geronimo-jta.jar:/usr/share/java/ehcache-sizeof-agent.jar:/usr/share/java/curator/curator-framework.jar:/usr/share/java/jsch.jar:/usr/share/java/curator/curator-client.jar:/usr/share/java/curator/curator-recipes.jar:/usr/share/java/jsr-305.jar:/usr/share/java/htrace/htrace-core.jar:/usr/share/java/jackson-core.jar:/usr/share/java/jackson-databind.jar:/usr/share/java/jackson-annotations.jar:/usr/share/java/zookeeper/zookeeper.jar:/usr/share/java/netty3-3.10.6.jar:/usr/share/java/jzlib.jar:/usr/share/java/commons-compress.jar' 'org.apache.hadoop.io.compress.zlib.ZlibCompressor' 'org.apache.hadoop.io.compress.zlib.ZlibDecompressor' 'org.apache.hadoop.io.compress.bzip2.Bzip2Compressor' 'org.apache.hadoop.io.compress.bzip2.Bzip2Decompressor' 'org.apache.hadoop.security.JniBasedUnixGroupsMapping' 'org.apache.hadoop.io.nativeio.NativeIO' 'org.apache.hadoop.io.nativeio.SharedFileDescriptorFactory' 'org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping' 'org.apache.hadoop.io.compress.snappy.SnappyCompressor' 'org.apache.hadoop.io.compress.snappy.SnappyDecompressor' 'org.apache.hadoop.io.compress.lz4.Lz4Compressor' 'org.apache.hadoop.io.compress.lz4.Lz4Decompressor' 'org.apache.hadoop.crypto.OpensslCipher' 'org.apache.hadoop.crypto.random.OpensslSecureRandom' 'org.apache.hadoop.util.NativeCrc32' 'org.apache.hadoop.net.unix.DomainSocket' 'org.apache.hadoop.net.unix.DomainSocketWatcher' [INFO] [INFO] --- maven-antrun-plugin:1.8:run (make) @ hadoop-common --- [INFO] Executing tasks main: [exec] -- The C compiler identification is GNU 8.2.1 [exec] -- The CXX compiler identification is GNU 8.2.1 [exec] -- Check for working C compiler: /usr/bin/cc [exec] -- Check for working C compiler: /usr/bin/cc -- works [exec] -- Detecting C compiler ABI info [exec] -- Detecting C compiler ABI info - done [exec] -- Detecting C compile features [exec] -- Detecting C compile features - done [exec] -- Check for working CXX compiler: /usr/bin/c++ [exec] -- Check for working CXX compiler: /usr/bin/c++ -- works [exec] -- Detecting CXX compiler ABI info [exec] -- Detecting CXX compiler ABI info - done [exec] -- Detecting CXX compile features [exec] -- Detecting CXX compile features - done [exec] -- Found JNI: /usr/lib/jvm/java-1.8.0-openjdk/jre/lib/riscv64/libjawt.so JAVA_HOME=, 
JAVA_JVM_LIBRARY=/usr/lib/jvm/java-1.8.0-openjdk/jre/lib/riscv64/server/libjvm.so [exec] [exec] JAVA_INCLUDE_PATH=/usr/lib/jvm/java-1.8.0-openjdk/include, JAVA_INCLUDE_PATH2=/usr/lib/jvm/java-1.8.0-openjdk/include/linux [exec] Located all JNI components successfully. [exec] -- Found ZLIB: /usr/lib64/libz.so.1 (found version "1.2.11") [exec] -- Looking for sync_file_range [exec] -- Looking for sync_file_range - found [exec] -- Looking for posix_fadvise [exec] -- Looking for posix_fadvise - found [exec] -- Looking for dlopen in dl [exec] -- Looking for dlopen in dl - foundCUSTOM_OPENSSL_PREFIX = [exec] [exec] -- Performing Test HAS_NEW_ENOUGH_OPENSSL [exec] -- Performing Test HAS_NEW_ENOUGH_OPENSSL - Success [exec] -- Configuring done [exec] -- Generating done [exec] -- Build files have been written to: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native [exec] CMake Warning: [exec] Manually-specified variables were not used by the project: [exec] [exec] REQUIRE_OPENSSL [exec] REQUIRE_SNAPPY [exec] [exec] [exec] /usr/bin/cmake -H/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -B/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native --check-build-system CMakeFiles/Makefile.cmake 0 [exec] /usr/bin/cmake -E cmake_progress_start /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/CMakeFiles /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/CMakeFiles/progress.marks [exec] make -f CMakeFiles/Makefile2 all [exec] make[1]: Entering directory '/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native' [exec] make -f CMakeFiles/test_bulk_crc32.dir/build.make CMakeFiles/test_bulk_crc32.dir/depend [exec] make[2]: Entering directory '/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native' [exec] cd /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native && /usr/bin/cmake -E cmake_depends "Unix Makefiles" /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/CMakeFiles/test_bulk_crc32.dir/DependInfo.cmake --color= [exec] Dependee "/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/CMakeFiles/test_bulk_crc32.dir/DependInfo.cmake" is newer than depender "/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/CMakeFiles/test_bulk_crc32.dir/depend.internal". [exec] Dependee "/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/CMakeFiles/CMakeDirectoryInformation.cmake" is newer than depender "/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/CMakeFiles/test_bulk_crc32.dir/depend.internal". 
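The configure pass above probes for sync_file_range and posix_fadvise before the native makefiles are generated; libhadoop's NativeIO wrappers use those two Linux calls for readahead and drop-behind when they are present. The short program below is a hand-written sketch of how the pair fits together, not the probe CMake itself compiles, and its file names are made up for illustration.

/* fadvise_demo.c - illustrative sketch only, not Hadoop source.
 * Exercises the two Linux calls the configure step checks for:
 * sync_file_range() to start writeback of a dirty range, then
 * posix_fadvise(POSIX_FADV_DONTNEED) to drop the cached pages.
 */
#define _GNU_SOURCE            /* sync_file_range() is a Linux extension */
#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>

int main(void) {
    int fd = open("/tmp/fadvise_demo.dat", O_CREAT | O_WRONLY | O_TRUNC, 0600);
    if (fd < 0) { perror("open"); return 1; }

    if (write(fd, "hello", 5) != 5) { perror("write"); close(fd); return 1; }

    /* Kick off writeback for the dirty range (returns -1 and sets errno on error). */
    if (sync_file_range(fd, 0, 5, SYNC_FILE_RANGE_WRITE) != 0)
        perror("sync_file_range");

    /* Drop the now-unneeded page cache (returns an errno value directly). */
    int rc = posix_fadvise(fd, 0, 5, POSIX_FADV_DONTNEED);
    if (rc != 0)
        fprintf(stderr, "posix_fadvise: %s\n", strerror(rc));

    close(fd);
    return 0;
}

Needing _GNU_SOURCE for sync_file_range is also one reason every compile line in this log carries -D_GNU_SOURCE. The CMake warning about REQUIRE_OPENSSL and REQUIRE_SNAPPY is harmless here: it only says that this CMakeLists.txt never reads those two cache variables passed in by the build.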
[exec] Scanning dependencies of target test_bulk_crc32 [exec] make[2]: Leaving directory '/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native' [exec] make -f CMakeFiles/test_bulk_crc32.dir/build.make CMakeFiles/test_bulk_crc32.dir/build [exec] make[2]: Entering directory '/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native' [exec] [ 1%] Building C object CMakeFiles/test_bulk_crc32.dir/main/native/src/org/apache/hadoop/util/bulk_crc32.c.o [exec] /usr/bin/cc -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -o CMakeFiles/test_bulk_crc32.dir/main/native/src/org/apache/hadoop/util/bulk_crc32.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c [exec] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c:53:13: warning: ‘pipelined_crc32c’ used but never defined [exec] static void pipelined_crc32c(uint32_t *crc1, uint32_t *crc2, uint32_t *crc3, const uint8_t *p_buf, size_t block_size, int num_blocks); [exec] ^~~~~~~~~~~~~~~~ [exec] [ 3%] Building C object CMakeFiles/test_bulk_crc32.dir/main/native/src/test/org/apache/hadoop/util/test_bulk_crc32.c.o [exec] /usr/bin/cc -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -o CMakeFiles/test_bulk_crc32.dir/main/native/src/test/org/apache/hadoop/util/test_bulk_crc32.c.o -c 
/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/test/org/apache/hadoop/util/test_bulk_crc32.c [exec] [ 5%] Linking C executable test_bulk_crc32 [exec] /usr/bin/cmake -E cmake_link_script CMakeFiles/test_bulk_crc32.dir/link.txt --verbose=1 [exec] /usr/bin/cc -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -Wl,-z,relro -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -rdynamic CMakeFiles/test_bulk_crc32.dir/main/native/src/org/apache/hadoop/util/bulk_crc32.c.o CMakeFiles/test_bulk_crc32.dir/main/native/src/test/org/apache/hadoop/util/test_bulk_crc32.c.o -o test_bulk_crc32 [exec] make[2]: Leaving directory '/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native' [exec] [ 5%] Built target test_bulk_crc32 [exec] make -f CMakeFiles/hadoop_static.dir/build.make CMakeFiles/hadoop_static.dir/depend [exec] make[2]: Entering directory '/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native' [exec] cd /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native && /usr/bin/cmake -E cmake_depends "Unix Makefiles" /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/CMakeFiles/hadoop_static.dir/DependInfo.cmake --color= [exec] Dependee "/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/CMakeFiles/hadoop_static.dir/DependInfo.cmake" is newer than depender "/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/CMakeFiles/hadoop_static.dir/depend.internal". [exec] Dependee "/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/CMakeFiles/CMakeDirectoryInformation.cmake" is newer than depender "/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/CMakeFiles/hadoop_static.dir/depend.internal". 
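The "pipelined_crc32c used but never defined" warning emitted for bulk_crc32.c above is expected on riscv64: the forward declaration, and a call that the optimizer later discards, are compiled on every architecture, while the pipelined implementation is presumably only defined behind an x86-specific guard, so the object still compiles with -c and test_bulk_crc32 links cleanly. The fragment below is a minimal sketch of that pattern under a GCC-style toolchain; it is not the Hadoop source, and the guard macro and function body are illustrative only.

/* crc_warn_demo.c - sketch of the "used but never defined" pattern.
 * The static helper is declared unconditionally, defined only on x86,
 * and called from a branch that is constant-false elsewhere, so non-x86
 * builds warn at compile time while -O2 removes the dead call and the
 * link still succeeds.
 */
#include <stddef.h>
#include <stdint.h>

/* Unconditional forward declaration, as at bulk_crc32.c:53. */
static void pipelined_crc32c(uint32_t *crc1, uint32_t *crc2, uint32_t *crc3,
                             const uint8_t *buf, size_t block_size,
                             int num_blocks);

#if defined(__x86_64__) || defined(__i386__)
#define USE_PIPELINED 1
static void pipelined_crc32c(uint32_t *crc1, uint32_t *crc2, uint32_t *crc3,
                             const uint8_t *buf, size_t block_size,
                             int num_blocks) {
  /* a real SSE4.2 pipelined CRC32C implementation would live here */
  (void)crc1; (void)crc2; (void)crc3; (void)buf; (void)block_size; (void)num_blocks;
}
#else
#define USE_PIPELINED 0
#endif

uint32_t bulk_checksum(const uint8_t *buf, size_t len) {
  uint32_t c1 = ~0u, c2 = ~0u, c3 = ~0u;
  if (USE_PIPELINED) {
    /* Parsed on every architecture (hence the warning), but dead code
     * on riscv64, so no undefined reference survives to link time. */
    pipelined_crc32c(&c1, &c2, &c3, buf, len, 1);
  }
  return c1 ^ c2 ^ c3;
}

The same diagnostic shows up again below when bulk_crc32.c is rebuilt for the hadoop_static library.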
[exec] Scanning dependencies of target hadoop_static [exec] make[2]: Leaving directory '/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native' [exec] make -f CMakeFiles/hadoop_static.dir/build.make CMakeFiles/hadoop_static.dir/build [exec] make[2]: Entering directory '/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native' [exec] [ 7%] Building C object CMakeFiles/hadoop_static.dir/main/native/src/exception.c.o [exec] /usr/bin/cc -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -o CMakeFiles/hadoop_static.dir/main/native/src/exception.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/exception.c [exec] [ 9%] Building C object CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c.o [exec] /usr/bin/cc -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c [exec] [ 11%] Building C object CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c.o [exec] /usr/bin/cc -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src 
-I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c [exec] [ 13%] Building C object CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/compress/lz4/lz4.c.o [exec] /usr/bin/cc -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/compress/lz4/lz4.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4.c [exec] [ 15%] Building C object CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/compress/lz4/lz4hc.c.o [exec] /usr/bin/cc -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 
-specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/compress/lz4/lz4hc.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4hc.c [exec] [ 16%] Building C object CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c.o [exec] /usr/bin/cc -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c [exec] [ 18%] Building C object CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c.o [exec] /usr/bin/cc -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c [exec] [ 20%] Building C object CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/crypto/OpensslCipher.c.o [exec] /usr/bin/cc 
-I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/crypto/OpensslCipher.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/OpensslCipher.c [exec] [ 22%] Building C object CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/crypto/random/OpensslSecureRandom.c.o [exec] /usr/bin/cc -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/crypto/random/OpensslSecureRandom.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/random/OpensslSecureRandom.c [exec] In file included from /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/random/OpensslSecureRandom.c:19: [exec] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/random/org_apache_hadoop_crypto_random.h:33: warning: "UNUSED" redefined [exec] #define UNUSED(x) ((void)(x)) [exec] [exec] In file included from /usr/lib/jvm/java-1.8.0-openjdk/include/jni.h:45, [exec] from /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org_apache_hadoop.h:67, [exec] from /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/random/org_apache_hadoop_crypto_random.h:22, [exec] from 
/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/random/OpensslSecureRandom.c:19: [exec] /usr/lib/jvm/java-1.8.0-openjdk/include/linux/jni_md.h:40: note: this is the location of the previous definition [exec] #define UNUSED(x) UNUSED_ ## x __attribute__((__unused__)) [exec] [exec] [ 24%] Building C object CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c.o [exec] /usr/bin/cc -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c [exec] [ 26%] Building C object CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c.o [exec] /usr/bin/cc -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c [exec] [ 28%] Building C object CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c.o [exec] /usr/bin/cc -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah 
-I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c [exec] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c: In function ‘Java_org_apache_hadoop_io_nativeio_NativeIO_00024Windows_createFileWithMode0’: [exec] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c:596:1: warning: control reaches end of non-void function [-Wreturn-type] [exec] } [exec] ^ [exec] [ 30%] Building C object CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/nativeio/errno_enum.c.o [exec] /usr/bin/cc -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/nativeio/errno_enum.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/errno_enum.c [exec] [ 32%] Building C object CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c.o [exec] /usr/bin/cc -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src 
-I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c [exec] [ 33%] Building C object CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.c.o [exec] /usr/bin/cc -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.c [exec] [ 35%] Building C object CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/net/unix/DomainSocket.c.o [exec] /usr/bin/cc -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 
-fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/net/unix/DomainSocket.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/net/unix/DomainSocket.c [exec] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/net/unix/DomainSocket.c: In function ‘setup’: [exec] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/net/unix/DomainSocket.c:156:26: warning: argument to ‘sizeof’ in ‘memset’ call is the same expression as the destination; did you mean to remove the addressof? [-Wsizeof-pointer-memaccess] [exec] memset(&addr, 0, sizeof(&addr)); [exec] ^ [exec] [ 37%] Building C object CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/net/unix/DomainSocketWatcher.c.o [exec] /usr/bin/cc -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/net/unix/DomainSocketWatcher.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/net/unix/DomainSocketWatcher.c [exec] [ 39%] Building C object CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c.o [exec] /usr/bin/cc -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -o 
CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c [exec] [ 41%] Building C object CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c.o [exec] /usr/bin/cc -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c [exec] [ 43%] Building C object CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/security/hadoop_group_info.c.o [exec] /usr/bin/cc -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/security/hadoop_group_info.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/hadoop_group_info.c [exec] [ 45%] Building C object CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/security/hadoop_user_info.c.o [exec] /usr/bin/cc -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src 
-I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/security/hadoop_user_info.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/hadoop_user_info.c [exec] [ 47%] Building C object CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c.o [exec] /usr/bin/cc -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c [exec] [ 49%] Building C object CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/util/NativeCrc32.c.o [exec] /usr/bin/cc -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 
-fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/util/NativeCrc32.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c [exec] In file included from /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c:32: [exec] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c: In function ‘Java_org_apache_hadoop_util_NativeCrc32_nativeComputeChunkedSums’: [exec] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c:171:21: warning: suggest parentheses around ‘&&’ within ‘||’ [-Wparentheses] [exec] if (likely(verify && ret == CHECKSUMS_VALID || !verify && ret == 0)) { [exec] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/gcc_optimizations.h:23:43: note: in definition of macro ‘likely’ [exec] #define likely(x) __builtin_expect((x),1) [exec] ^ [exec] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c: In function ‘Java_org_apache_hadoop_util_NativeCrc32_nativeComputeChunkedSumsByteArray’: [exec] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c:264:32: warning: suggest parentheses around ‘&&’ within ‘||’ [-Wparentheses] [exec] } else if (unlikely(verify && ret != CHECKSUMS_VALID || !verify && ret != 0)) { [exec] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/gcc_optimizations.h:24:43: note: in definition of macro ‘unlikely’ [exec] #define unlikely(x) __builtin_expect((x),0) [exec] ^ [exec] [ 50%] Building C object CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/util/bulk_crc32.c.o [exec] /usr/bin/cc -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/util/bulk_crc32.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c [exec] 
/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c:53:13: warning: ‘pipelined_crc32c’ used but never defined [exec] static void pipelined_crc32c(uint32_t *crc1, uint32_t *crc2, uint32_t *crc3, const uint8_t *p_buf, size_t block_size, int num_blocks); [exec] ^~~~~~~~~~~~~~~~ [exec] [ 52%] Linking C static library target/usr/local/lib/libhadoop.a [exec] /usr/bin/cmake -P CMakeFiles/hadoop_static.dir/cmake_clean_target.cmake [exec] /usr/bin/cmake -E cmake_link_script CMakeFiles/hadoop_static.dir/link.txt --verbose=1 [exec] /usr/bin/ar qc target/usr/local/lib/libhadoop.a CMakeFiles/hadoop_static.dir/main/native/src/exception.c.o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c.o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c.o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/compress/lz4/lz4.c.o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/compress/lz4/lz4hc.c.o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c.o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c.o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/crypto/OpensslCipher.c.o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/crypto/random/OpensslSecureRandom.c.o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c.o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c.o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c.o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/nativeio/errno_enum.c.o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c.o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.c.o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/net/unix/DomainSocket.c.o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/net/unix/DomainSocketWatcher.c.o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c.o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c.o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/security/hadoop_group_info.c.o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/security/hadoop_user_info.c.o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c.o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/util/NativeCrc32.c.o CMakeFiles/hadoop_static.dir/main/native/src/org/apache/hadoop/util/bulk_crc32.c.o [exec] /usr/bin/ranlib target/usr/local/lib/libhadoop.a [exec] make[2]: Leaving directory '/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native' [exec] [ 52%] Built target hadoop_static [exec] make -f CMakeFiles/hadoop.dir/build.make CMakeFiles/hadoop.dir/depend [exec] make[2]: Entering directory '/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native' [exec] cd /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native && /usr/bin/cmake -E cmake_depends "Unix Makefiles" 
/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/CMakeFiles/hadoop.dir/DependInfo.cmake --color= [exec] Dependee "/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/CMakeFiles/hadoop.dir/DependInfo.cmake" is newer than depender "/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/CMakeFiles/hadoop.dir/depend.internal". [exec] Dependee "/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/CMakeFiles/CMakeDirectoryInformation.cmake" is newer than depender "/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/CMakeFiles/hadoop.dir/depend.internal". [exec] Scanning dependencies of target hadoop [exec] make[2]: Leaving directory '/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native' [exec] make -f CMakeFiles/hadoop.dir/build.make CMakeFiles/hadoop.dir/build [exec] make[2]: Entering directory '/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native' [exec] [ 54%] Building C object CMakeFiles/hadoop.dir/main/native/src/exception.c.o [exec] /usr/bin/cc -Dhadoop_EXPORTS -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -fPIC -o CMakeFiles/hadoop.dir/main/native/src/exception.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/exception.c [exec] [ 56%] Building C object CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c.o [exec] /usr/bin/cc -Dhadoop_EXPORTS -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux 
-I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -fPIC -o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c [exec] [ 58%] Building C object CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c.o [exec] /usr/bin/cc -Dhadoop_EXPORTS -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -fPIC -o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c [exec] [ 60%] Building C object CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/compress/lz4/lz4.c.o [exec] /usr/bin/cc -Dhadoop_EXPORTS -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -fPIC -o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/compress/lz4/lz4.c.o -c 
/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4.c [exec] [ 62%] Building C object CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/compress/lz4/lz4hc.c.o [exec] /usr/bin/cc -Dhadoop_EXPORTS -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -fPIC -o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/compress/lz4/lz4hc.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4hc.c [exec] [ 64%] Building C object CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c.o [exec] /usr/bin/cc -Dhadoop_EXPORTS -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -fPIC -o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c [exec] [ 66%] Building C object CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c.o [exec] /usr/bin/cc -Dhadoop_EXPORTS -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src 
-I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -fPIC -o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c [exec] [ 67%] Building C object CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/crypto/OpensslCipher.c.o [exec] /usr/bin/cc -Dhadoop_EXPORTS -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -fPIC -o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/crypto/OpensslCipher.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/OpensslCipher.c [exec] [ 69%] Building C object CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/crypto/random/OpensslSecureRandom.c.o [exec] /usr/bin/cc -Dhadoop_EXPORTS -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE 
-D_FILE_OFFSET_BITS=64 -fPIC -o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/crypto/random/OpensslSecureRandom.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/random/OpensslSecureRandom.c [exec] In file included from /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/random/OpensslSecureRandom.c:19: [exec] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/random/org_apache_hadoop_crypto_random.h:33: warning: "UNUSED" redefined [exec] #define UNUSED(x) ((void)(x)) [exec] [exec] In file included from /usr/lib/jvm/java-1.8.0-openjdk/include/jni.h:45, [exec] from /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org_apache_hadoop.h:67, [exec] from /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/random/org_apache_hadoop_crypto_random.h:22, [exec] from /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/random/OpensslSecureRandom.c:19: [exec] /usr/lib/jvm/java-1.8.0-openjdk/include/linux/jni_md.h:40: note: this is the location of the previous definition [exec] #define UNUSED(x) UNUSED_ ## x __attribute__((__unused__)) [exec] [exec] [ 71%] Building C object CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c.o [exec] /usr/bin/cc -Dhadoop_EXPORTS -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -fPIC -o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c [exec] [ 73%] Building C object CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c.o [exec] /usr/bin/cc -Dhadoop_EXPORTS -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native 
-I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -fPIC -o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c [exec] [ 75%] Building C object CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c.o [exec] /usr/bin/cc -Dhadoop_EXPORTS -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -fPIC -o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c [exec] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c: In function ‘Java_org_apache_hadoop_io_nativeio_NativeIO_00024Windows_createFileWithMode0’: [exec] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c:596:1: warning: control reaches end of non-void function [-Wreturn-type] [exec] } [exec] ^ [exec] [ 77%] Building C object CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/nativeio/errno_enum.c.o [exec] /usr/bin/cc -Dhadoop_EXPORTS -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util 
-O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -fPIC -o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/nativeio/errno_enum.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/errno_enum.c [exec] [ 79%] Building C object CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c.o [exec] /usr/bin/cc -Dhadoop_EXPORTS -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -fPIC -o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c [exec] [ 81%] Building C object CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.c.o [exec] /usr/bin/cc -Dhadoop_EXPORTS -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -fPIC -o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.c [exec] [ 83%] 
Building C object CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/net/unix/DomainSocket.c.o [exec] /usr/bin/cc -Dhadoop_EXPORTS -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -fPIC -o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/net/unix/DomainSocket.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/net/unix/DomainSocket.c [exec] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/net/unix/DomainSocket.c: In function ‘setup’: [exec] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/net/unix/DomainSocket.c:156:26: warning: argument to ‘sizeof’ in ‘memset’ call is the same expression as the destination; did you mean to remove the addressof? 
[-Wsizeof-pointer-memaccess] [exec] memset(&addr, 0, sizeof(&addr)); [exec] ^ [exec] [ 84%] Building C object CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/net/unix/DomainSocketWatcher.c.o [exec] /usr/bin/cc -Dhadoop_EXPORTS -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -fPIC -o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/net/unix/DomainSocketWatcher.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/net/unix/DomainSocketWatcher.c [exec] [ 86%] Building C object CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c.o [exec] /usr/bin/cc -Dhadoop_EXPORTS -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -fPIC -o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c [exec] [ 88%] Building C object CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c.o [exec] /usr/bin/cc -Dhadoop_EXPORTS -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src 
-I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -fPIC -o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c [exec] [ 90%] Building C object CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/security/hadoop_group_info.c.o [exec] /usr/bin/cc -Dhadoop_EXPORTS -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -fPIC -o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/security/hadoop_group_info.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/hadoop_group_info.c [exec] [ 92%] Building C object CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/security/hadoop_user_info.c.o [exec] /usr/bin/cc -Dhadoop_EXPORTS -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE 
-D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -fPIC -o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/security/hadoop_user_info.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/hadoop_user_info.c [exec] [ 94%] Building C object CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c.o [exec] /usr/bin/cc -Dhadoop_EXPORTS -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -fPIC -o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c [exec] [ 96%] Building C object CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/util/NativeCrc32.c.o [exec] /usr/bin/cc -Dhadoop_EXPORTS -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -fPIC -o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/util/NativeCrc32.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c [exec] In file included from /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c:32: [exec] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c: In function ‘Java_org_apache_hadoop_util_NativeCrc32_nativeComputeChunkedSums’: [exec] 
/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c:171:21: warning: suggest parentheses around ‘&&’ within ‘||’ [-Wparentheses] [exec] if (likely(verify && ret == CHECKSUMS_VALID || !verify && ret == 0)) { [exec] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/gcc_optimizations.h:23:43: note: in definition of macro ‘likely’ [exec] #define likely(x) __builtin_expect((x),1) [exec] ^ [exec] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c: In function ‘Java_org_apache_hadoop_util_NativeCrc32_nativeComputeChunkedSumsByteArray’: [exec] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c:264:32: warning: suggest parentheses around ‘&&’ within ‘||’ [-Wparentheses] [exec] } else if (unlikely(verify && ret != CHECKSUMS_VALID || !verify && ret != 0)) { [exec] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/gcc_optimizations.h:24:43: note: in definition of macro ‘unlikely’ [exec] #define unlikely(x) __builtin_expect((x),0) [exec] ^ [exec] [ 98%] Building C object CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/util/bulk_crc32.c.o [exec] /usr/bin/cc -Dhadoop_EXPORTS -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/javah -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/src -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native -I/usr/lib/jvm/java-1.8.0-openjdk/include -I/usr/lib/jvm/java-1.8.0-openjdk/include/linux -I/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables -fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -fPIC -o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/util/bulk_crc32.c.o -c /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c [exec] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c:53:13: warning: ‘pipelined_crc32c’ used but never defined [exec] static void pipelined_crc32c(uint32_t *crc1, uint32_t *crc2, uint32_t *crc3, const uint8_t *p_buf, size_t block_size, int num_blocks); [exec] ^~~~~~~~~~~~~~~~ [exec] [100%] Linking C shared library target/usr/local/lib/libhadoop.so [exec] /usr/bin/cmake -E cmake_link_script CMakeFiles/hadoop.dir/link.txt --verbose=1 [exec] /usr/bin/cc -fPIC -O2 -g -pipe -Wall -Werror=format-security -Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fexceptions -fstack-protector-strong -grecord-gcc-switches -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -fasynchronous-unwind-tables 
-fstack-clash-protection -g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -Wl,-z,relro -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -shared -Wl,-soname,libhadoop.so.1.0.0 -o target/usr/local/lib/libhadoop.so.1.0.0 CMakeFiles/hadoop.dir/main/native/src/exception.c.o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c.o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c.o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/compress/lz4/lz4.c.o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/compress/lz4/lz4hc.c.o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c.o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c.o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/crypto/OpensslCipher.c.o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/crypto/random/OpensslSecureRandom.c.o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c.o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c.o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c.o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/nativeio/errno_enum.c.o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c.o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.c.o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/net/unix/DomainSocket.c.o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/net/unix/DomainSocketWatcher.c.o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c.o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c.o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/security/hadoop_group_info.c.o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/security/hadoop_user_info.c.o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c.o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/util/NativeCrc32.c.o CMakeFiles/hadoop.dir/main/native/src/org/apache/hadoop/util/bulk_crc32.c.o -Wl,-rpath,"\$ORIGIN/" -ldl [exec] /usr/bin/ld: CMakeFiles/hadoop.dir/main/native/src/exception.c.o: in function `terror':/usr/bin/cmake -E cmake_symlink_library target/usr/local/lib/libhadoop.so.1.0.0 target/usr/local/lib/libhadoop.so.1.0.0 target/usr/local/lib/libhadoop.so [exec] [exec] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/exception.c:121: warning: `sys_errlist' is deprecated; use `strerror' or `strerror_r' instead [exec] /usr/bin/ld: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/main/native/src/exception.c:118: warning: `sys_nerr' is deprecated; use `strerror' or `strerror_r' instead [exec] make[2]: Leaving directory '/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native' [exec] [100%] Built target hadoop [exec] make[1]: Leaving directory '/builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native' [exec] /usr/bin/cmake -E cmake_progress_start /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/native/CMakeFiles 0 [exec] [ 5%] Built target test_bulk_crc32 [exec] [ 52%] Built target hadoop_static [exec] [100%] 
Built target hadoop [INFO] Executed tasks [INFO] [INFO] --- avro-maven-plugin:1.7.6:schema (generate-avro-test-sources) @ hadoop-common --- [INFO] [INFO] --- hadoop-maven-plugins:2.7.6:protoc (compile-test-protoc) @ hadoop-common --- [INFO] [INFO] --- maven-resources-plugin:3.1.0:testResources (default-testResources) @ hadoop-common --- [INFO] Using 'UTF-8' encoding to copy filtered resources. [INFO] Copying 22 resources [INFO] Copying 2 resources [INFO] [INFO] --- maven-antrun-plugin:1.8:run (create-log-dir) @ hadoop-common --- [INFO] Executing tasks main: [delete] Deleting directory /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/test/data [mkdir] Created dir: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/test/data [mkdir] Created dir: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/log [copy] Copying 6 files to /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/test-classes [INFO] Executed tasks [INFO] [INFO] --- maven-antrun-plugin:1.8:run (copy-test-tarballs) @ hadoop-common --- [INFO] Executing tasks main: [copy] Copying 2 files to /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/test-classes [INFO] Executed tasks [INFO] [INFO] --- maven-compiler-plugin:3.8.0:testCompile (default-testCompile) @ hadoop-common --- [INFO] Compiling 473 source files to /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/test-classes [INFO] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java: Some input files use or override a deprecated API. [INFO] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java: Recompile with -Xlint:deprecation for details. [INFO] [INFO] --- maven-surefire-plugin:2.22.0:test (default-test) @ hadoop-common --- [INFO] Tests are skipped. 
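Editor's note on the "UNUSED" redefinition reported during the OpensslSecureRandom.c compile above: jni_md.h on this JDK already defines UNUSED(x), and with different semantics (it decorates unused parameter declarations), so redefining it in org_apache_hadoop_crypto_random.h draws the warning. A minimal sketch of a non-colliding alternative follows; HADOOP_UNUSED is a hypothetical name chosen for illustration.

```c
#include <stdio.h>

/* Project-prefixed helper: discards a value to silence unused warnings
 * without clashing with the UNUSED macro shipped in jni_md.h. */
#define HADOOP_UNUSED(x) ((void)(x))

int main(void) {
  int demo = 42;
  HADOOP_UNUSED(demo);   /* expands to ((void)(demo)); */
  return 0;
}
```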
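Editor's note on the -Wreturn-type warning at NativeIO.c:596 above (the Windows-only createFileWithMode0 stub): a JNI function that only raises an exception still needs a return on every path, because the C compiler cannot know the value will be ignored. A hedged sketch of the conventional shape follows; the function and class names are made up, and the snippet assumes the JDK headers are on the include path (-I$JAVA_HOME/include -I$JAVA_HOME/include/linux).

```c
#include <jni.h>

/* Illustrative stub only, not Hadoop's createFileWithMode0. */
JNIEXPORT jobject JNICALL
Java_example_NativeStub_unsupported0(JNIEnv *env, jclass clazz)
{
  (void)clazz;
  jclass exc = (*env)->FindClass(env,
      "java/lang/UnsupportedOperationException");
  if (exc != NULL) {
    (*env)->ThrowNew(env, exc, "not supported on this platform");
  }
  /* The JVM ignores the return value once an exception is pending, but
   * returning here keeps every control path explicit and -Wreturn-type quiet. */
  return NULL;
}
```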
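Editor's note on the -Wsizeof-pointer-memaccess warning at DomainSocket.c:156 above: sizeof(&addr) is the size of a pointer, so the memset clears only the first few bytes of the socket address. A minimal sketch of the intended form, assuming addr is the struct sockaddr_un being prepared there, follows.

```c
#include <string.h>
#include <sys/un.h>

int main(void) {
  struct sockaddr_un addr;

  /* Zero the whole object: sizeof(addr), not sizeof(&addr), which would
   * only cover pointer-sized bytes and leave the rest uninitialised. */
  memset(&addr, 0, sizeof(addr));
  return 0;
}
```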
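Editor's note on the linker warnings above about sys_nerr/sys_errlist in exception.c's terror(): glibc deprecates the table lookup in favour of strerror()/strerror_r(). A hedged sketch of the strerror()-based form follows (terror_sketch is an illustrative stand-in, not the Hadoop function; use strerror_r() instead if the caller needs thread safety).

```c
#include <stdio.h>
#include <string.h>
#include <errno.h>

/* Illustrative stand-in for exception.c's terror(): no sys_nerr bounds
 * check and no deprecated sys_errlist table. */
static const char *terror_sketch(int errnum) {
  const char *msg = strerror(errnum);
  return msg != NULL ? msg : "unknown error";
}

int main(void) {
  puts(terror_sketch(ENOENT));   /* e.g. "No such file or directory" */
  return 0;
}
```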
[INFO] [INFO] --- maven-antrun-plugin:1.8:run (native_tests) @ hadoop-common --- [INFO] Executing tasks main: [INFO] Executed tasks [INFO] [INFO] --- maven-jar-plugin:3.1.0:test-jar (prepare-test-jar) @ hadoop-common --- [INFO] Building jar: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/hadoop-common-2.7.6-tests.jar [INFO] [INFO] >>> maven-source-plugin:3.0.1:jar (default) > generate-sources @ hadoop-common >>> [INFO] [INFO] --- maven-enforcer-plugin:1.4.1:enforce (enforce-os) @ hadoop-common --- [INFO] [INFO] --- maven-antrun-plugin:1.8:run (create-testdirs) @ hadoop-common --- [INFO] Executing tasks main: [INFO] Executed tasks [INFO] [INFO] --- hadoop-maven-plugins:2.7.6:protoc (compile-protoc) @ hadoop-common --- [INFO] [INFO] <<< maven-source-plugin:3.0.1:jar (default) < generate-sources @ hadoop-common <<< [INFO] [INFO] [INFO] --- maven-source-plugin:3.0.1:jar (default) @ hadoop-common --- [INFO] Building jar: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/hadoop-common-2.7.6-sources.jar [INFO] [INFO] >>> maven-source-plugin:3.0.1:test-jar (default) > generate-sources @ hadoop-common >>> [INFO] [INFO] --- maven-enforcer-plugin:1.4.1:enforce (enforce-os) @ hadoop-common --- [INFO] [INFO] --- maven-antrun-plugin:1.8:run (create-testdirs) @ hadoop-common --- [INFO] Executing tasks main: [INFO] Executed tasks [INFO] [INFO] --- hadoop-maven-plugins:2.7.6:protoc (compile-protoc) @ hadoop-common --- [INFO] [INFO] <<< maven-source-plugin:3.0.1:test-jar (default) < generate-sources @ hadoop-common <<< [INFO] [INFO] [INFO] --- maven-source-plugin:3.0.1:test-jar (default) @ hadoop-common --- [INFO] Building jar: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/hadoop-common-2.7.6-test-sources.jar [INFO] [INFO] --- maven-antrun-plugin:1.8:run (pre-dist) @ hadoop-common --- [INFO] Executing tasks main: [INFO] Executed tasks [INFO] [INFO] --- maven-assembly-plugin:3.1.0:single (dist) @ hadoop-common --- [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /bin [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /etc/hadoop [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /libexec [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /sbin [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /sbin [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /share/doc/hadoop/${hadoop.component} [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /share/hadoop/${hadoop.component}/webapps [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /share/hadoop/${hadoop.component}/templates [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /share/hadoop/${hadoop.component}/templates/conf [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /share/hadoop/${hadoop.component} [WARNING] The assembly descriptor contains a filesystem-root relative 
reference, which is not cross platform compatible /share/hadoop/${hadoop.component}/sources [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /share/hadoop/${hadoop.component}/jdiff [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /share/hadoop/${hadoop.component}/jdiff [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /share/doc/hadoop/${hadoop.component} [WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /include [WARNING] The following patterns were never triggered in this artifact exclusion filter: o 'org.apache.ant:*:jar' o 'jdiff:jdiff:jar' [INFO] Copying files to /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/hadoop-common-2.7.6 [INFO] [INFO] --- maven-jar-plugin:3.1.0:jar (default-jar) @ hadoop-common --- [INFO] Building jar: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/hadoop-common-2.7.6.jar [INFO] [INFO] --- maven-source-plugin:3.0.1:jar-no-fork (hadoop-java-sources) @ hadoop-common --- [INFO] Building jar: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/hadoop-common-2.7.6-sources.jar [INFO] [INFO] --- maven-source-plugin:3.0.1:test-jar-no-fork (hadoop-java-sources) @ hadoop-common --- [INFO] Building jar: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-common/target/hadoop-common-2.7.6-test-sources.jar [INFO] [INFO] --- maven-enforcer-plugin:1.4.1:enforce (dist-enforce) @ hadoop-common --- [INFO] [INFO] --- maven-antrun-plugin:1.8:run (tar) @ hadoop-common --- [INFO] Executing tasks main: [INFO] Executed tasks [INFO] [INFO] --- maven-javadoc-plugin:3.0.1:jar (module-javadocs) @ hadoop-common --- [INFO] Skipping javadoc generation [INFO] [INFO] --- maven-enforcer-plugin:1.4.1:enforce (depcheck) @ hadoop-common --- [INFO] [INFO] --------------------< org.apache.hadoop:hadoop-nfs >-------------------- [INFO] Building Apache Hadoop NFS 2.7.6 [11/61] [INFO] --------------------------------[ jar ]--------------------------------- [INFO] [INFO] --- maven-antrun-plugin:1.8:run (create-testdirs) @ hadoop-nfs --- [INFO] Executing tasks main: [mkdir] Created dir: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-nfs/target/test-dir [INFO] Executed tasks [INFO] [INFO] --- maven-remote-resources-plugin:1.5:process (default) @ hadoop-nfs --- [WARNING] Missing POM for com.centerkey.utils:BareBonesBrowserLaunch:jar:3.1 [INFO] [INFO] --- maven-resources-plugin:3.1.0:resources (default-resources) @ hadoop-nfs --- [INFO] Using 'UTF-8' encoding to copy filtered resources. [INFO] skip non existing resourceDirectory /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-nfs/src/main/resources [INFO] Copying 2 resources [INFO] [INFO] --- maven-compiler-plugin:3.8.0:compile (default-compile) @ hadoop-nfs --- [INFO] Compiling 94 source files to /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-nfs/target/classes [INFO] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/XDR.java: Some input files use or override a deprecated API. 
[INFO] /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/XDR.java: Recompile with -Xlint:deprecation for details.
[INFO] 
[INFO] --- maven-resources-plugin:3.1.0:testResources (default-testResources) @ hadoop-nfs ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] Copying 1 resource
[INFO] Copying 2 resources
[INFO] 
[INFO] --- maven-compiler-plugin:3.8.0:testCompile (default-testCompile) @ hadoop-nfs ---
[INFO] Compiling 14 source files to /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-nfs/target/test-classes
[INFO] 
[INFO] --- maven-surefire-plugin:2.22.0:test (default-test) @ hadoop-nfs ---
[INFO] Tests are skipped.
[INFO] 
[INFO] --- maven-jar-plugin:3.1.0:jar (default-jar) @ hadoop-nfs ---
[INFO] Building jar: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-nfs/target/hadoop-nfs-2.7.6.jar
[INFO] 
[INFO] --- maven-source-plugin:3.0.1:jar-no-fork (hadoop-java-sources) @ hadoop-nfs ---
[INFO] Building jar: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-nfs/target/hadoop-nfs-2.7.6-sources.jar
[INFO] 
[INFO] --- maven-source-plugin:3.0.1:test-jar-no-fork (hadoop-java-sources) @ hadoop-nfs ---
[INFO] Building jar: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-nfs/target/hadoop-nfs-2.7.6-test-sources.jar
[INFO] 
[INFO] --- maven-enforcer-plugin:1.4.1:enforce (dist-enforce) @ hadoop-nfs ---
[INFO] 
[INFO] --- maven-javadoc-plugin:3.0.1:jar (module-javadocs) @ hadoop-nfs ---
[INFO] Skipping javadoc generation
[INFO] 
[INFO] --- maven-assembly-plugin:3.1.0:single (dist) @ hadoop-nfs ---
[INFO] Reading assembly descriptor: ../../hadoop-assemblies/src/main/resources/assemblies/hadoop-nfs-dist.xml
[WARNING] The assembly descriptor contains a filesystem-root relative reference, which is not cross platform compatible /share/hadoop/common
[WARNING] The following patterns were never triggered in this artifact exclusion filter:
o  'org.apache.hadoop:hadoop-common'
o  'org.apache.hadoop:hadoop-hdfs'
o  'org.hsqldb:hsqldb'
[INFO] Copying files to /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/hadoop-nfs/target/hadoop-nfs-2.7.6
[INFO] 
[INFO] --- maven-enforcer-plugin:1.4.1:enforce (depcheck) @ hadoop-nfs ---
[INFO] 
[INFO] --------------< org.apache.hadoop:hadoop-common-project >---------------
[INFO] Building Apache Hadoop Common Project 2.7.6 [12/61]
[INFO] --------------------------------[ pom ]---------------------------------
[INFO] 
[INFO] --- maven-antrun-plugin:1.8:run (create-testdirs) @ hadoop-common-project ---
[INFO] Executing tasks
main:
[mkdir] Created dir: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-common-project/target/test-dir
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-remote-resources-plugin:1.5:process (default) @ hadoop-common-project ---
[INFO] 
[INFO] --- maven-source-plugin:3.0.1:jar-no-fork (hadoop-java-sources) @ hadoop-common-project ---
[INFO] 
[INFO] --- maven-source-plugin:3.0.1:test-jar-no-fork (hadoop-java-sources) @ hadoop-common-project ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.4.1:enforce (dist-enforce) @ hadoop-common-project ---
[INFO] 
[INFO] --- maven-javadoc-plugin:3.0.1:jar (module-javadocs) @ hadoop-common-project ---
[INFO] Skipping javadoc generation
[INFO] 
[INFO] --- maven-enforcer-plugin:1.4.1:enforce (depcheck) @ hadoop-common-project ---
[INFO] 
[INFO] -------------------< org.apache.hadoop:hadoop-hdfs >--------------------
[INFO] Building Apache Hadoop HDFS 2.7.6 [13/61]
[INFO] --------------------------------[ jar ]---------------------------------
[INFO] 
[INFO] --- maven-antrun-plugin:1.8:run (create-testdirs) @ hadoop-hdfs ---
[INFO] Executing tasks
main:
[mkdir] Created dir: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-hdfs-project/hadoop-hdfs/target/test-dir
[mkdir] Created dir: /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-hdfs-project/hadoop-hdfs/target/test/data
[INFO] Executed tasks
[INFO] 
[INFO] --- hadoop-maven-plugins:2.7.6:protoc (compile-protoc) @ hadoop-hdfs ---
[INFO] 
[INFO] --- maven-remote-resources-plugin:1.5:process (default) @ hadoop-hdfs ---
[INFO] 
[INFO] --- maven-resources-plugin:3.1.0:resources (default-resources) @ hadoop-hdfs ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] Copying 4 resources
[INFO] Copying 2 resources
[INFO] 
[INFO] --- maven-compiler-plugin:3.8.0:compile (default-compile) @ hadoop-hdfs ---
[INFO] Compiling 682 source files to /builddir/build/BUILD/hadoop-2.7.6-src/hadoop-hdfs-project/hadoop-hdfs/target/classes
BUILDSTDERR: The system is out of resources.
BUILDSTDERR: Consult the following stack trace for details.
BUILDSTDERR: java.lang.OutOfMemoryError: Java heap space
BUILDSTDERR: at com.sun.tools.javac.util.Position$LineMapImpl.build(Position.java:153)
BUILDSTDERR: at com.sun.tools.javac.util.Position.makeLineMap(Position.java:77)
BUILDSTDERR: at com.sun.tools.javac.parser.JavaTokenizer.getLineMap(JavaTokenizer.java:763)
BUILDSTDERR: at com.sun.tools.javac.parser.Scanner.getLineMap(Scanner.java:127)
BUILDSTDERR: at com.sun.tools.javac.parser.JavacParser.parseCompilationUnit(JavacParser.java:3173)
BUILDSTDERR: at com.sun.tools.javac.main.JavaCompiler.parse(JavaCompiler.java:628)
BUILDSTDERR: at com.sun.tools.javac.main.JavaCompiler.parse(JavaCompiler.java:665)
BUILDSTDERR: at com.sun.tools.javac.main.JavaCompiler.parseFiles(JavaCompiler.java:950)
BUILDSTDERR: at com.sun.tools.javac.main.JavaCompiler.compile(JavaCompiler.java:857)
BUILDSTDERR: at com.sun.tools.javac.main.Main.compile(Main.java:523)
BUILDSTDERR: at com.sun.tools.javac.api.JavacTaskImpl.doCall(JavacTaskImpl.java:129)
BUILDSTDERR: at com.sun.tools.javac.api.JavacTaskImpl.call(JavacTaskImpl.java:138)
BUILDSTDERR: at org.codehaus.plexus.compiler.javac.JavaxToolsCompiler.compileInProcess(JavaxToolsCompiler.java:126)
BUILDSTDERR: at org.codehaus.plexus.compiler.javac.JavacCompiler.performCompile(JavacCompiler.java:174)
BUILDSTDERR: at org.apache.maven.plugin.compiler.AbstractCompilerMojo.execute(AbstractCompilerMojo.java:1129)
BUILDSTDERR: at org.apache.maven.plugin.compiler.CompilerMojo.execute(CompilerMojo.java:188)
BUILDSTDERR: at org.apache.maven.plugin.DefaultBuildPluginManager.executeMojo(DefaultBuildPluginManager.java:137)
BUILDSTDERR: at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:208)
BUILDSTDERR: at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:154)
BUILDSTDERR: at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:146)
BUILDSTDERR: at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject(LifecycleModuleBuilder.java:117)
BUILDSTDERR: at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject(LifecycleModuleBuilder.java:81)
BUILDSTDERR: at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build(SingleThreadedBuilder.java:56)
BUILDSTDERR: at org.apache.maven.lifecycle.internal.LifecycleStarter.execute(LifecycleStarter.java:128)
BUILDSTDERR: at org.apache.maven.DefaultMaven.doExecute(DefaultMaven.java:305)
BUILDSTDERR: at org.apache.maven.DefaultMaven.doExecute(DefaultMaven.java:192)
BUILDSTDERR: at org.apache.maven.DefaultMaven.execute(DefaultMaven.java:105)
BUILDSTDERR: at org.apache.maven.cli.MavenCli.execute(MavenCli.java:954)
BUILDSTDERR: at org.apache.maven.cli.MavenCli.doMain(MavenCli.java:288)
BUILDSTDERR: at org.apache.maven.cli.MavenCli.main(MavenCli.java:192)
BUILDSTDERR: at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
BUILDSTDERR: at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
[INFO] -------------------------------------------------------------
[ERROR] COMPILATION ERROR :
[INFO] -------------------------------------------------------------
[ERROR] An unknown compilation problem occurred
[INFO] 1 error
[INFO] -------------------------------------------------------------
[INFO] 
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop Main
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Hadoop Main 2.7.6 ........................... SUCCESS [ 24.440 s]
[INFO] Apache Hadoop Build Tools .......................... SUCCESS [ 51.463 s]
[INFO] Apache Hadoop Project POM .......................... SUCCESS [ 47.644 s]
[INFO] Apache Hadoop Annotations .......................... SUCCESS [ 43.130 s]
[INFO] Apache Hadoop Assemblies ........................... SUCCESS [  9.439 s]
[INFO] Apache Hadoop Project Dist POM ..................... SUCCESS [ 46.452 s]
[INFO] Apache Hadoop Maven Plugins ........................ SUCCESS [01:23 min]
[INFO] Apache Hadoop Auth ................................. SUCCESS [01:43 min]
[INFO] Apache Hadoop Auth Examples ........................ SUCCESS [ 48.648 s]
[INFO] Apache Hadoop Common ............................... SUCCESS [57:34 min]
[INFO] Apache Hadoop NFS .................................. SUCCESS [02:11 min]
[INFO] Apache Hadoop Common Project ....................... SUCCESS [  5.231 s]
[INFO] Apache Hadoop HDFS ................................. FAILURE [09:15 min]
[INFO] Apache Hadoop HttpFS ............................... SKIPPED
[INFO] Apache Hadoop HDFS BookKeeper Journal .............. SKIPPED
[INFO] Apache Hadoop HDFS-NFS ............................. SKIPPED
[INFO] Apache Hadoop HDFS Project ......................... SKIPPED
[INFO] hadoop-yarn ........................................ SKIPPED
[INFO] hadoop-yarn-api .................................... SKIPPED
[INFO] hadoop-yarn-common ................................. SKIPPED
[INFO] hadoop-yarn-server ................................. SKIPPED
[INFO] hadoop-yarn-server-common .......................... SKIPPED
[INFO] hadoop-yarn-server-nodemanager ..................... SKIPPED
[INFO] hadoop-yarn-server-web-proxy ....................... SKIPPED
[INFO] hadoop-yarn-server-applicationhistoryservice ....... SKIPPED
[INFO] hadoop-yarn-server-resourcemanager ................. SKIPPED
[INFO] hadoop-yarn-server-tests ........................... SKIPPED
[INFO] hadoop-yarn-client ................................. SKIPPED
[INFO] hadoop-yarn-server-sharedcachemanager .............. SKIPPED
[INFO] hadoop-yarn-applications ........................... SKIPPED
[INFO] hadoop-yarn-applications-distributedshell .......... SKIPPED
[INFO] hadoop-yarn-applications-unmanaged-am-launcher ..... SKIPPED
[INFO] hadoop-yarn-site ................................... SKIPPED
[INFO] hadoop-yarn-registry ............................... SKIPPED
[INFO] hadoop-yarn-project ................................ SKIPPED
[INFO] hadoop-mapreduce-client ............................ SKIPPED
[INFO] hadoop-mapreduce-client-core ....................... SKIPPED
[INFO] hadoop-mapreduce-client-common ..................... SKIPPED
[INFO] hadoop-mapreduce-client-shuffle .................... SKIPPED
[INFO] hadoop-mapreduce-client-app ........................ SKIPPED
[INFO] hadoop-mapreduce-client-hs ......................... SKIPPED
[INFO] hadoop-mapreduce-client-jobclient .................. SKIPPED
[INFO] hadoop-mapreduce-client-hs-plugins ................. SKIPPED
[INFO] Apache Hadoop MapReduce Examples ................... SKIPPED
[INFO] hadoop-mapreduce ................................... SKIPPED
[INFO] Apache Hadoop MapReduce Streaming .................. SKIPPED
[INFO] Apache Hadoop Distributed Copy ..................... SKIPPED
[INFO] Apache Hadoop Archives ............................. SKIPPED
[INFO] Apache Hadoop Rumen ................................ SKIPPED
[INFO] Apache Hadoop Gridmix .............................. SKIPPED
[INFO] Apache Hadoop Data Join ............................ SKIPPED
[INFO] Apache Hadoop Ant Tasks ............................ SKIPPED
[INFO] Apache Hadoop Extras ............................... SKIPPED
[INFO] Apache Hadoop OpenStack support .................... SKIPPED
[INFO] Apache Hadoop Amazon Web Services support .......... SKIPPED
[INFO] Apache Hadoop Client ............................... SKIPPED
[INFO] Apache Hadoop Mini-Cluster ......................... SKIPPED
[INFO] Apache Hadoop Scheduler Load Simulator ............. SKIPPED
[INFO] Apache Hadoop Tools Dist ........................... SKIPPED
[INFO] Apache Hadoop Tools ................................ SKIPPED
[INFO] Apache Hadoop Distribution 2.7.6 ................... SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 01:19 h
[INFO] Finished at: 2018-09-17T22:27:35Z
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-compiler-plugin:3.8.0:compile (default-compile) on project hadoop-hdfs: Compilation failure
[ERROR] An unknown compilation problem occurred
[ERROR] -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR] mvn -rf :hadoop-hdfs
RPM build errors:
BUILDSTDERR: error: Bad exit status from /var/tmp/rpm-tmp.SdyeX5 (%build)
BUILDSTDERR: Bad exit status from /var/tmp/rpm-tmp.SdyeX5 (%build)
Child return code was: 1
EXCEPTION: [Error()]
Traceback (most recent call last):
  File "/usr/lib/python3.6/site-packages/mockbuild/trace_decorator.py", line 96, in trace
    result = func(*args, **kw)
  File "/usr/lib/python3.6/site-packages/mockbuild/util.py", line 626, in do
    raise exception.Error("Command failed: \n # %s\n%s" % (command, output), child.returncode)
mockbuild.exception.Error: Command failed: 
 # bash --login -c /usr/bin/rpmbuild -bb --target riscv64 --nodeps /builddir/build/SPECS/hadoop.spec
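
The build died with java.lang.OutOfMemoryError: Java heap space while maven-compiler-plugin 3.8.0 compiled the 682 hadoop-hdfs sources in-process (note the org.codehaus.plexus.compiler.javac.JavaxToolsCompiler.compileInProcess frame), so javac was sharing the Maven JVM's heap. A minimal sketch of how one might retry with more memory, assuming a plain mvn invocation outside the mock chroot; the -Xmx value and the install goal are illustrative and not taken from this build, while the resume point comes from the log's own hint:

    # Give the Maven JVM (and the in-process javac it hosts) a larger heap.
    export MAVEN_OPTS="-Xms512m -Xmx2048m"

    # Resume the reactor from the module that failed, as the log suggests.
    mvn install -rf :hadoop-hdfs

    # Alternatively, fork javac into its own JVM with its own memory limit,
    # via maven-compiler-plugin's documented fork/maxmem parameters.
    mvn install -rf :hadoop-hdfs -Dmaven.compiler.fork=true -Dmaven.compiler.maxmem=2048m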
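
Because the failure aborted rpmbuild's %build stage inside mock, a real fix has to land in the spec rather than in an interactive shell. A minimal sketch of what that could look like in hadoop.spec, assuming the %build section drives Maven through the %mvn_build macro and that the xmvn launcher honors MAVEN_OPTS; the heap size is a placeholder and the macro line is hypothetical, not the spec's actual contents:

    %build
    # Hypothetical tweak: raise the heap before the Maven run so javac can
    # parse the ~680 hadoop-hdfs sources without exhausting it.
    export MAVEN_OPTS="-Xms512m -Xmx2048m"
    %mvn_build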