Fix rhbz#1597446 (container-executor builds) and rhbz#1593020 (CVE-2018-8009)
This commit is contained in:
parent
7adcfbaf3f
commit
7367791e91
|
@ -0,0 +1,237 @@
|
|||
commit 45a1c680c276c4501402f7bc4cebcf85a6fbc7f5
|
||||
Author: Akira Ajisaka <aajisaka@apache.org>
|
||||
Date: Wed May 23 17:21:46 2018 +0900
|
||||
|
||||
Additional check when unpacking archives. Contributed by Jason Lowe and Akira Ajisaka.
|
||||
|
||||
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
|
||||
index 23fb9462449..00381fee278 100644
|
||||
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
|
||||
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
|
||||
@@ -587,16 +587,21 @@ public static long getDU(File dir) {
|
||||
public static void unZip(File inFile, File unzipDir) throws IOException {
|
||||
Enumeration<? extends ZipEntry> entries;
|
||||
ZipFile zipFile = new ZipFile(inFile);
|
||||
+ String targetDirPath = unzipDir.getCanonicalPath() + File.separator;
|
||||
|
||||
try {
|
||||
entries = zipFile.entries();
|
||||
while (entries.hasMoreElements()) {
|
||||
ZipEntry entry = entries.nextElement();
|
||||
if (!entry.isDirectory()) {
|
||||
+ File file = new File(unzipDir, entry.getName());
|
||||
+ if (!file.getCanonicalPath().startsWith(targetDirPath)) {
|
||||
+ throw new IOException("expanding " + entry.getName()
|
||||
+ + " would create file outside of " + unzipDir);
|
||||
+ }
|
||||
InputStream in = zipFile.getInputStream(entry);
|
||||
try {
|
||||
- File file = new File(unzipDir, entry.getName());
|
||||
- if (!file.getParentFile().mkdirs()) {
|
||||
+ if (!file.getParentFile().mkdirs()) {
|
||||
if (!file.getParentFile().isDirectory()) {
|
||||
throw new IOException("Mkdirs failed to create " +
|
||||
file.getParentFile().toString());
|
||||
@@ -705,6 +710,13 @@ private static void unTarUsingJava(File inFile, File untarDir,
|
||||
|
||||
private static void unpackEntries(TarArchiveInputStream tis,
|
||||
TarArchiveEntry entry, File outputDir) throws IOException {
|
||||
+ String targetDirPath = outputDir.getCanonicalPath() + File.separator;
|
||||
+ File outputFile = new File(outputDir, entry.getName());
|
||||
+ if (!outputFile.getCanonicalPath().startsWith(targetDirPath)) {
|
||||
+ throw new IOException("expanding " + entry.getName()
|
||||
+ + " would create entry outside of " + outputDir);
|
||||
+ }
|
||||
+
|
||||
if (entry.isDirectory()) {
|
||||
File subDir = new File(outputDir, entry.getName());
|
||||
if (!subDir.mkdirs() && !subDir.isDirectory()) {
|
||||
@@ -719,7 +731,6 @@ private static void unpackEntries(TarArchiveInputStream tis,
|
||||
return;
|
||||
}
|
||||
|
||||
- File outputFile = new File(outputDir, entry.getName());
|
||||
if (!outputFile.getParentFile().exists()) {
|
||||
if (!outputFile.getParentFile().mkdirs()) {
|
||||
throw new IOException("Mkdirs failed to create tar internal dir "
|
||||
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
|
||||
index 41794b85314..7712535c669 100644
|
||||
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
|
||||
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
|
||||
@@ -25,8 +25,9 @@
|
||||
import java.io.FileReader;
|
||||
import java.io.IOException;
|
||||
import java.io.OutputStream;
|
||||
-import java.net.URI;
|
||||
import java.io.PrintWriter;
|
||||
+import java.net.URI;
|
||||
+import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
@@ -40,6 +41,7 @@
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
+import org.apache.hadoop.test.GenericTestUtils;
|
||||
import org.apache.hadoop.util.Shell;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.apache.tools.tar.TarEntry;
|
||||
@@ -708,10 +710,8 @@ public void testCreateLocalTempFile() throws IOException {
|
||||
|
||||
@Test (timeout = 30000)
|
||||
public void testUnZip() throws IOException {
|
||||
- // make sa simple zip
|
||||
setupDirs();
|
||||
-
|
||||
- // make a simple tar:
|
||||
+ // make a simple zip
|
||||
final File simpleZip = new File(del, FILE);
|
||||
OutputStream os = new FileOutputStream(simpleZip);
|
||||
ZipOutputStream tos = new ZipOutputStream(os);
|
||||
@@ -728,7 +728,7 @@ public void testUnZip() throws IOException {
|
||||
tos.close();
|
||||
}
|
||||
|
||||
- // successfully untar it into an existing dir:
|
||||
+ // successfully unzip it into an existing dir:
|
||||
FileUtil.unZip(simpleZip, tmp);
|
||||
// check result:
|
||||
assertTrue(new File(tmp, "foo").exists());
|
||||
@@ -743,8 +743,36 @@ public void testUnZip() throws IOException {
|
||||
} catch (IOException ioe) {
|
||||
// okay
|
||||
}
|
||||
- }
|
||||
-
|
||||
+ }
|
||||
+
|
||||
+ @Test (timeout = 30000)
|
||||
+ public void testUnZip2() throws IOException {
|
||||
+ setupDirs();
|
||||
+ // make a simple zip
|
||||
+ final File simpleZip = new File(del, FILE);
|
||||
+ OutputStream os = new FileOutputStream(simpleZip);
|
||||
+ try (ZipOutputStream tos = new ZipOutputStream(os)) {
|
||||
+ // Add an entry that contains invalid filename
|
||||
+ ZipEntry ze = new ZipEntry("../foo");
|
||||
+ byte[] data = "some-content".getBytes(StandardCharsets.UTF_8);
|
||||
+ ze.setSize(data.length);
|
||||
+ tos.putNextEntry(ze);
|
||||
+ tos.write(data);
|
||||
+ tos.closeEntry();
|
||||
+ tos.flush();
|
||||
+ tos.finish();
|
||||
+ }
|
||||
+
|
||||
+ // Unzip it into an existing dir
|
||||
+ try {
|
||||
+ FileUtil.unZip(simpleZip, tmp);
|
||||
+ Assert.fail("unZip should throw IOException.");
|
||||
+ } catch (IOException e) {
|
||||
+ GenericTestUtils.assertExceptionContains(
|
||||
+ "would create file outside of", e);
|
||||
+ }
|
||||
+ }
|
||||
+
|
||||
@Test (timeout = 30000)
|
||||
/*
|
||||
* Test method copy(FileSystem srcFS, Path src, File dst, boolean deleteSource, Configuration conf)
|
||||
commit eaa2b8035b584dfcf7c79a33484eb2dffd3fdb11
|
||||
Author: Kihwal Lee <kihwal@apache.org>
|
||||
Date: Tue May 29 14:47:55 2018 -0500
|
||||
|
||||
Additional check when unpacking archives. Contributed by Wilfred Spiegelenburg.
|
||||
|
||||
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
|
||||
index 4b26b7611d6..a3b5b0bbd94 100644
|
||||
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
|
||||
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
|
||||
@@ -93,6 +93,7 @@ public static void unJar(File jarFile, File toDir, Pattern unpackRegex)
|
||||
throws IOException {
|
||||
JarFile jar = new JarFile(jarFile);
|
||||
try {
|
||||
+ String targetDirPath = toDir.getCanonicalPath() + File.separator;
|
||||
Enumeration<JarEntry> entries = jar.entries();
|
||||
while (entries.hasMoreElements()) {
|
||||
final JarEntry entry = entries.nextElement();
|
||||
@@ -102,6 +103,10 @@ public static void unJar(File jarFile, File toDir, Pattern unpackRegex)
|
||||
try {
|
||||
File file = new File(toDir, entry.getName());
|
||||
ensureDirectory(file.getParentFile());
|
||||
+ if (!file.getCanonicalPath().startsWith(targetDirPath)) {
|
||||
+ throw new IOException("expanding " + entry.getName()
|
||||
+ + " would create file outside of " + toDir);
|
||||
+ }
|
||||
OutputStream out = new FileOutputStream(file);
|
||||
try {
|
||||
IOUtils.copyBytes(in, out, 8192);
|
||||
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestRunJar.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestRunJar.java
|
||||
index f592d0400a4..b2a65379eda 100644
|
||||
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestRunJar.java
|
||||
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestRunJar.java
|
||||
@@ -17,6 +17,7 @@
|
||||
*/
|
||||
package org.apache.hadoop.util;
|
||||
|
||||
+import static org.junit.Assert.fail;
|
||||
import static org.mockito.Mockito.spy;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
@@ -25,6 +26,8 @@
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
+import java.nio.charset.StandardCharsets;
|
||||
+import java.util.jar.JarEntry;
|
||||
import java.util.jar.JarOutputStream;
|
||||
import java.util.regex.Pattern;
|
||||
import java.util.zip.ZipEntry;
|
||||
@@ -32,6 +35,7 @@
|
||||
import junit.framework.TestCase;
|
||||
|
||||
import org.apache.hadoop.fs.FileUtil;
|
||||
+import org.apache.hadoop.test.GenericTestUtils;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
@@ -169,4 +173,37 @@ private File makeClassLoaderTestJar(String... clsNames) throws IOException {
|
||||
|
||||
return jarFile;
|
||||
}
|
||||
-}
|
||||
\ No newline at end of file
|
||||
+
|
||||
+ @Test
|
||||
+ public void testUnJar2() throws IOException {
|
||||
+ // make a simple zip
|
||||
+ File jarFile = new File(TEST_ROOT_DIR, TEST_JAR_NAME);
|
||||
+ JarOutputStream jstream =
|
||||
+ new JarOutputStream(new FileOutputStream(jarFile));
|
||||
+ JarEntry je = new JarEntry("META-INF/MANIFEST.MF");
|
||||
+ byte[] data = "Manifest-Version: 1.0\nCreated-By: 1.8.0_1 (Manual)"
|
||||
+ .getBytes(StandardCharsets.UTF_8);
|
||||
+ je.setSize(data.length);
|
||||
+ jstream.putNextEntry(je);
|
||||
+ jstream.write(data);
|
||||
+ jstream.closeEntry();
|
||||
+ je = new JarEntry("../outside.path");
|
||||
+ data = "any data here".getBytes(StandardCharsets.UTF_8);
|
||||
+ je.setSize(data.length);
|
||||
+ jstream.putNextEntry(je);
|
||||
+ jstream.write(data);
|
||||
+ jstream.closeEntry();
|
||||
+ jstream.close();
|
||||
+
|
||||
+ File unjarDir = new File(TEST_ROOT_DIR, "unjar-path");
|
||||
+
|
||||
+ // Unjar everything
|
||||
+ try {
|
||||
+ RunJar.unJar(jarFile, unjarDir);
|
||||
+ fail("unJar should throw IOException.");
|
||||
+ } catch (IOException e) {
|
||||
+ GenericTestUtils.assertExceptionContains(
|
||||
+ "would create file outside of", e);
|
||||
+ }
|
||||
+ }
|
||||
+}
|
|
@ -0,0 +1,32 @@
|
|||
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/CMakeLists.txt b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/CMakeLists.txt
|
||||
index ba2eae2..76daaab 100644
|
||||
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/CMakeLists.txt
|
||||
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/CMakeLists.txt
|
||||
@@ -57,24 +57,12 @@ include_directories(
|
||||
)
|
||||
CONFIGURE_FILE(${CMAKE_SOURCE_DIR}/config.h.cmake ${CMAKE_BINARY_DIR}/config.h)
|
||||
|
||||
-add_library(container
|
||||
+set(CMAKE_POSITION_INDEPENDENT_CODE ON)
|
||||
+add_executable(container-executor
|
||||
+ main/native/container-executor/impl/main.c
|
||||
main/native/container-executor/impl/configuration.c
|
||||
main/native/container-executor/impl/container-executor.c
|
||||
main/native/container-executor/impl/get_executable.c
|
||||
)
|
||||
-
|
||||
-add_executable(container-executor
|
||||
- main/native/container-executor/impl/main.c
|
||||
-)
|
||||
-target_link_libraries(container-executor
|
||||
- container
|
||||
-)
|
||||
output_directory(container-executor target/usr/local/bin)
|
||||
|
||||
-add_executable(test-container-executor
|
||||
- main/native/container-executor/test/test-container-executor.c
|
||||
-)
|
||||
-target_link_libraries(test-container-executor
|
||||
- container ${EXTRA_LIBS}
|
||||
-)
|
||||
-output_directory(test-container-executor target/usr/local/bin)
|
18
hadoop.spec
18
hadoop.spec
|
@ -11,7 +11,7 @@
|
|||
|
||||
Name: hadoop
|
||||
Version: 2.7.6
|
||||
Release: 2%{?dist}
|
||||
Release: 3%{?dist}
|
||||
Summary: A software platform for processing vast amounts of data
|
||||
# The BSD license file is missing
|
||||
# https://issues.apache.org/jira/browse/HADOOP-9849
|
||||
|
@ -67,6 +67,12 @@ Patch22: %{name}-aws.patch
|
|||
# fix classpath issues
|
||||
Patch23: classpath.patch
|
||||
|
||||
# fix container-executor compilation rhbz#1597446
|
||||
Patch24: fix-container-executor-cmake.patch
|
||||
|
||||
# fix rhbz#1593020
|
||||
Patch25: backport-CVE-2018-8009.patch
|
||||
|
||||
BuildRequires: ant
|
||||
BuildRequires: antlr-tool
|
||||
BuildRequires: aopalliance
|
||||
|
@ -424,8 +430,6 @@ This package contains files needed to run Apache Hadoop YARN in secure mode.
|
|||
|
||||
%pom_xpath_set "pom:properties/pom:protobuf.version" 3.5.0 hadoop-project
|
||||
|
||||
# remove yarn-server-nodemanager native build for now (possible bug with cmake macro and build flags)
|
||||
%pom_remove_plugin :maven-antrun-plugin hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager
|
||||
%pom_xpath_inject "pom:plugin[pom:artifactId='maven-jar-plugin']/pom:executions/pom:execution[pom:phase='test-compile']" "<id>default-jar</id>" hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell
|
||||
|
||||
# Remove the maven-site-plugin. It's not needed
|
||||
|
@ -695,9 +699,6 @@ done
|
|||
# This binary is obsoleted and causes a conflict with qt-devel
|
||||
rm -rf %{buildroot}/%{_bindir}/rcc
|
||||
|
||||
# We don't care about this
|
||||
rm -f %{buildroot}/%{_bindir}/test-container-executor
|
||||
|
||||
# Duplicate files
|
||||
rm -f %{buildroot}/%{_sbindir}/hdfs-config.sh
|
||||
|
||||
|
@ -1125,8 +1126,13 @@ fi
|
|||
|
||||
%files yarn-security
|
||||
%config(noreplace) %{_sysconfdir}/%{name}/container-executor.cfg
|
||||
# Permissions set per upstream guidelines: https://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-common/ClusterSetup.html#Configuration_in_Secure_Mode
|
||||
%attr(6050,root,yarn) %{_bindir}/container-executor
|
||||
|
||||
%changelog
|
||||
* Thu Jul 05 2018 Christopher Tubbs <ctubbsii@fedoraproject.org> - 2.7.6-3
|
||||
- Fix rhbz#1597446 (container-executor builds) and rhbz#1593020 (CVE-2018-8009)
|
||||
|
||||
* Fri Jun 29 2018 Mike Miller <mmiller@apache.org> - 2.7.6-2
|
||||
- Fix jetty version dependencies
|
||||
|
||||
|
|
Loading…
Reference in New Issue