From aede94115e077e87504b03bf668ef375290200ad Mon Sep 17 00:00:00 2001
From: Michal Marek
Date: Sat, 10 Sep 2016 23:13:25 +0200
Subject: [PATCH] find-debuginfo.sh: Process files in parallel

Add a -j option, which, when used, will spawn processes to do the
debuginfo extraction in parallel. A pipe is used to dispatch the files
among the processes.

Signed-off-by: Michal Marek
(cherry picked from commit 1b338aa84d4c67fefa957352a028eaca1a45d1f6)

Conflicts:
    macros.in
    scripts/find-debuginfo.sh
---
 macros.in                 |  2 +-
 scripts/find-debuginfo.sh | 63 ++++++++++++++++++++++++++++++++++++++++++++---
 2 files changed, 61 insertions(+), 4 deletions(-)

diff --git a/macros.in b/macros.in
index c5b1a0b26..93e360c79 100644
--- a/macros.in
+++ b/macros.in
@@ -180,7 +180,7 @@
 # the script.  See the script for details.
 #
 %__debug_install_post \
-   %{_rpmconfigdir}/find-debuginfo.sh %{?_missing_build_ids_terminate_build:--strict-build-id} %{?_no_recompute_build_ids:-n} %{?_include_minidebuginfo:-m} %{?_include_gdb_index:-i} %{?_unique_build_ids:--ver-rel "%{VERSION}-%{RELEASE}"} %{?_unique_debug_names:--unique-debug-arch "%{_arch}"} %{?_unique_debug_srcs:--unique-debug-src-base "%{name}"} %{?_find_debuginfo_dwz_opts} %{?_find_debuginfo_opts} "%{_builddir}/%{?buildsubdir}"\
+   %{_rpmconfigdir}/find-debuginfo.sh %{?_smp_mflags} %{?_missing_build_ids_terminate_build:--strict-build-id} %{?_no_recompute_build_ids:-n} %{?_include_minidebuginfo:-m} %{?_include_gdb_index:-i} %{?_unique_build_ids:--ver-rel "%{VERSION}-%{RELEASE}"} %{?_unique_debug_names:--unique-debug-arch "%{_arch}"} %{?_unique_debug_srcs:--unique-debug-src-base "%{name}"} %{?_find_debuginfo_dwz_opts} %{?_find_debuginfo_opts} "%{_builddir}/%{?buildsubdir}"\
 %{nil}
 
 # Template for debug information sub-package.
diff --git a/scripts/find-debuginfo.sh b/scripts/find-debuginfo.sh
index c435a02e4..d8b718bdf 100644
--- a/scripts/find-debuginfo.sh
+++ b/scripts/find-debuginfo.sh
@@ -74,6 +74,9 @@ unique_debug_arch=
 # Base given by --unique-debug-src-base
 unique_debug_src_base=
 
+# Number of parallel jobs to spawn
+n_jobs=1
+
 BUILDDIR=.
 out=debugfiles.list
 nout=0
@@ -137,6 +140,13 @@ while [ $# -gt 0 ]; do
   -r)
     strip_r=true
     ;;
+  -j)
+    n_jobs=$2
+    shift
+    ;;
+  -j*)
+    n_jobs=${1#-j}
+    ;;
   *)
     BUILDDIR=$1
     shift
@@ -389,9 +399,56 @@ do_file()
   fi
 }
 
-while read nlinks inum f; do
-  do_file "$nlinks" "$inum" "$f"
-done <"$temp/primary"
+# 16^6 - 1 or about 16 million files
+FILENUM_DIGITS=6
+run_job()
+{
+  local jobid=$1 filenum
+  local SOURCEFILE=$temp/debugsources.$jobid ELFBINSFILE=$temp/elfbins.$jobid
+
+  >"$SOURCEFILE"
+  >"$ELFBINSFILE"
+  # can't use read -n <n>, because it reads bytes one by one, allowing for
+  # races
+  while :; do
+    filenum=$(dd bs=$(( FILENUM_DIGITS + 1 )) count=1 status=none)
+    if test -z "$filenum"; then
+      break
+    fi
+    do_file $(sed -n "$(( 0x$filenum )) p" "$temp/primary")
+  done
+  echo 0 >"$temp/res.$jobid"
+}
+
+n_files=$(wc -l <"$temp/primary")
+if [ $n_jobs -gt $n_files ]; then
+  n_jobs=$n_files
+fi
+if [ $n_jobs -le 1 ]; then
+  while read nlinks inum f; do
+    do_file "$nlinks" "$inum" "$f"
+  done <"$temp/primary"
+else
+  for ((i = 1; i <= n_files; i++)); do
+    printf "%0${FILENUM_DIGITS}x\\n" $i
+  done | (
+    exec 3<&0
+    for ((i = 0; i < n_jobs; i++)); do
+      # The shell redirects stdin to /dev/null for background jobs.  Work
+      # around this by duplicating fd 0
+      run_job $i <&3 &
+    done
+    wait
+  )
+  for f in "$temp"/res.*; do
+    res=$(< "$f")
+    if [ "$res" != "0" ]; then
+      exit 1
+    fi
+  done
+  cat "$temp"/debugsources.* >"$SOURCEFILE"
+  cat "$temp"/elfbins.* >"$ELFBINSFILE"
+fi
 
 # Invoke the DWARF Compressor utility.
 if $run_dwz \
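
For readers following the dispatch logic in the hunk above, below is a minimal
standalone sketch of the same scheme: worker processes share one pipe of
fixed-width, newline-terminated hex indices, each index is claimed atomically
with a single dd read, and the index is mapped back to a line of the work
list. This is illustrative only, not part of the patch; the names DIGITS,
NJOBS, worker and worklist, and the 20-item demo list are assumptions made
for the example.

#!/bin/bash
# Illustrative sketch: parallel workers pulling fixed-width job indices
# from one shared pipe, in the style of the patched find-debuginfo.sh.

DIGITS=6      # width of each hex index, excluding the trailing newline
NJOBS=4       # number of worker processes (arbitrary for this demo)

tmp=$(mktemp -d)
trap 'rm -rf "$tmp"' EXIT

# Build a demo work list, one item per line.
seq 1 20 | sed 's/^/item-/' >"$tmp/worklist"
n_items=$(wc -l <"$tmp/worklist")

worker()
{
  local id=$1 idx item
  while :; do
    # A single dd call reads exactly DIGITS+1 bytes (index + newline),
    # so concurrent readers never tear a token apart; read -n would
    # consume the pipe byte by byte and could interleave with others.
    idx=$(dd bs=$((DIGITS + 1)) count=1 status=none)
    if [ -z "$idx" ]; then
      break    # empty read means the pipe is closed and drained
    fi
    # Convert the hex index back to a line number in the work list.
    item=$(sed -n "$((0x$idx)) p" "$tmp/worklist")
    echo "worker $id: processing $item"
  done
}

# Feed zero-padded hex indices into one pipe shared by all workers.
for ((i = 1; i <= n_items; i++)); do
  printf "%0${DIGITS}x\n" "$i"
done | (
  # Background jobs get stdin redirected to /dev/null, so hand the real
  # pipe to each worker explicitly via fd 3.
  exec 3<&0
  for ((i = 0; i < NJOBS; i++)); do
    worker "$i" <&3 &
  done
  wait
)

The fixed token width is what makes a shared single read safe: every producer
write and every consumer read is exactly DIGITS+1 bytes, so no worker can ever
receive half of one index and half of another, which is the race the patch's
"can't use read -n" comment refers to.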