commit 1b338aa84d4c67fefa957352a028eaca1a45d1f6
Author: Michal Marek <mmarek@suse.com>
Date: Sat Sep 10 23:13:25 2016 +0200

find-debuginfo.sh: Process files in parallel

Add a -j <n> option, which, when used, will spawn <n> processes to do the
debuginfo extraction in parallel. A pipe is used to dispatch the files among
the processes.

Signed-off-by: Michal Marek <mmarek@suse.com>
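Example invocation with the new option (illustrative only; the script path assumes %{_rpmconfigdir} is /usr/lib/rpm, and the build directory argument is a placeholder):

    /usr/lib/rpm/find-debuginfo.sh -j4 "$RPM_BUILD_DIR/example-1.0"

In the macros.in hunk below, %{?_smp_mflags} normally expands to -j<number of CPUs>, so debuginfo extraction follows the same parallelism as the rest of the build; a spec file could also pass an explicit job count via e.g. %global _find_debuginfo_opts -j8, since %{?_find_debuginfo_opts} is expanded after %{?_smp_mflags} and the last -j seen wins.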
diff --git a/macros.in b/macros.in
index b03c5a9..8bde2d7 100644
--- a/macros.in
+++ b/macros.in
@@ -180,7 +180,7 @@
# the script. See the script for details.
#
%__debug_install_post \
- %{_rpmconfigdir}/find-debuginfo.sh %{?_missing_build_ids_terminate_build:--strict-build-id} %{?_no_recompute_build_ids:-n} %{?_include_minidebuginfo:-m} %{?_include_gdb_index:-i} %{?_unique_build_ids:--ver-rel "%{VERSION}-%{RELEASE}"} %{?_unique_debug_names:--unique-debug-arch "%{_arch}"} %{?_unique_debug_srcs:--unique-debug-src-base "%{name}"} %{?_find_debuginfo_dwz_opts} %{?_find_debuginfo_opts} "%{_builddir}/%{?buildsubdir}"\
+ %{_rpmconfigdir}/find-debuginfo.sh %{?_smp_mflags} %{?_missing_build_ids_terminate_build:--strict-build-id} %{?_no_recompute_build_ids:-n} %{?_include_minidebuginfo:-m} %{?_include_gdb_index:-i} %{?_unique_build_ids:--ver-rel "%{VERSION}-%{RELEASE}"} %{?_unique_debug_names:--unique-debug-arch "%{_arch}"} %{?_unique_debug_srcs:--unique-debug-src-base "%{name}"} %{?_find_debuginfo_dwz_opts} %{?_find_debuginfo_opts} "%{_builddir}/%{?buildsubdir}"\
%{nil}

# Template for debug information sub-package.
diff --git a/scripts/find-debuginfo.sh b/scripts/find-debuginfo.sh
index 6dcd5a4..2016222 100644
--- a/scripts/find-debuginfo.sh
+++ b/scripts/find-debuginfo.sh
@@ -74,6 +74,9 @@
# Base given by --unique-debug-src-base
unique_debug_src_base=

+# Number of parallel jobs to spawn
+n_jobs=1
+
BUILDDIR=.
out=debugfiles.list
nout=0
@@ -137,6 +140,13 @@
-r)
strip_r=true
;;
+ -j)
+ n_jobs=$2
+ shift
+ ;;
+ -j*)
+ n_jobs=${1#-j}
+ ;;
*)
BUILDDIR=$1
shift
@@ -389,9 +399,56 @@
fi
}

-while read nlinks inum f; do
- do_file "$nlinks" "$inum" "$f"
-done <"$temp/primary"
+# 16^6 - 1 or about 16 million files
+FILENUM_DIGITS=6
+run_job()
+{
+ local jobid=$1 filenum
+ local SOURCEFILE=$temp/debugsources.$jobid ELFBINSFILE=$temp/elfbins.$jobid
+
+ >"$SOURCEFILE"
+ >"$ELFBINSFILE"
+ # can't use read -n <n>, because it reads bytes one by one, allowing for
+ # races
+ while :; do
+ filenum=$(dd bs=$(( FILENUM_DIGITS + 1 )) count=1 status=none)
+ if test -z "$filenum"; then
+ break
+ fi
+ do_file $(sed -n "$(( 0x$filenum )) p" "$temp/primary")
+ done
+ echo 0 >"$temp/res.$jobid"
+}
+
+n_files=$(wc -l <"$temp/primary")
+if [ $n_jobs -gt $n_files ]; then
+ n_jobs=$n_files
+fi
+if [ $n_jobs -le 1 ]; then
+ while read nlinks inum f; do
+ do_file "$nlinks" "$inum" "$f"
+ done <"$temp/primary"
+else
+ for ((i = 1; i <= n_files; i++)); do
+ printf "%0${FILENUM_DIGITS}x\\n" $i
+ done | (
+ exec 3<&0
+ for ((i = 0; i < n_jobs; i++)); do
+ # The shell redirects stdin to /dev/null for background jobs. Work
+ # around this by duplicating fd 0
+ run_job $i <&3 &
+ done
+ wait
+ )
+ for f in "$temp"/res.*; do
+ res=$(< "$f")
+ if [ "$res" != "0" ]; then
+ exit 1
+ fi
+ done
+ cat "$temp"/debugsources.* >"$SOURCEFILE"
+ cat "$temp"/elfbins.* >"$ELFBINSFILE"
+fi

# Invoke the DWARF Compressor utility.
if $run_dwz \
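For reference, a minimal standalone sketch of the dispatch pattern added above (not part of the commit; the worker function, job count, and seq-generated job list are placeholders): fixed-width, newline-terminated hex record numbers are written to a pipe, and each background worker pulls one record per dd read and looks the matching line up in the job list, so every job is handled exactly once.

#!/bin/bash
# Standalone sketch of the pipe-based dispatch used in find-debuginfo.sh above.
# Workers read fixed-width hex indices from a shared pipe and look the
# corresponding line up in a job list.

FILENUM_DIGITS=6
n_jobs=4

joblist=$(mktemp)
seq 1 100 >"$joblist"          # stand-in for $temp/primary
n_files=$(wc -l <"$joblist")

worker()
{
  local id=$1 filenum line
  while :; do
    # one fixed-size read per record; an empty result means the pipe is closed
    filenum=$(dd bs=$(( FILENUM_DIGITS + 1 )) count=1 status=none)
    test -z "$filenum" && break
    line=$(sed -n "$(( 0x$filenum )) p" "$joblist")
    echo "worker $id processing: $line"
  done
}

for ((i = 1; i <= n_files; i++)); do
  printf "%0${FILENUM_DIGITS}x\n" "$i"
done | (
  # background jobs get stdin redirected to /dev/null; keep the pipe on fd 3
  exec 3<&0
  for ((i = 0; i < n_jobs; i++)); do
    worker $i <&3 &
  done
  wait
)

rm -f "$joblist"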