include manpage-scan.pl and nroff-scan.pl to fix tests 1139 and 1140

Upstream-commit: 54e4c6c396a9987f4232c73b5b4d31c01b16f8ae
This commit is contained in:
Kamil Dudka 2016-05-18 17:11:43 +02:00
parent 9a03eb6087
commit 1e7c2958aa
3 changed files with 394 additions and 0 deletions

View File

@ -6,6 +6,8 @@ License: MIT
Group: Applications/Internet
Source: http://curl.haxx.se/download/%{name}-%{version}.tar.lzma
Source2: curlbuild.h
Source3: https://raw.githubusercontent.com/bagder/curl/curl-7_49_0/tests/manpage-scan.pl
Source4: https://raw.githubusercontent.com/bagder/curl/curl-7_49_0/tests/nroff-scan.pl
# patch making libcurl multilib ready
Patch101: 0101-curl-7.32.0-multilib.patch
@ -116,6 +118,7 @@ documentation of the library, too.
%prep
%setup -q
install -m0644 %{SOURCE3} %{SOURCE4} tests/
# upstream patches

287
manpage-scan.pl Normal file
View File

@ -0,0 +1,287 @@
#!/usr/bin/env perl
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) 2016, Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.haxx.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
###########################################################################
#
# Scan symbols-in-version (which is verified to be correct by test 1119), then
# verify that each option mention in there that should have its own man page
# actually does.
#
# In addition, make sure that every current option to curl_easy_setopt,
# curl_easy_getinfo and curl_multi_setopt are also mentioned in their
# corresponding main (index) man page.
#
# src/tool_getparam.c lists all options curl can parse
# docs/curl.1 documents all command line options
# src/tool_help.c outputs all options with curl -h
# - make sure they're all in sync
#
# Output all deviances to stderr.
use strict;
use warnings;
# we may get the dir root pointed out
my $root=$ARGV[0] || ".";
# the two inputs this script cross-checks against the individual man pages
my $syms = "$root/docs/libcurl/symbols-in-versions";
my $curlh = "$root/include/curl/curl.h";
# number of problems found; used as the script's exit code
my $errors=0;
# the prepopulated alias list is the CURLINFO_* defines that are used for the
# debug function callback and the fact that they use the same prefix as the
# curl_easy_getinfo options was a mistake.
# Symbols mapping to 'none' (or any true value) are skipped by the man page
# checks below; curl.h parsing adds real "#define X Y" aliases to this hash.
my %alias = (
'CURLINFO_DATA_IN' => 'none',
'CURLINFO_DATA_OUT' => 'none',
'CURLINFO_END' => 'none',
'CURLINFO_HEADER_IN' => 'none',
'CURLINFO_HEADER_OUT' => 'none',
'CURLINFO_LASTONE' => 'none',
'CURLINFO_NONE' => 'none',
'CURLINFO_SSL_DATA_IN' => 'none',
'CURLINFO_SSL_DATA_OUT' => 'none',
'CURLINFO_TEXT' => 'none'
);
# scanmanpage($file, @words) - verify that each word in @words appears as a
# ".IP" item in the given (index) man page. Each missing mention is reported
# to stderr and bumps the file-global $errors counter.
sub scanmanpage {
    my ($file, @words) = @_;
    # lexical filehandle + 3-arg open; die like the other opens in this
    # script instead of silently reporting every word as missing
    open(my $fh, '<', $file) ||
        die "cannot open $file";
    my @m = <$fh>;
    close($fh);
    foreach my $m (@words) {
        my @g = grep(/^\.IP $m/, @m);
        if(!$g[0]) {
            print STDERR "Missing mention of $m in $file\n";
            $errors++;
        }
    }
}
# check for define aliases: any CURLOPT_/CURLINFO_/CURLMOPT_ symbol that is
# #defined to something else in curl.h is an alias and needs no man page of
# its own, so record it in %alias
open(my $curlh_fh, '<', $curlh) ||
    die "no curl.h";
while(<$curlh_fh>) {
    if(/^\#define (CURL(OPT|INFO|MOPT)_\w+) (.*)/) {
        $alias{$1}=$3;
    }
}
close($curlh_fh);
my @curlopt;   # CURLOPT_* easy options still present
my @curlinfo;  # CURLINFO_* getinfo options still present
my @curlmopt;  # CURLMOPT_* multi options still present
# scan symbols-in-versions: every non-alias, non-removed option must have
# its own man page in docs/libcurl/opts/
open(my $syms_fh, '<', $syms) ||
    die "no input file";
while(<$syms_fh>) {
    chomp;
    my $l= $_;
    if($l =~ /(CURL(OPT|INFO|MOPT)_\w+) *([0-9.]*) *([0-9.-]*) *([0-9.]*)/) {
        my ($opt, $type, $add, $dep, $rem) = ($1, $2, $3, $4, $5);
        if($alias{$opt}) {
            # an alias (or known exception), no stand-alone man page expected
        }
        elsif($rem) {
            # $opt was removed in $rem
            # so don't check for that
        }
        else {
            if($type eq "OPT") {
                push @curlopt, $opt;
            }
            elsif($type eq "INFO") {
                push @curlinfo, $opt;
            }
            elsif($type eq "MOPT") {
                push @curlmopt, $opt;
            }
            if(! -f "$root/docs/libcurl/opts/$opt.3") {
                print STDERR "Missing $opt.3\n";
                $errors++;
            }
        }
    }
}
close($syms_fh);
# each option must also be mentioned in its main (index) man page
scanmanpage("$root/docs/libcurl/curl_easy_setopt.3", @curlopt);
scanmanpage("$root/docs/libcurl/curl_easy_getinfo.3", @curlinfo);
scanmanpage("$root/docs/libcurl/curl_multi_setopt.3", @curlmopt);
# using this hash array, we can whitelist specific options
# The value is a bitmask of where an option is (or is pretended to be)
# present: 1 => src/tool_getparam.c, 2 => docs/curl.1, 4 => src/tool_help.c.
# Only a final value of 7 (all three) is accepted by the check at the end.
my %opts = (
# pretend these --no options exists in tool_getparam.c
'--no-alpn' => 1,
'--no-npn' => 1,
'-N, --no-buffer' => 1,
'--no-sessionid' => 1,
'--no-keepalive' => 1,
# pretend these options without -no exist in curl.1 and tool_help.c
'--alpn' => 6,
'--npn' => 6,
'--eprt' => 6,
'--epsv' => 6,
'--keepalive' => 6,
'-N, --buffer' => 6,
'--sessionid' => 6,
# deprecated options do not need to be in curl -h output
'--krb4' => 4,
'--ftp-ssl' => 4,
'--ftp-ssl-reqd' => 4,
# for tests and debug only, can remain hidden
'--test-event' => 6,
'--wdebug' => 6,
);
#########################################################################
# parse the curl code that parses the command line arguments!
open(my $param_fh, '<', "$root/src/tool_getparam.c") ||
    die "no input file";
my $list;       # set once the "struct LongShort aliases" table is reached
my @getparam;   # store all parsed parameters
while(<$param_fh>) {
    chomp;
    if(/struct LongShort aliases/) {
        $list=1;
    }
    elsif($list) {
        # table entries look like: {"x", "long-name", ...}
        if( /^ \{([^,]*), *([^ ]*)/) {
            my ($short, $long)=($1, $2);
            my $sh;
            my $lo;
            my $title;
            if($long =~ /\"(.*)\"/) {
                # long option
                $lo = $1;
                $title="--$lo";
            }
            if($short =~ /\"(.)\"/) {
                # a short option
                $sh = $1;
                $title="-$sh, $title";
            }
            push @getparam, $title;
            $opts{$title} |= 1;   # mark: found in tool_getparam.c
        }
    }
}
close($param_fh);
#########################################################################
# parse the curl.1 man page, extract all documented command line options
open(my $man_fh, '<', "$root/docs/curl.1") ||
    die "no input file";
my @manpage; # store all parsed parameters
while(<$man_fh>) {
    chomp;
    # documented options start with .IP "-x, --long" or .IP "--long"
    if(/^\.IP \"(-[^\"]*)\"/) {
        my $str = $1;
        my $combo;
        if($str =~ /^-(.), --([a-z0-9.-]*)/) {
            # figure out the -short, --long combo
            $combo = "-$1, --$2";
        }
        elsif($str =~ /^--([a-z0-9.-]*)/) {
            # figure out the --long name
            $combo = "--$1";
        }
        if($combo) {
            push @manpage, $combo;
            $opts{$combo} |= 2;   # mark: found in curl.1
        }
    }
}
close($man_fh);
#########################################################################
# parse the curl code that outputs the curl -h list
open(my $help_fh, '<', "$root/src/tool_help.c") ||
    die "no input file";
my @toolhelp; # store all parsed parameters
while(<$help_fh>) {
    chomp;
    # help lines are C string literals starting with the option name
    if(/^ \" *(.*)/) {
        my $str=$1;
        my $combo;
        if($str =~ /^-(.), --([a-z0-9.-]*)/) {
            # figure out the -short, --long combo
            $combo = "-$1, --$2";
        }
        elsif($str =~ /^--([a-z0-9.-]*)/) {
            # figure out the --long name
            $combo = "--$1";
        }
        if($combo) {
            push @toolhelp, $combo;
            $opts{$combo} |= 4;   # mark: found in tool_help.c
        }
    }
}
close($help_fh);
#
# Now we have three arrays with options to cross-reference. An option is
# only accepted when its bitmask has all three bits set (1|2|4 == 7).
foreach my $o (keys %opts) {
    my $where = $opts{$o};
    if($where != 7) {
        # this is not in all three places
        $errors++;
        # initialize to empty strings so the concatenations and the final
        # interpolation below never touch an undefined value
        my $exists = "";
        my $missing = "";
        if($where & 1) {
            $exists = " tool_getparam.c";
        }
        else {
            $missing = " tool_getparam.c";
        }
        if($where & 2) {
            $exists .= " curl.1";
        }
        else {
            $missing .= " curl.1";
        }
        if($where & 4) {
            $exists .= " tool_help.c";
        }
        else {
            $missing .= " tool_help.c";
        }
        print STDERR "$o is not in$missing (but in$exists)\n";
    }
}
# exit code is the total number of problems found
exit $errors;

104
nroff-scan.pl Normal file
View File

@ -0,0 +1,104 @@
#!/usr/bin/env perl
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) 2016, Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.haxx.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
###########################################################################
#
# scan nroff pages to find basic syntactic problems such as unbalanced \f
# codes or references to non-existing curl man pages.
use strict;
use warnings;

# first command line argument is the docs root dir, the rest are nroff files
my $docsroot = $ARGV[0];

if(!$docsroot || ($docsroot eq "-g")) {
    print "Usage: nroff-scan.pl <docs root dir> [nroff files]\n";
    exit;
}

shift @ARGV;

my @f = @ARGV;   # the nroff files to check
my %manp;        # cache: man page file name => 1 once known to exist
my $errors = 0;  # count of problems found; bumped by the subs below
# manpresent($man) - return true if the named man page file exists in any of
# the known documentation directories; positive results are cached in %manp
# so repeated references only hit the filesystem once.
sub manpresent {
    my ($man) = @_;
    return 1 if($manp{$man});
    foreach my $dir ($docsroot, "$docsroot/libcurl", "$docsroot/libcurl/opts") {
        if(-r "$dir/$man") {
            $manp{$man} = 1;
            return 1;
        }
    }
    return 0;
}
# file($f) - scan one nroff file: verify that \f formatting codes are closed
# with \fP, that curl/libcurl man page references use \fI and point at pages
# that actually exist, and that .BR references resolve too. Every problem is
# printed to stderr and bumps the file-global $errors counter.
sub file {
    my ($f) = @_;
    # lexical filehandle, 3-arg open, and a die message naming the file
    open(my $fh, '<', $f) ||
        die "cannot open $f";
    my $line = 1;
    while(<$fh>) {
        chomp;
        my $l = $_;
        # consume each \fX...\fY sequence on the line
        while($l =~ s/\\f(.)([^ ]*)\\f(.)//) {
            my ($pre, $str, $post)=($1, $2, $3);
            if($post ne "P") {
                # formatting must be terminated with \fP
                print STDERR "error: $f:$line: missing \\fP after $str\n";
                $errors++;
            }
            if($str =~ /((libcurl|curl)([^ ]*))\(3\)/i) {
                my $man = "$1.3";
                if(!manpresent($man)) {
                    print STDERR "error: $f:$line: refering to non-existing man page $man\n";
                    $errors++;
                }
                if($pre ne "I") {
                    # man page references must be set in italics (\fI)
                    print STDERR "error: $f:$line: use \\fI before $str\n";
                    $errors++;
                }
            }
        }
        # a man page mention left outside any \f formatting
        if($l =~ /(curl([^ ]*)\(3\))/i) {
            print STDERR "error: $f:$line: non-referencing $1\n";
            $errors++;
        }
        # .BR style references, e.g.: .BR curl_easy_setopt "(3), "
        if($l =~ /^\.BR (.*)/) {
            my $i= $1;
            while($i =~ s/((lib|)curl([^ ]*)) *\"\(3\)(,|) *\" *//i ) {
                my $man = "$1.3";
                if(!manpresent($man)) {
                    print STDERR "error: $f:$line: refering to non-existing man page $man\n";
                    $errors++;
                }
            }
        }
        $line++;
    }
    close($fh);
}
# check every nroff file given on the command line
for my $nrofffile (@f) {
    file($nrofffile);
}
# exit status: 1 if any problem was found, 0 otherwise
exit($errors ? 1 : 0);