author     Ian Jackson <ijackson@chiark.greenend.org.uk>  2018-09-29 15:26:02 +0100
committer  Ian Jackson <ijackson@chiark.greenend.org.uk>  2018-10-01 03:23:39 +0100
commit     d5a04ee68cb3ed5c2853bae83b132505b850a5e6 (patch)
tree       bf5093980833889fdf0c373a0d15e29329132bcf
parent     678da9f114859fe07899f72c81336f228ad40717 (diff)
i18n: dgit: mark some messages (2)
Signed-off-by: Ian Jackson <ijackson@chiark.greenend.org.uk>
-rwxr-xr-x  dgit  321
1 file changed, 175 insertions(+), 146 deletions(-)
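
The change follows one convention throughout: user-visible strings are wrapped in __ (translate a literal message now), f_ (translate a format string, then sprintf the arguments into it), or i_ (only mark the string for extraction, so it can be translated with __ at the point of use, as is done for $helpmsg and $later_warning_msg). Internal confess-style errors and the test-only and obsolete query methods are deliberately left untranslated. Below is a minimal sketch of how such helpers are conventionally built on Locale::gettext; it is an illustrative assumption, not dgit's actual I18n module:

    use strict;
    use warnings;
    use Locale::gettext;      # assumed binding; dgit's real helpers live in its own module

    textdomain('dgit');       # hypothetical text domain

    # Translate a literal message at the point of use.
    sub __ ($)  { return gettext($_[0]); }

    # Translate a format string, then substitute the arguments, so translators
    # can reorder the %s placeholders if their language requires it.
    sub f_ ($@) { my $fmt = shift; return sprintf gettext($fmt), @_; }

    # Only mark a string for extraction; it is stored untranslated and passed
    # through __ later (as done for $helpmsg and $later_warning_msg).
    sub i_ ($)  { return $_[0]; }

    # Usage, mirroring the hunks below (values hypothetical):
    #   fail __ "too few arguments";
    #   fail f_ "unknown suite %s, maybe -d would help", $isuite;
    #   our $helpmsg = i_ "main usages: ...";   # translated only when printed: print __ $helpmsg
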
diff --git a/dgit b/dgit
index d194f7a..1fde60b 100755
--- a/dgit
+++ b/dgit
@@ -493,7 +493,7 @@ sub protocol_expect (&$) {
my ($match, $fh) = @_;
local $_;
$_ = <$fh>;
- defined && chomp or badproto_badread $fh, "protocol message";
+ defined && chomp or badproto_badread $fh, __ "protocol message";
if (wantarray) {
my @r = &$match;
return @r if @r;
@@ -501,7 +501,7 @@ sub protocol_expect (&$) {
my $r = &$match;
return $r if $r;
}
- badproto $fh, "\`$_'";
+ badproto $fh, f_ "\`%s'", $_;
}
sub protocol_send_file ($$) {
@@ -522,10 +522,10 @@ sub protocol_send_file ($$) {
sub protocol_read_bytes ($$) {
my ($fh, $nbytes) = @_;
- $nbytes =~ m/^[1-9]\d{0,5}$|^0$/ or badproto \*RO, "bad byte count";
+ $nbytes =~ m/^[1-9]\d{0,5}$|^0$/ or badproto \*RO, __ "bad byte count";
my $d;
my $got = read $fh, $d, $nbytes;
- $got==$nbytes or badproto_badread $fh, "data block";
+ $got==$nbytes or badproto_badread $fh, __ "data block";
return $d;
}
@@ -606,7 +606,8 @@ sub url_get {
progress "downloading $what...";
my $r = $ua->get(@_) or die $!;
return undef if $r->code == 404;
- $r->is_success or fail "failed to fetch $what: ".$r->status_line;
+ $r->is_success or fail f_ "failed to fetch %s: %s",
+ $what, $r->status_line;
return $r->decoded_content(charset => 'none');
}
@@ -617,9 +618,9 @@ sub act_scary () { return !$dryrun_level; }
sub printdone {
if (!$dryrun_level) {
- progress "$us ok: @_";
+ progress f_ "%s ok: %s", $us, "@_";
} else {
- progress "would be ok: @_ (but dry run only)";
+ progress f_ "would be ok: %s (but dry run only)", "@_";
}
}
@@ -643,7 +644,7 @@ sub runcmd_ordryrun_local {
}
}
-our $helpmsg = <<END;
+our $helpmsg = i_ <<END;
main usages:
dgit [dgit-opts] clone [dgit-opts] package [suite] [./dir|/dir]
dgit [dgit-opts] fetch|pull [dgit-opts] [suite]
@@ -662,17 +663,17 @@ important dgit options:
-c<name>=<value> set git config option (used directly by dgit too)
END
-our $later_warning_msg = <<END;
+our $later_warning_msg = i_ <<END;
Perhaps the upload is stuck in incoming. Using the version from git.
END
sub badusage {
- print STDERR "$us: @_\n", $helpmsg or die $!;
+ print STDERR f_ "%s: %s\n%s", $us, "@_", __ $helpmsg or die $!;
finish 8;
}
sub nextarg {
- @ARGV or badusage "too few arguments";
+ @ARGV or badusage __ "too few arguments";
return scalar shift @ARGV;
}
@@ -680,7 +681,7 @@ sub pre_help () {
not_necessarily_a_tree();
}
sub cmd_help () {
- print $helpmsg or die $!;
+ print __ $helpmsg or die $!;
finish 0;
}
@@ -782,8 +783,9 @@ sub git_get_config ($) {
"undef")."\n"
if $debuglevel >= 4;
$l or next;
- @$l==1 or badcfg "multiple values for $c".
- " (in $src git config)" if @$l > 1;
+ @$l==1 or badcfg
+ f_ "multiple values for %s (in %s git config)", $c, $src
+ if @$l > 1;
return $l->[0];
}
return undef;
@@ -801,8 +803,10 @@ sub cfg {
return $dv;
}
}
- badcfg "need value for one of: @_\n".
- "$us: distro or suite appears not to be (properly) supported";
+ badcfg f_
+ "need value for one of: %s\n".
+ "%s: distro or suite appears not to be (properly) supported",
+ "@_", $us;
}
sub not_necessarily_a_tree () {
@@ -841,7 +845,8 @@ sub access_nomdistro () {
my $base = access_basedistro();
my $r = cfg("dgit-distro.$base.nominal-distro",'RETURN-UNDEF') // $base;
$r =~ m/^$distro_re$/ or badcfg
- "bad syntax for (nominal) distro \`$r' (does not match /^$distro_re$/)";
+ f_ "bad syntax for (nominal) distro \`%s' (does not match %s)",
+ $r, "/^$distro_re$/";
return $r;
}
@@ -855,7 +860,7 @@ sub access_quirk () {
$re =~ s/[^-0-9a-z_\%*()]/\\$&/ig;
$re =~ s/\*/.*/g;
$re =~ s/\%/([-0-9a-z_]+)/
- or $re =~ m/[()]/ or badcfg "backports-quirk needs \% or ( )";
+ or $re =~ m/[()]/ or badcfg __ "backports-quirk needs \% or ( )";
if ($isuite =~ m/^$re$/) {
return ('backports',"$basedistro-backports",$1);
}
@@ -871,7 +876,8 @@ sub parse_cfg_bool ($$$) {
return
$v =~ m/^[ty1]/ ? 1 :
$v =~ m/^[fn0]/ ? 0 :
- badcfg "$what needs t (true, y, 1) or f (false, n, 0) not \`$v'";
+ badcfg f_ "%s needs t (true, y, 1) or f (false, n, 0) not \`%s'",
+ $what, $v;
}
sub access_forpush_config () {
@@ -889,7 +895,8 @@ sub access_forpush_config () {
$v =~ m/^[ty1]/ ? 0 : # force readonly, forpush = 0
$v =~ m/^[fn0]/ ? 1 : # force nonreadonly, forpush = 1
$v =~ m/^[a]/ ? '' : # auto, forpush = ''
- badcfg "readonly needs t (true, y, 1) or f (false, n, 0) or a (auto)";
+ badcfg __
+ "readonly needs t (true, y, 1) or f (false, n, 0) or a (auto)";
}
sub access_forpush () {
@@ -898,12 +905,12 @@ sub access_forpush () {
}
sub pushing () {
- confess 'internal error '.Dumper($access_forpush)," ?" if
+ confess +(__ 'internal error').' '.Dumper($access_forpush)," ?" if
defined $access_forpush and !$access_forpush;
- badcfg "pushing but distro is configured readonly"
+ badcfg __ "pushing but distro is configured readonly"
if access_forpush_config() eq '0';
$access_forpush = 1;
- $supplementary_message = <<'END' unless $we_are_responder;
+ $supplementary_message = __ <<'END' unless $we_are_responder;
Push failed, before we got started.
You can retry the push, after fixing the problem, if you like.
END
@@ -1069,7 +1076,7 @@ our %rmad;
sub archive_query ($;@) {
my ($method) = shift @_;
- fail "this operation does not support multiple comma-separated suites"
+ fail __ "this operation does not support multiple comma-separated suites"
if $isuite =~ m/,/;
my $query = access_cfg('archive-query','RETURN-UNDEF');
$query =~ s/^(\w+):// or badcfg "invalid archive-query method \`$query'";
@@ -1115,8 +1122,9 @@ sub archive_api_query_cmd ($) {
fail "for $url: stat $key: $!" unless $!==ENOENT;
next;
}
- fail "config requested specific TLS key but do not know".
- " how to get curl to use exactly that EE key ($key)";
+ fail f_ "config requested specific TLS key but do not know".
+ " how to get curl to use exactly that EE key (%s)",
+ $key;
# push @cmd, "--cacert", $key, "--capath", "/dev/enoent";
# # Sadly the above line does not work because of changes
# # to gnutls. The real fix for #790093 may involve
@@ -1135,7 +1143,7 @@ sub archive_api_query_cmd ($) {
sub api_query ($$;$) {
use JSON;
my ($data, $subpath, $ok404) = @_;
- badcfg "ftpmasterapi archive query method takes no data part"
+ badcfg __ "ftpmasterapi archive query method takes no data part"
if length $data;
my @cmd = archive_api_query_cmd($subpath);
my $url = $cmd[$#cmd];
@@ -1143,11 +1151,11 @@ sub api_query ($$;$) {
my $json = cmdoutput @cmd;
unless ($json =~ s/\d+\d+\d$//) {
failedcmd_report_cmd undef, @cmd;
- fail "curl failed to print 3-digit HTTP code";
+ fail __ "curl failed to print 3-digit HTTP code";
}
my $code = $&;
return undef if $code eq '404' && $ok404;
- fail "fetch of $url gave HTTP code $code"
+ fail f_ "fetch of %s gave HTTP code %s", $url, $code
unless $url =~ m#^file://# or $code =~ m/^2/;
return decode_json($json);
}
@@ -1163,15 +1171,17 @@ sub canonicalise_suite_ftpmasterapi {
} qw(codename name);
push @matched, $entry;
}
- fail "unknown suite $isuite, maybe -d would help" unless @matched;
+ fail f_ "unknown suite %s, maybe -d would help", $isuite
+ unless @matched;
my $cn;
eval {
- @matched==1 or die "multiple matches for suite $isuite\n";
+ @matched==1 or die f_ "multiple matches for suite %s\n", $isuite;
$cn = "$matched[0]{codename}";
- defined $cn or die "suite $isuite info has no codename\n";
- $cn =~ m/^$suite_re$/ or die "suite $isuite maps to bad codename\n";
+ defined $cn or die f_ "suite %s info has no codename\n", $isuite;
+ $cn =~ m/^$suite_re$/
+ or die f_ "suite %s maps to bad codename\n", $isuite;
};
- die "bad ftpmaster api response: $@\n".Dumper(\@matched)
+ die +(__ "bad ftpmaster api response: ")."$@\n".Dumper(\@matched)
if length $@;
return $cn;
}
@@ -1185,18 +1195,18 @@ sub archive_query_ftpmasterapi {
eval {
my $vsn = "$entry->{version}";
my ($ok,$msg) = version_check $vsn;
- die "bad version: $msg\n" unless $ok;
+ die f_ "bad version: %s\n", $msg unless $ok;
my $component = "$entry->{component}";
- $component =~ m/^$component_re$/ or die "bad component";
+ $component =~ m/^$component_re$/ or die __ "bad component";
my $filename = "$entry->{filename}";
$filename && $filename !~ m#[^-+:._~0-9a-zA-Z/]|^[/.]|/[/.]#
- or die "bad filename";
+ or die __ "bad filename";
my $sha256sum = "$entry->{sha256sum}";
- $sha256sum =~ m/^[0-9a-f]+$/ or die "bad sha256sum";
+ $sha256sum =~ m/^[0-9a-f]+$/ or die __ "bad sha256sum";
push @rows, [ $vsn, "/pool/$component/$filename",
$digester, $sha256sum ];
};
- die "bad ftpmaster api response: $@\n".Dumper($entry)
+ die +(__ "bad ftpmaster api response: ")."$@\n".Dumper($entry)
if length $@;
}
@rows = sort { -version_compare($a->[0],$b->[0]) } @rows;
@@ -1235,15 +1245,15 @@ sub aptget_cache_clean {
sub aptget_lock_acquire () {
my $lockfile = "$aptget_base/lock";
- open APTGET_LOCK, '>', $lockfile or die "open $lockfile: $!";
- flock APTGET_LOCK, LOCK_EX or die "lock $lockfile: $!";
+ open APTGET_LOCK, '>', $lockfile or confess "open $lockfile: $!";
+ flock APTGET_LOCK, LOCK_EX or confess "lock $lockfile: $!";
}
sub aptget_prep ($) {
my ($data) = @_;
return if defined $aptget_base;
- badcfg "aptget archive query method takes no data part"
+ badcfg __ "aptget archive query method takes no data part"
if length $data;
my $cache = $ENV{XDG_CACHE_DIR} // "$ENV{HOME}/.cache";
@@ -1258,7 +1268,7 @@ sub aptget_prep ($) {
ensuredir $aptget_base;
my $quoted_base = $aptget_base;
- die "$quoted_base contains bad chars, cannot continue"
+ confess "$quoted_base contains bad chars, cannot continue"
if $quoted_base =~ m/["\\]/; # apt.conf(5) says no escaping :-/
ensuredir $aptget_base;
@@ -1274,7 +1284,7 @@ sub aptget_prep ($) {
cfg_apply_map(\$aptsuites, 'suite map',
access_cfg('aptget-suite-map', 'RETURN-UNDEF'));
- open SRCS, ">", "$aptget_base/$sourceslist" or die $!;
+ open SRCS, ">", "$aptget_base/$sourceslist" or confess $!;
printf SRCS "deb-src %s %s %s\n",
access_cfg('mirror'),
$aptsuites,
@@ -1326,13 +1336,13 @@ END
my @inreleasefiles = grep { m#/InRelease$# } @releasefiles;
@releasefiles = @inreleasefiles if @inreleasefiles;
if (!@releasefiles) {
- fail <<END;
-apt seemed to not to update dgit's cached Release files for $isuite.
-(Perhaps $cache
+ fail f_ <<END, $isuite, $cache;
+apt seemed to not to update dgit's cached Release files for %s.
+(Perhaps %s
is on a filesystem mounted `noatime'; if so, please use `relatime'.)
END
}
- die "apt updated too many Release files (@releasefiles), erk"
+ confess "apt updated too many Release files (@releasefiles), erk"
unless @releasefiles == 1;
($aptget_releasefile) = @releasefiles;
@@ -1348,8 +1358,9 @@ sub canonicalise_suite_aptget {
my $val = $release->{$name};
if (defined $val) {
printdebug "release file $name: $val\n";
- $val =~ m/^$suite_re$/o or fail
- "Release file ($aptget_releasefile) specifies intolerable $name";
+ $val =~ m/^$suite_re$/o or fail f_
+ "Release file (%s) specifies intolerable %s",
+ $aptget_releasefile, $name;
cfg_apply_map(\$val, 'suite rmap',
access_cfg('aptget-suite-rmap', 'RETURN-UNDEF'));
return $val
@@ -1376,8 +1387,9 @@ sub archive_query_aptget {
aptget_aptget(), qw(--download-only --only-source source), $package;
my @dscs = <$aptget_base/source/*.dsc>;
- fail "apt-get source did not produce a .dsc" unless @dscs;
- fail "apt-get source produced several .dscs (@dscs)" unless @dscs==1;
+ fail __ "apt-get source did not produce a .dsc" unless @dscs;
+ fail f_ "apt-get source produced several .dscs (%s)", "@dscs"
+ unless @dscs==1;
my $pre_dsc = parsecontrol $dscs[0], $dscs[0], 1;
@@ -1391,6 +1403,7 @@ sub file_in_archive_aptget () { return undef; }
sub package_not_wholly_new_aptget () { return undef; }
#---------- `dummyapicat' archive query method ----------
+# (untranslated, because this is for testing purposes etc.)
sub archive_query_dummycatapi { archive_query_ftpmasterapi @_; }
sub canonicalise_suite_dummycatapi { canonicalise_suite_ftpmasterapi @_; }
@@ -1491,6 +1504,7 @@ sub file_in_archive_madison { return undef; }
sub package_not_wholly_new_madison { return undef; }
#---------- `sshpsql' archive query method ----------
+# (untranslated, because this is obsolete)
sub sshpsql ($$$) {
my ($data,$runeinfo,$sql) = @_;
@@ -1569,6 +1583,7 @@ sub file_in_archive_sshpsql ($$$) { return undef; }
sub package_not_wholly_new_sshpsql ($$$) { return undef; }
#---------- `dummycat' archive query method ----------
+# (untranslated, because this is for testing purposes etc.)
sub canonicalise_suite_dummycat ($$) {
my ($proto,$data) = @_;
@@ -1614,6 +1629,7 @@ sub file_in_archive_dummycat () { return undef; }
sub package_not_wholly_new_dummycat () { return undef; }
#---------- tag format handling ----------
+# (untranslated, because everything should be new tag format by now)
sub access_cfg_tagformats () {
split /\,/, access_cfg('dgit-tag-format');
@@ -1668,12 +1684,12 @@ sub select_tagformat () {
sub canonicalise_suite () {
return if defined $csuite;
- fail "cannot operate on $isuite suite" if $isuite eq 'UNRELEASED';
+ fail f_ "cannot operate on %s suite", $isuite if $isuite eq 'UNRELEASED';
$csuite = archive_query('canonicalise_suite');
if ($isuite ne $csuite) {
- progress "canonical suite name for $isuite is $csuite";
+ progress f_ "canonical suite name for %s is %s", $isuite, $csuite;
} else {
- progress "canonical suite name is $csuite";
+ progress f_ "canonical suite name is %s", $csuite;
}
}
@@ -1693,13 +1709,13 @@ sub get_archive_dsc () {
$digester->add($dscdata);
my $got = $digester->hexdigest();
$got eq $digest or
- fail "$dscurl has hash $got but".
- " archive told us to expect $digest";
+ fail f_ "%s has hash %s but archive told us to expect %s",
+ $dscurl, $got, $digest;
}
parse_dscdata();
my $fmt = getfield $dsc, 'Format';
$format_ok{$fmt} or forceable_fail [qw(unsupported-source-format)],
- "unsupported source format $fmt, sorry";
+ f_ "unsupported source format %s, sorry", $fmt;
$dsc_checked = !!$digester;
printdebug "get_archive_dsc: Version ".(getfield $dsc, 'Version')."\n";
@@ -1726,7 +1742,8 @@ sub check_for_git () {
# NB that if we are pushing, $usedistro will be $distro/push
$instead_distro= cfg("dgit-distro.$usedistro.diverts.$divert");
$instead_distro =~ s{^/}{ access_basedistro()."/" }e;
- progress "diverting to $divert (using config for $instead_distro)";
+ progress f_ "diverting to %s (using config for %s)",
+ $divert, $instead_distro;
return check_for_git();
}
failedcmd @cmd unless defined $r and $r =~ m/^[01]$/;
@@ -1742,7 +1759,7 @@ sub check_for_git () {
# curl -sS -I with https_proxy prints
# HTTP/1.0 200 Connection established
$result =~ m/^\S+ (404|200) /s or
- fail "unexpected results from git check query - ".
+ fail +(__ "unexpected results from git check query - ").
Dumper($prefix, $result);
my $code = $1;
if ($code eq '404') {
@@ -1757,7 +1774,7 @@ sub check_for_git () {
} elsif ($how eq 'false') {
return 0;
} else {
- badcfg "unknown git-check \`$how'";
+ badcfg f_ "unknown git-check \`%s'", $how;
}
}
@@ -1772,7 +1789,7 @@ sub create_remote_git_repo () {
} elsif ($how eq 'true') {
# nothing to do
} else {
- badcfg "unknown git-create \`$how'";
+ badcfg f_ "unknown git-create \`%s'", $how;
}
}
@@ -1809,8 +1826,8 @@ sub remove_stray_gits ($) {
local $/="\0";
while (<GITS>) {
chomp or die;
- print STDERR "$us: warning: removing from $what: ",
- (messagequote $_), "\n";
+ print STDERR f_ "%s: warning: removing from %s: %s\n",
+ $us, $what, (messagequote $_);
rmtree $_;
}
}
@@ -1822,7 +1839,7 @@ sub mktree_in_ud_from_only_subdir ($;$) {
# changes into the subdir
my (@dirs) = <*/.>;
- die "expected one subdir but found @dirs ?" unless @dirs==1;
+ confess "expected one subdir but found @dirs ?" unless @dirs==1;
$dirs[0] =~ m#^([^/]+)/\.$# or die;
my $dir = $1;
changedir $dir;
@@ -1855,7 +1872,7 @@ sub dsc_files_info () {
foreach (split /\n/, $field) {
next unless m/\S/;
m/^(\w+) (\d+) (\S+)$/ or
- fail "could not parse .dsc $fname line \`$_'";
+ fail f_ "could not parse .dsc %s line \`%s'", $fname, $_;
my $digester = eval "$module"."->$method;" or die $@;
push @out, {
Hash => $1,
@@ -1866,8 +1883,8 @@ sub dsc_files_info () {
}
return @out;
}
- fail "missing any supported Checksums-* or Files field in ".
- $dsc->get_option('name');
+ fail f_ "missing any supported Checksums-* or Files field in %s",
+ $dsc->get_option('name');
}
sub dsc_files () {
@@ -1911,8 +1928,9 @@ sub files_compare_inputs (@) {
if (defined $$re) {
$fchecked{$f}{$in_name} = 1;
$$re eq $info or
- fail "hash or size of $f varies in $fname fields".
- " (between: ".$showinputs->().")";
+ fail f_
+ "hash or size of %s varies in %s fields (between: %s)",
+ $f, $fname, $showinputs->();
} else {
$$re = $info;
}
@@ -1920,17 +1938,18 @@ sub files_compare_inputs (@) {
@files = sort @files;
$expected_files //= \@files;
"@$expected_files" eq "@files" or
- fail "file list in $in_name varies between hash fields!";
+ fail f_ "file list in %s varies between hash fields!",
+ $in_name;
}
$expected_files or
- fail "$in_name has no files list field(s)";
+ fail f_ "%s has no files list field(s)", $in_name;
}
printdebug "files_compare_inputs ".Dumper(\%fchecked, \%record)
if $debuglevel>=2;
grep { keys %$_ == @$inputs-1 } values %fchecked
- or fail "no file appears in all file lists".
- " (looked in: ".$showinputs->().")";
+ or fail f_ "no file appears in all file lists (looked in: %s)",
+ $showinputs->();
}
sub is_orig_file_in_dsc ($$) {
@@ -1969,7 +1988,7 @@ sub test_source_only_changes ($) {
$l =~ m/\S+$/ or next;
# \.tar\.[a-z0-9]+ covers orig.tar and the tarballs in native packages
unless ($& =~ m/(?:\.dsc|\.diff\.gz|\.tar\.[a-z0-9]+|_source\.buildinfo)$/) {
- print "purportedly source-only changes polluted by $&\n";
+ print f_ "purportedly source-only changes polluted by %s\n", $&;
return 0;
}
}
@@ -1982,7 +2001,7 @@ sub changes_update_origs_from_dsc ($$$$) {
printdebug "checking origs needed ($upstreamvsn)...\n";
$_ = getfield $changes, 'Files';
m/^\w+ \d+ (\S+ \S+) \S+$/m or
- fail "cannot find section/priority from .changes Files field";
+ fail __ "cannot find section/priority from .changes Files field";
my $placementinfo = $1;
my %changed;
printdebug "checking origs needed placement '$placementinfo'...\n";
@@ -1994,7 +2013,7 @@ sub changes_update_origs_from_dsc ($$$$) {
printdebug "origs $file is_orig\n";
my $have = archive_query('file_in_archive', $file);
if (!defined $have) {
- print STDERR <<END;
+ print STDERR __ <<END;
archive does not support .orig check; hope you used --ch:--sa/-sd if needed
END
return;
@@ -2011,25 +2030,27 @@ END
$_ = $dsc->{$fname};
next unless defined;
m/^(\w+) .* \Q$file\E$/m or
- fail ".dsc $fname missing entry for $file";
+ fail f_ ".dsc %s missing entry for %s", $fname, $file;
if ($h->{$archivefield} eq $1) {
$same++;
} else {
- push @differ,
- "$archivefield: $h->{$archivefield} (archive) != $1 (local .dsc)";
+ push @differ, f_
+ "%s: %s (archive) != %s (local .dsc)",
+ $archivefield, $h->{$archivefield}, $1;
}
}
- die "$file ".Dumper($h)." ?!" if $same && @differ;
+ confess "$file ".Dumper($h)." ?!" if $same && @differ;
$found_same++
if $same;
- push @found_differ, "archive $h->{filename}: ".join "; ", @differ
+ push @found_differ,
+ f_ "archive %s: %s", $h->{filename}, join "; ", @differ
if @differ;
}
printdebug "origs $file f.same=$found_same".
" #f._differ=$#found_differ\n";
if (@found_differ && !$found_same) {
fail join "\n",
- "archive contains $file with different checksum",
+ (f_ "archive contains %s with different checksum", $file),
@found_differ;
}
# Now we edit the changes file to add or remove it
@@ -2047,7 +2068,7 @@ END
$dsc_data =~ m/^(.* \Q$file\E$)$/m or die "$dsc_data $file ?";
my $extra = $1;
$extra =~ s/ \d+ /$&$placementinfo /
- or die "$fname $extra >$dsc_data< ?"
+ or confess "$fname $extra >$dsc_data< ?"
if $fname eq 'Files';
$changes->{$fname} .= "\n". $extra;
$changed{$file} = "added";
@@ -2056,7 +2077,7 @@ END
}
if (%changed) {
foreach my $file (keys %changed) {
- progress sprintf
+ progress f_
"edited .changes for archive .orig contents: %s %s",
$changed{$file}, $file;
}
@@ -2065,10 +2086,11 @@ END
if (act_local()) {
rename $chtmp,$changesfile or die "$changesfile $!";
} else {
- progress "[new .changes left in $changesfile]";
+ progress f_ "[new .changes left in %s]", $changesfile;
}
} else {
- progress "$changesfile already has appropriate .orig(s) (if any)";
+ progress f_ "%s already has appropriate .orig(s) (if any)",
+ $changesfile;
}
}
@@ -2091,8 +2113,9 @@ sub clogp_authline ($) {
my $date = cmdoutput qw(date), '+%s %z', qw(-d), getfield($clogp,'Date');
my $authline = "$author $date";
$authline =~ m/$git_authline_re/o or
- fail "unexpected commit author line format \`$authline'".
- " (was generated from changelog Maintainer field)";
+ fail f_ "unexpected commit author line format \`%s'".
+ " (was generated from changelog Maintainer field)",
+ $authline;
return ($1,$2,$3) if wantarray;
return $authline;
}
@@ -2105,14 +2128,14 @@ sub vendor_patches_distro ($$) {
printdebug "checking for vendor-specific $series ($what)\n";
if (!open SERIES, "<", $series) {
- die "$series $!" unless $!==ENOENT;
+ confess "$series $!" unless $!==ENOENT;
return;
}
while (<SERIES>) {
next unless m/\S/;
next if m/^\s+\#/;
- print STDERR <<END;
+ print STDERR __ <<END;
Unfortunately, this source package uses a feature of dpkg-source where
the same source package unpacks to different source code on different
@@ -2125,8 +2148,9 @@ different packages, if different distros are supposed to have
different code).
END
- fail "Found active distro-specific series file for".
- " $checkdistro ($what): $series, cannot continue";
+ fail f_ "Found active distro-specific series file for".
+ " %s (%s): %s, cannot continue",
+ $checkdistro, $what, $series;
}
die "$series $!" if SERIES->error;
close SERIES;
@@ -2155,11 +2179,11 @@ sub check_for_vendor_patches () {
use Dpkg::Vendor;
vendor_patches_distro($ENV{DEB_VENDOR}, "DEB_VENDOR");
vendor_patches_distro(Dpkg::Vendor::get_current_vendor(),
- "Dpkg::Vendor \`current vendor'");
+ __ "Dpkg::Vendor \`current vendor'");
vendor_patches_distro(access_basedistro(),
- "(base) distro being accessed");
+ __ "(base) distro being accessed");
vendor_patches_distro(access_nomdistro(),
- "(nominal) distro being accessed");
+ __ "(nominal) distro being accessed");
}
sub generate_commits_from_dsc () {
@@ -2180,12 +2204,12 @@ sub generate_commits_from_dsc () {
printdebug "linked (using ...,fetch).\n";
} elsif ((printdebug "($!) "),
$! != ENOENT) {
- fail "accessing $buildproductsdir/$f,fetch: $!";
+ fail f_ "accessing %s: %s", "$buildproductsdir/$f,fetch", $!;
} elsif (link_ltarget $upper_f, $f) {
printdebug "linked.\n";
} elsif ((printdebug "($!) "),
$! != ENOENT) {
- fail "accessing $buildproductsdir/$f: $!";
+ fail f_ "accessing %s: %s", "$buildproductsdir/$f", $!;
} else {
printdebug "absent.\n";
}
@@ -2200,14 +2224,14 @@ sub generate_commits_from_dsc () {
printdebug "linked.\n";
} elsif ((printdebug "($!) "),
$! != EEXIST) {
- fail "saving $buildproductsdir/$f: $!";
+ fail f_ "saving %s: %s", "$buildproductsdir/$f", $!;
} elsif (!$refetched) {
printdebug "no need.\n";
} elsif (link $f, "$upper_f,fetch") {
printdebug "linked (using ...,fetch).\n";
} elsif ((printdebug "($!) "),
$! != EEXIST) {
- fail "saving $buildproductsdir/$f,fetch: $!";
+ fail f_ "saving %s: %s", "$buildproductsdir/$f,fetch", $!;
} else {
printdebug "cannot.\n";
}
@@ -2273,7 +2297,7 @@ sub generate_commits_from_dsc () {
chdir "_unpack-tar" or die $!;
open STDIN, "<&", $input or die $!;
exec @tarcmd;
- die "dgit (child): exec $tarcmd[0]: $!";
+ die f_ "dgit (child): exec %s: %s", $tarcmd[0], $!;
}
$!=0; (waitpid $tar_pid, 0) == $tar_pid or die $!;
!$? or failedcmd @tarcmd;
@@ -2337,7 +2361,7 @@ sub generate_commits_from_dsc () {
push @cmd, qw(-x --), $dscfn;
runcmd @cmd;
- my ($tree,$dir) = mktree_in_ud_from_only_subdir("source package");
+ my ($tree,$dir) = mktree_in_ud_from_only_subdir(__ "source package");
if (madformat $dsc->{format}) {
check_for_vendor_patches();
}
@@ -2355,7 +2379,7 @@ sub generate_commits_from_dsc () {
my $r1clogp;
printdebug "import clog search...\n";
- parsechangelog_loop \@clogcmd, "package changelog", sub {
+ parsechangelog_loop \@clogcmd, (__ "package changelog"), sub {
my ($thisstanza, $desc) = @_;
no warnings qw(exiting);
@@ -2395,7 +2419,7 @@ sub generate_commits_from_dsc () {
printdebug "import clog $r1clogp->{version} becomes r1\n";
};
- $clogp or fail "package changelog has no entries!";
+ $clogp or fail __ "package changelog has no entries!";
my $authline = clogp_authline $clogp;
my $changes = getfield $clogp, 'Changes';
@@ -2414,12 +2438,13 @@ sub generate_commits_from_dsc () {
foreach my $tt (@tartrees) {
printdebug "import tartree $tt->{F} $tt->{Tree}\n";
+ my $mbody = f_ "Import %s", $tt->{F};
$tt->{Commit} = make_commit_text($tt->{Orig} ? <<END_O : <<END_T);
tree $tt->{Tree}
author $r1authline
committer $r1authline
-Import $tt->{F}
+$mbody
[dgit import orig $tt->{F}]
END_O
@@ -2427,7 +2452,7 @@ tree $tt->{Tree}
author $authline
committer $authline
-Import $tt->{F}
+$mbody
[dgit import tarball $package $cversion $tt->{F}]
END_T
@@ -2494,7 +2519,7 @@ END
chomp $@;
progress "warning: $@";
$path = "$absurdity:$path";
- progress "$us: trying slow absurd-git-apply...";
+ progress f_ "%s: trying slow absurd-git-apply...", $us;
rename "../../gbp-pq-output","../../gbp-pq-output.0"
or $!==ENOENT
or die $!;
@@ -2513,19 +2538,19 @@ END
'exec >/dev/null 2>>../../gbp-pq-output', @showcmd;
debugcmd "+",@realcmd;
if (system @realcmd) {
- die +(shellquote @showcmd).
- " failed: ".
- failedcmd_waitstatus()."\n";
+ die f_ "%s failed: %s\n",
+ +(shellquote @showcmd),
+ failedcmd_waitstatus();
}
my $gapplied = git_rev_parse('HEAD');
my $gappliedtree = cmdoutput @git, qw(rev-parse HEAD:);
$gappliedtree eq $dappliedtree or
- fail <<END;
+ fail f_ <<END, $gapplied, $gappliedtree, $dappliedtree;
gbp-pq import and dpkg-source disagree!
- gbp-pq import gave commit $gapplied
- gbp-pq import gave tree $gappliedtree
- dpkg-source --before-build gave tree $dappliedtree
+ gbp-pq import gave commit %s
+ gbp-pq import gave tree %s
+ dpkg-source --before-build gave tree %s
END
$rawimport_hash = $gapplied;
};
@@ -2537,11 +2562,11 @@ END
}
}
- progress "synthesised git commit from .dsc $cversion";
+ progress f_ "synthesised git commit from .dsc %s", $cversion;
my $rawimport_mergeinput = {
Commit => $rawimport_hash,
- Info => "Import of source package",
+ Info => __ "Import of source package",
};
my @output = ($rawimport_mergeinput);
@@ -2552,16 +2577,18 @@ END
version_compare($oversion, $cversion);
if ($vcmp < 0) {
@output = ($rawimport_mergeinput, $lastpush_mergeinput,
- { Message => <<END, ReverseParents => 1 });
-Record $package ($cversion) in archive suite $csuite
+ { ReverseParents => 1,
+ Message => (f_ <<END, $package, $cversion, $csuite) });
+Record %s (%s) in archive suite %s
END
} elsif ($vcmp > 0) {
- print STDERR <<END or die $!;
+ print STDERR f_ <<END, $cversion, $oversion,
-Version actually in archive: $cversion (older)
-Last version pushed with dgit: $oversion (newer or same)
-$later_warning_msg
+Version actually in archive: %s (older)
+Last version pushed with dgit: %s (newer or same)
+%s
END
+ __ $later_warning_msg or die $!;
@output = $lastpush_mergeinput;
} else {
# Same version. Use what's in the server git branch,
@@ -2598,15 +2625,15 @@ sub complete_file_from_dsc ($$;$) {
if (stat_exists $tf) {
if ($checkhash->()) {
- progress "using existing $f";
+ progress f_ "using existing %s", $f;
return 1;
}
if (!$refetched) {
- fail "file $f has hash $got but .dsc".
- " demands hash $fi->{Hash} ".
- "(perhaps you should delete this file?)";
+ fail f_ "file %s has hash %s but .dsc demands hash %s".
+ " (perhaps you should delete this file?)",
+ $f, $got, $fi->{Hash};
}
- progress "need to fetch correct version of $f";
+ progress f_ "need to fetch correct version of %s", $f;
unlink $tf or die "$tf $!";
$$refetched = 1;
} else {
@@ -2622,9 +2649,9 @@ sub complete_file_from_dsc ($$;$) {
return 0 if !act_local();
$checkhash->() or
- fail "file $f has hash $got but .dsc".
- " demands hash $fi->{Hash} ".
- "(got wrong file from archive!)";
+ fail f_ "file %s has hash %s but .dsc demands hash %s".
+ " (got wrong file from archive!)",
+ $f, $got, $fi->{Hash};
return 1;
}
@@ -2717,7 +2744,7 @@ sub git_lrfetch_sane {
for (;;) {
printdebug "git_lrfetch_sane iteration $fetch_iteration\n";
if (++$fetch_iteration > 10) {
- fail "too many iterations trying to get sane fetch!";
+ fail __ "too many iterations trying to get sane fetch!";
}
my @look = map { "refs/$_" } @specs;
@@ -2731,8 +2758,8 @@ sub git_lrfetch_sane {
m/^(\w+)\s+(\S+)\n/ or die "ls-remote $_ ?";
my ($objid,$rrefname) = ($1,$2);
if (!$wanted_rref->($rrefname)) {
- print STDERR <<END;
-warning: git ls-remote @look reported $rrefname; this is silly, ignoring it.
+ print STDERR f_ <<END, "@look", $rrefname;
+warning: git ls-remote %s reported %s; this is silly, ignoring it.
END
next;
}
@@ -2775,8 +2802,8 @@ END
git-fetch @fspecs created $lrefname which git ls-remote @look didn't list.
END
} else {
- print STDERR <<END
-warning: git fetch @fspecs created $lrefname; this is silly, deleting it.
+ print STDERR f_ <<END, "@fspecs", $lrefname
+warning: git fetch %s created %s; this is silly, deleting it.
END
}
runcmd_ordryrun_local @git, qw(update-ref -d), $lrefname;
@@ -2790,14 +2817,14 @@ END
my $want = $wantr{$rrefname};
next if $got eq $want;
if (!defined $objgot{$want}) {
- fail <<END unless act_local();
+ fail __ <<END unless act_local();
--dry-run specified but we actually wanted the results of git fetch,
so this is not going to work. Try running dgit fetch first,
or using --damp-run instead of --dry-run.
END
- print STDERR <<END;
-warning: git ls-remote suggests we want $lrefname
-warning: and it should refer to $want
+ print STDERR f_ <<END, $lrefname, $want;
+warning: git ls-remote suggests we want %s
+warning: and it should refer to %s
warning: but git fetch didn't fetch that object to any relevant ref.
warning: This may be due to a race with someone updating the server.
warning: Will try again...
@@ -3161,13 +3188,14 @@ sub fetch_from_archive () {
if (!$lastpush_hash || $dsc_hash eq $lastpush_hash) {
@mergeinputs = $dsc_mergeinput
} elsif (is_fast_fwd($dsc_hash,$lastpush_hash)) {
- print STDERR <<END or die $!;
+ print STDERR f_ <<END, $dsc_hash, $lastpush_hash,
Git commit in archive is behind the last version allegedly pushed/uploaded.
-Commit referred to by archive: $dsc_hash
-Last version pushed with dgit: $lastpush_hash
-$later_warning_msg
+Commit referred to by archive: %s
+Last version pushed with dgit: %s
+%s
END
+ __ $later_warning_msg or die $!;
@mergeinputs = ($lastpush_mergeinput);
} else {
# Archive has .dsc which is not a descendant of the last dgit
@@ -3197,11 +3225,12 @@ END
} elsif ($lastpush_hash) {
# only in git, not in the archive yet
@mergeinputs = ($lastpush_mergeinput);
- print STDERR <<END or die $!;
+ print STDERR f_ <<END,
Package not found in the archive, but has allegedly been pushed using dgit.
-$later_warning_msg
+%s
END
+ __ $later_warning_msg or die $!;
} else {
printdebug "nothing found!\n";
if (defined $skew_warning_vsn) {
@@ -7231,7 +7260,7 @@ print STDERR "DRY RUN ONLY\n" if $dryrun_level > 1;
print STDERR "DAMP RUN - WILL MAKE LOCAL (UNSIGNED) CHANGES\n"
if $dryrun_level == 1;
if (!@ARGV) {
- print STDERR $helpmsg or die $!;
+ print STDERR __ $helpmsg or die $!;
finish 8;
}
$cmd = $subcommand = shift @ARGV;
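
A recurring pattern above is f_ applied to a heredoc, e.g. "fail f_ <<END, $isuite, $cache;": the heredoc body supplies the format string and the arguments are written after the <<END token (in the fetch_from_archive hunk the argument list even continues past the terminator, ending with "__ $later_warning_msg or die $!"). The following is a small self-contained illustration of that calling style, using a stand-in f_ and hypothetical values rather than dgit's real helper:

    use strict;
    use warnings;

    # Stand-in for dgit's f_ (the translation step is omitted for brevity).
    sub f_ ($@) { my $fmt = shift; return sprintf $fmt, @_; }

    my ($cversion, $oversion) = ('1.2-1', '1.2-2');            # hypothetical versions
    my $later_warning_msg = "Perhaps the upload is stuck in incoming.\n";

    # Arguments to f_ follow the <<END token; the heredoc body is the format.
    print STDERR f_ <<END, $cversion, $oversion, $later_warning_msg or die $!;
    Version actually in archive: %s (older)
    Last version pushed with dgit: %s (newer or same)
    %s
    END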