#!/usr/bin/perl -w
# dgit
# Integration between git and Debian-style archives
#
# Copyright (C)2013-2018 Ian Jackson
# Copyright (C)2017-2018 Sean Whitton
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

END { $? = $Debian::Dgit::ExitStatus::desired // -1; };
use Debian::Dgit::ExitStatus;
use Debian::Dgit::I18n;

use strict;

use Debian::Dgit qw(:DEFAULT :playground);
setup_sigwarn();

use IO::Handle;
use Data::Dumper;
use LWP::UserAgent;
use Dpkg::Control::Hash;
use File::Path;
use File::Spec;
use File::Temp qw(tempdir);
use File::Basename;
use Dpkg::Version;
use Dpkg::Compression;
use Dpkg::Compression::Process;
use POSIX;
use Locale::gettext;
use IPC::Open2;
use Digest::SHA;
use Digest::MD5;
use List::MoreUtils qw(pairwise);
use Text::Glob qw(match_glob);
use Fcntl qw(:DEFAULT :flock);
use Carp;

use Debian::Dgit;

our $our_version = 'UNRELEASED'; ###substituted###
our $absurdity = undef; ###substituted###

our @rpushprotovsn_support = qw(6 5 4); # Reverse order!
our $protovsn;

our $cmd;
our $subcommand;
our $isuite;
our $idistro;
our $package;
our @ropts;

our $sign = 1;
our $dryrun_level = 0;
our $changesfile;
our $buildproductsdir;
our $bpd_glob;
our $new_package = 0;
our $includedirty = 0;
our $rmonerror = 1;
our @deliberatelies;
our %previously;
our $existing_package = 'dpkg';
our $cleanmode;
our $changes_since_version;
our $rmchanges;
our $overwrite_version; # undef: not specified; '': check changelog
our $quilt_mode;
our $quilt_upstream_commitish;
our $quilt_upstream_commitish_used;
our $quilt_upstream_commitish_message;
our $quilt_options_re = 'gbp|dpm|baredebian(?:\+tarball|\+git)?';
our $quilt_modes_re = "linear|smash|auto|nofix|nocheck|unapplied|$quilt_options_re";
our $splitview_mode;
our $splitview_modes_re = qr{auto|always|never};
our $dodep14tag;
our %internal_object_save;
our $we_are_responder;
our $we_are_initiator;
our $initiator_tempdir;
our $patches_applied_dirtily = 00;
our $chase_dsc_distro=1;

our %forceopts = map { $_=>0 }
    qw(unrepresentable unsupported-source-format
       dsc-changes-mismatch changes-origs-exactly
       uploading-binaries uploading-source-only
       import-gitapply-absurd
       import-gitapply-no-absurd
       import-dsc-with-dgit-field);

our %format_ok = map { $_=>1 } ("1.0","3.0 (native)","3.0 (quilt)");

our $cleanmode_re = qr{(?:
      dpkg-source (?: -d )? (?: ,no-check | ,all-check )?
    | (?: git | git-ff ) (?: ,always )?
    | check (?: ,ignores )?
| none )}x; our $git_authline_re = '^([^<>]+) \<(\S+)\> (\d+ [-+]\d+)$'; our $splitbraincache = 'dgit-intern/quilt-cache'; our $rewritemap = 'dgit-rewrite/map'; our @dpkg_source_ignores = qw(-i(?:^|/)\.git(?:/|$) -I.git); our (@git) = qw(git); our (@dget) = qw(dget); our (@curl) = (qw(curl --proto-redir), '-all,http,https', qw(-L)); our (@dput) = qw(dput); our (@debsign) = qw(debsign); our (@gpg) = qw(gpg); our (@sbuild) = (qw(sbuild --no-source)); our (@ssh) = 'ssh'; our (@dgit) = qw(dgit); our (@git_debrebase) = qw(git-debrebase); our (@aptget) = qw(apt-get); our (@aptcache) = qw(apt-cache); our (@dpkgbuildpackage) = (qw(dpkg-buildpackage), @dpkg_source_ignores); our (@dpkgsource) = (qw(dpkg-source), @dpkg_source_ignores); our (@dpkggenchanges) = qw(dpkg-genchanges); our (@mergechanges) = qw(mergechanges -f); our (@gbp_build) = (''); our (@gbp_pq) = ('gbp pq'); our (@changesopts) = (''); our (@pbuilder) = ("sudo -E pbuilder","--no-source-only-changes"); our (@cowbuilder) = ("sudo -E cowbuilder","--no-source-only-changes"); our %opts_opt_map = ('dget' => \@dget, # accept for compatibility 'curl' => \@curl, 'dput' => \@dput, 'debsign' => \@debsign, 'gpg' => \@gpg, 'sbuild' => \@sbuild, 'ssh' => \@ssh, 'dgit' => \@dgit, 'git' => \@git, 'git-debrebase' => \@git_debrebase, 'apt-get' => \@aptget, 'apt-cache' => \@aptcache, 'dpkg-source' => \@dpkgsource, 'dpkg-buildpackage' => \@dpkgbuildpackage, 'dpkg-genchanges' => \@dpkggenchanges, 'gbp-build' => \@gbp_build, 'gbp-pq' => \@gbp_pq, 'ch' => \@changesopts, 'mergechanges' => \@mergechanges, 'pbuilder' => \@pbuilder, 'cowbuilder' => \@cowbuilder); our %opts_opt_cmdonly = ('gpg' => 1, 'git' => 1); our %opts_cfg_insertpos = map { $_, scalar @{ $opts_opt_map{$_} } } keys %opts_opt_map; sub parseopts_late_defaults(); sub quiltify_trees_differ ($$;$$$); sub setup_gitattrs(;$); sub check_gitattrs($$); our $playground; our $keyid; autoflush STDOUT 1; our $supplementary_message = ''; our $made_split_brain = 0; our $do_split_brain; # Interactions between quilt mode and split brain # (currently, split brain only implemented iff # madformat_wantfixup && quiltmode_splitting) # # source format sane `3.0 (quilt)' # madformat_wantfixup() # # quilt mode normal quiltmode # (eg linear) _splitbrain # # ------------ ------------------------------------------------ # # no split no q cache no q cache forbidden, # brain PM on master q fixup on master prevented # !do_split_brain() PM on master # # split brain no q cache q fixup cached, to dgit view # PM in dgit view PM in dgit view # # PM = pseudomerge to make ff, due to overwrite (or split view) # "no q cache" = do not record in cache on build, do not check cache # `3.0 (quilt)' with --quilt=nocheck is treated as sane format END { local ($@, $?); return unless forkcheck_mainprocess(); print STDERR "! 
$_\n" foreach $supplementary_message =~ m/^.+$/mg; } our $remotename = 'dgit'; our @ourdscfield = qw(Dgit Vcs-Dgit-Master); our $csuite; our $instead_distro; if (!defined $absurdity) { $absurdity = $0; $absurdity =~ s{/[^/]+$}{/absurd} or die; } sub madformat ($) { $_[0] eq '3.0 (quilt)' } sub lbranch () { return "$branchprefix/$csuite"; } my $lbranch_re = '^refs/heads/'.$branchprefix.'/([^/.]+)$'; sub lref () { return "refs/heads/".lbranch(); } sub lrref () { return "refs/remotes/$remotename/".server_branch($csuite); } sub rrref () { return server_ref($csuite); } sub srcfn ($$) { my ($vsn, $sfx) = @_; return &source_file_leafname($package, $vsn, $sfx); } sub is_orig_file_of_vsn ($$) { my ($f, $upstreamvsn) = @_; return is_orig_file_of_p_v($f, $package, $upstreamvsn); } sub dscfn ($) { my ($vsn) = @_; return srcfn($vsn,".dsc"); } sub changespat ($;$) { my ($vsn, $arch) = @_; return "${package}_".(stripepoch $vsn)."_".($arch//'*').".changes"; } our $us = 'dgit'; initdebug(''); our @end; END { local ($?); return unless forkcheck_mainprocess(); foreach my $f (@end) { eval { $f->(); }; print STDERR "$us: cleanup: $@" if length $@; } }; sub badcfg { print STDERR f_ "%s: invalid configuration: %s\n", $us, "@_"; finish 12; } sub forceable_fail ($$) { my ($forceoptsl, $msg) = @_; fail $msg unless grep { $forceopts{$_} } @$forceoptsl; print STDERR +(__ "warning: overriding problem due to --force:\n"). $msg; } sub forceing ($) { my ($forceoptsl) = @_; my @got = grep { $forceopts{$_} } @$forceoptsl; return 0 unless @got; print STDERR f_ "warning: skipping checks or functionality due to --force-%s\n", $got[0]; } sub no_such_package () { print STDERR f_ "%s: source package %s does not exist in suite %s\n", $us, $package, $isuite; finish 4; } sub deliberately ($) { my ($enquiry) = @_; return !!grep { $_ eq "--deliberately-$enquiry" } @deliberatelies; } sub deliberately_not_fast_forward () { foreach (qw(not-fast-forward fresh-repo)) { return 1 if deliberately($_) || deliberately("TEST-dgit-only-$_"); } } sub quiltmode_splitting () { $quilt_mode =~ m/gbp|dpm|unapplied|baredebian/; } sub format_quiltmode_splitting ($) { my ($format) = @_; return madformat_wantfixup($format) && quiltmode_splitting(); } sub do_split_brain () { !!($do_split_brain // confess) } sub opts_opt_multi_cmd { my $extra = shift; my @cmd; push @cmd, split /\s+/, shift @_; push @cmd, @$extra; push @cmd, @_; @cmd; } sub gbp_pq { return opts_opt_multi_cmd [], @gbp_pq; } sub dgit_privdir () { our $dgit_privdir_made //= ensure_a_playground 'dgit'; } sub bpd_abs () { my $r = $buildproductsdir; $r = "$maindir/$r" unless $r =~ m{^/}; return $r; } sub get_tree_of_commit ($) { my ($commitish) = @_; my $cdata = cmdoutput @git, qw(cat-file commit), $commitish; $cdata =~ m/\n\n/; $cdata = $`; $cdata =~ m/^tree (\w+)$/m or confess "cdata $cdata ?"; return $1; } sub branch_gdr_info ($$) { my ($symref, $head) = @_; my ($status, $msg, $current, $ffq_prev, $gdrlast) = gdr_ffq_prev_branchinfo($symref); return () unless $status eq 'branch'; $ffq_prev = git_get_ref $ffq_prev; $gdrlast = git_get_ref $gdrlast; $gdrlast &&= is_fast_fwd $gdrlast, $head; return ($ffq_prev, $gdrlast); } sub branch_is_gdr_unstitched_ff ($$$) { my ($symref, $head, $ancestor) = @_; my ($ffq_prev, $gdrlast) = branch_gdr_info($symref, $head); return 0 unless $ffq_prev; return 0 unless !defined $ancestor or is_fast_fwd $ancestor, $ffq_prev; return 1; } sub branch_is_gdr ($) { my ($head) = @_; # This is quite like git-debrebase's keycommits. 
    # We have our own implementation because:
    #  - our algorithm can do fewer tests so is faster
    #  - it saves testing to see if gdr is installed
    # NB we use this just for deciding whether to run gdr make-patches
    # Before reusing this algorithm for something else, its
    # suitability should be reconsidered.
    my $walk = $head;
    local $Debian::Dgit::debugcmd_when_debuglevel = 3;
    printdebug "branch_is_gdr $head...\n";
    my $get_patches = sub {
        my $t = git_cat_file "$_[0]:debian/patches", [qw(missing tree)];
        return $t // '';
    };
    my $tip_patches = $get_patches->($head);
  WALK: for (;;) {
        my $cdata = git_cat_file $walk, 'commit';
        my ($hdrs,$msg) = $cdata =~ m{\n\n} ? ($`,$') : ($cdata,'');
        if ($msg =~ m{^\[git-debrebase\ ( anchor | changelog | make-patches | merged-breakwater | pseudomerge ) [: ] }mx) {
            # no need to analyse this - it's sufficient
            # (gdr classifications: Anchor, MergedBreakwaters)
            # (made by gdr: Pseudomerge, Changelog)
            printdebug "branch_is_gdr $walk gdr $1 YES\n";
            return 1;
        }
        my @parents = ($hdrs =~ m/^parent (\w+)$/gm);
        if (@parents==2) {
            my $walk_tree = get_tree_of_commit $walk;
            foreach my $p (@parents) {
                my $p_tree = get_tree_of_commit $p;
                if ($p_tree eq $walk_tree) {
                    # pseudomerge contributor
                    # (gdr classification: Pseudomerge; not made by gdr)
                    printdebug "branch_is_gdr $walk unmarked pseudomerge\n"
                        if $debuglevel >= 2;
                    $walk = $p;
                    next WALK;
                }
            }
            # some other non-gdr merge
            # (gdr classification: VanillaMerge, DgitImportUnpatched, ?)
            printdebug "branch_is_gdr $walk ?-2-merge NO\n";
            return 0;
        }
        if (@parents>2) {
            # (gdr classification: ?)
            printdebug "branch_is_gdr $walk ?-octopus NO\n";
            return 0;
        }
        if (!@parents) {
            printdebug "branch_is_gdr $walk origin\n";
            return 0;
        }
        if ($get_patches->($walk) ne $tip_patches) {
            # Our parent added, removed, or edited patches, and wasn't
            # a gdr make-patches commit.  gdr make-patches probably
            # won't do that well, then.
            # (gdr classification of parent: AddPatches or ?)
            printdebug "branch_is_gdr $walk ?-patches NO\n";
            return 0;
        }
        if ($tip_patches eq '' and
            !defined git_cat_file "$walk~:debian" and
            !quiltify_trees_differ "$walk~", $walk ) {
            # (gdr classification of parent: BreakwaterStart)
            printdebug "branch_is_gdr $walk unmarked BreakwaterStart YES\n";
            return 1;
        }
        # (gdr classification: Upstream Packaging Mixed Changelog)
        printdebug "branch_is_gdr $walk plain\n"
            if $debuglevel >= 2;
        $walk = $parents[0];
    }
}

#---------- remote protocol support, common ----------

# remote push initiator/responder protocol:
#  $ dgit remote-push-build-host ... ...
#  where is ,... ...
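#
# (Legend, inferred from the responder_* and initiator_* helpers
#  further down rather than stated explicitly here: ">" marks messages
#  sent by the responder, i.e. the dgit running on the build host via
#  remote-push-build-host, and "<" marks messages sent back by the
#  initiating dgit, which holds the signing key.)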
# < dgit-remote-push-ready # # occasionally: # # > progress NBYTES # [NBYTES message] # # > supplementary-message NBYTES # [NBYTES message] # # main sequence: # # > file parsed-changelog # [indicates that output of dpkg-parsechangelog follows] # > data-block NBYTES # > [NBYTES bytes of data (no newline)] # [maybe some more blocks] # > data-end # # > file dsc # [etc] # # > file changes # [etc] # # > param head DGIT-VIEW-HEAD # > param csuite SUITE # > param tagformat new # $protovsn == 4 # > param splitbrain 0|1 # $protovsn >= 6 # > param maint-view MAINT-VIEW-HEAD # # > param buildinfo-filename P_V_X.buildinfo # zero or more times # > file buildinfo # for buildinfos to sign # # > previously REFNAME=OBJNAME # if --deliberately-not-fast-forward # # goes into tag, for replay prevention # # > want signed-tag # [indicates that signed tag is wanted] # < data-block NBYTES # < [NBYTES bytes of data (no newline)] # [maybe some more blocks] # < data-end # < files-end # # > want signed-dsc-changes # < data-block NBYTES [transfer of signed dsc] # [etc] # < data-block NBYTES [transfer of signed changes] # [etc] # < data-block NBYTES [transfer of each signed buildinfo # [etc] same number and order as "file buildinfo"] # ... # < files-end # # > complete our $i_child_pid; sub i_child_report () { # Sees if our child has died, and reap it if so. Returns a string # describing how it died if it failed, or undef otherwise. return undef unless $i_child_pid; my $got = waitpid $i_child_pid, WNOHANG; return undef if $got <= 0; die unless $got == $i_child_pid; $i_child_pid = undef; return undef unless $?; return f_ "build host child %s", waitstatusmsg(); } sub badproto ($$) { my ($fh, $m) = @_; fail f_ "connection lost: %s", $! if $fh->error; fail f_ "protocol violation; %s not expected", $m; } sub badproto_badread ($$) { my ($fh, $wh) = @_; fail f_ "connection lost: %s", $! if $!; my $report = i_child_report(); fail $report if defined $report; badproto $fh, f_ "eof (reading %s)", $wh; } sub protocol_expect (&$) { my ($match, $fh) = @_; local $_; $_ = <$fh>; defined && chomp or badproto_badread $fh, __ "protocol message"; if (wantarray) { my @r = &$match; return @r if @r; } else { my $r = &$match; return $r if $r; } badproto $fh, f_ "\`%s'", $_; } sub protocol_send_file ($$) { my ($fh, $ourfn) = @_; open PF, "<", $ourfn or die "$ourfn: $!"; for (;;) { my $d; my $got = read PF, $d, 65536; die "$ourfn: $!" unless defined $got; last if !$got; print $fh "data-block ".length($d)."\n" or confess "$!"; print $fh $d or confess "$!"; } PF->error and die "$ourfn $!"; print $fh "data-end\n" or confess "$!"; close PF; } sub protocol_read_bytes ($$) { my ($fh, $nbytes) = @_; $nbytes =~ m/^[1-9]\d{0,5}$|^0$/ or badproto \*RO, __ "bad byte count"; my $d; my $got = read $fh, $d, $nbytes; $got==$nbytes or badproto_badread $fh, __ "data block"; return $d; } sub protocol_receive_file ($$) { my ($fh, $ourfn) = @_; printdebug "() $ourfn\n"; open PF, ">", $ourfn or die "$ourfn: $!"; for (;;) { my ($y,$l) = protocol_expect { m/^data-block (.*)$/ ? (1,$1) : m/^data-end$/ ? 
(0,) : (); } $fh; last unless $y; my $d = protocol_read_bytes $fh, $l; print PF $d or confess "$!"; } close PF or confess "$!"; } #---------- remote protocol support, responder ---------- sub responder_send_command ($) { my ($command) = @_; return unless $we_are_responder; # called even without $we_are_responder printdebug ">> $command\n"; print PO $command, "\n" or confess "$!"; } sub responder_send_file ($$) { my ($keyword, $ourfn) = @_; return unless $we_are_responder; printdebug "]] $keyword $ourfn\n"; responder_send_command "file $keyword"; protocol_send_file \*PO, $ourfn; } sub responder_receive_files ($@) { my ($keyword, @ourfns) = @_; die unless $we_are_responder; printdebug "[[ $keyword @ourfns\n"; responder_send_command "want $keyword"; foreach my $fn (@ourfns) { protocol_receive_file \*PI, $fn; } printdebug "[[\$\n"; protocol_expect { m/^files-end$/ } \*PI; } #---------- remote protocol support, initiator ---------- sub initiator_expect (&) { my ($match) = @_; protocol_expect { &$match } \*RO; } #---------- end remote code ---------- sub progress { if ($we_are_responder) { my $m = join '', @_; responder_send_command "progress ".length($m) or confess "$!"; print PO $m or confess "$!"; } else { print @_, "\n"; } } our $ua; sub url_get { if (!$ua) { $ua = LWP::UserAgent->new(); $ua->env_proxy; } my $what = $_[$#_]; progress "downloading $what..."; my $r = $ua->get(@_) or confess "$!"; return undef if $r->code == 404; $r->is_success or fail f_ "failed to fetch %s: %s", $what, $r->status_line; return $r->decoded_content(charset => 'none'); } our ($dscdata,$dscurl,$dsc,$dsc_checked,$skew_warning_vsn); sub act_local () { return $dryrun_level <= 1; } sub act_scary () { return !$dryrun_level; } sub printdone { if (!$dryrun_level) { progress f_ "%s ok: %s", $us, "@_"; } else { progress f_ "would be ok: %s (but dry run only)", "@_"; } } sub dryrun_report { printcmd(\*STDERR,$debugprefix."#",@_); } sub runcmd_ordryrun { if (act_scary()) { runcmd @_; } else { dryrun_report @_; } } sub runcmd_ordryrun_local { if (act_local()) { runcmd @_; } else { dryrun_report @_; } } our $helpmsg = i_ < sign tag and package with instead of default --dry-run -n do not change anything, but go through the motions --damp-run -L like --dry-run but make local changes, without signing --new -N allow introducing a new package --debug -D increase debug level -c= set git config option (used directly by dgit too) END our $later_warning_msg = i_ < 'debian', 'dgit.default.default-suite' => 'unstable', 'dgit.default.old-dsc-distro' => 'debian', 'dgit-suite.*-security.distro' => 'debian-security', 'dgit.default.username' => '', 'dgit.default.archive-query-default-component' => 'main', 'dgit.default.ssh' => 'ssh', 'dgit.default.archive-query' => 'madison:', 'dgit.default.sshpsql-dbname' => 'service=projectb', 'dgit.default.aptget-components' => 'main', 'dgit.default.source-only-uploads' => 'ok', 'dgit.dsc-url-proto-ok.http' => 'true', 'dgit.dsc-url-proto-ok.https' => 'true', 'dgit.dsc-url-proto-ok.git' => 'true', 'dgit.vcs-git.suites', => 'sid', # ;-separated 'dgit.default.dsc-url-proto-ok' => 'false', # old means "repo server accepts pushes with old dgit tags" # new means "repo server accepts pushes with new dgit tags" # maint means "repo server accepts split brain pushes" # hist means "repo server may have old pushes without new tag" # ("hist" is implied by "old") 'dgit-distro.debian.archive-query' => 'ftpmasterapi:', 'dgit-distro.debian.git-check' => 'url', 'dgit-distro.debian.git-check-suffix' => '/info/refs', 
'dgit-distro.debian.new-private-pushers' => 't', 'dgit-distro.debian.source-only-uploads' => 'not-wholly-new', 'dgit-distro.debian/push.git-url' => '', 'dgit-distro.debian/push.git-host' => 'push.dgit.debian.org', 'dgit-distro.debian/push.git-user-force' => 'dgit', 'dgit-distro.debian/push.git-proto' => 'git+ssh://', 'dgit-distro.debian/push.git-path' => '/dgit/debian/repos', 'dgit-distro.debian/push.git-create' => 'true', 'dgit-distro.debian/push.git-check' => 'ssh-cmd', 'dgit-distro.debian.archive-query-url', 'https://api.ftp-master.debian.org/', # 'dgit-distro.debian.archive-query-tls-key', # '/etc/ssl/certs/%HOST%.pem:/etc/dgit/%HOST%.pem', # ^ this does not work because curl is broken nowadays # Fixing #790093 properly will involve providing providing the key # in some pacagke and maybe updating these paths. # # 'dgit-distro.debian.archive-query-tls-curl-args', # '--ca-path=/etc/ssl/ca-debian', # ^ this is a workaround but works (only) on DSA-administered machines 'dgit-distro.debian.git-url' => 'https://git.dgit.debian.org', 'dgit-distro.debian.git-url-suffix' => '', 'dgit-distro.debian.upload-host' => 'ftp-master', # for dput 'dgit-distro.debian.mirror' => 'http://ftp.debian.org/debian/', 'dgit-distro.debian-security.archive-query' => 'aptget:', 'dgit-distro.debian-security.mirror' => 'http://security.debian.org/debian-security/', 'dgit-distro.debian-security.aptget-suite-map' => 's#-security$#/updates#', 'dgit-distro.debian-security.aptget-suite-rmap' => 's#$#-security#', 'dgit-distro.debian-security.nominal-distro' => 'debian', 'dgit-distro.debian.backports-quirk' => '(squeeze)-backports*', 'dgit-distro.debian-backports.mirror' => 'http://backports.debian.org/debian-backports/', 'dgit-distro.ubuntu.git-check' => 'false', 'dgit-distro.ubuntu.mirror' => 'http://archive.ubuntu.com/ubuntu', 'dgit-distro.test-dummy.ssh' => "$td/ssh", 'dgit-distro.test-dummy.username' => "alice", 'dgit-distro.test-dummy.git-check' => "ssh-cmd", 'dgit-distro.test-dummy.git-create' => "ssh-cmd", 'dgit-distro.test-dummy.git-url' => "$td/git", 'dgit-distro.test-dummy.git-host' => "git", 'dgit-distro.test-dummy.git-path' => "$td/git", 'dgit-distro.test-dummy.archive-query' => "dummycatapi:", 'dgit-distro.test-dummy.archive-query-url' => "file://$td/aq/", 'dgit-distro.test-dummy.mirror' => "file://$td/mirror/", 'dgit-distro.test-dummy.upload-host' => 'test-dummy', ); our %gitcfgs; our @gitcfgsources = qw(cmdline local global system); our $invoked_in_git_tree = 1; sub git_slurp_config () { # This algoritm is a bit subtle, but this is needed so that for # options which we want to be single-valued, we allow the # different config sources to override properly. See #835858. foreach my $src (@gitcfgsources) { next if $src eq 'cmdline'; # we do this ourselves since git doesn't handle it $gitcfgs{$src} = git_slurp_config_src $src; } } sub git_get_config ($) { my ($c) = @_; foreach my $src (@gitcfgsources) { my $l = $gitcfgs{$src}{$c}; confess "internal error ($l $c)" if $l && !ref $l; printdebug"C $c ".(defined $l ? join " ", map { messagequote "'$_'" } @$l : "undef")."\n" if $debuglevel >= 4; $l or next; @$l==1 or badcfg f_ "multiple values for %s (in %s git config)", $c, $src if @$l > 1; $l->[0] =~ m/\n/ and badcfg f_ "value for config option %s (in %s git config) contains newline(s)!", $c, $src; return $l->[0]; } return undef; } sub cfg { foreach my $c (@_) { return undef if $c =~ /RETURN-UNDEF/; printdebug "C? 
$c\n" if $debuglevel >= 5; my $v = git_get_config($c); return $v if defined $v; my $dv = $defcfg{$c}; if (defined $dv) { printdebug "CD $c $dv\n" if $debuglevel >= 4; return $dv; } } badcfg f_ "need value for one of: %s\n". "%s: distro or suite appears not to be (properly) supported", "@_", $us; } sub not_necessarily_a_tree () { # needs to be called from pre_* @gitcfgsources = grep { $_ ne 'local' } @gitcfgsources; $invoked_in_git_tree = 0; } sub access_basedistro__noalias () { if (defined $idistro) { return $idistro; } else { my $def = cfg("dgit-suite.$isuite.distro", 'RETURN-UNDEF'); return $def if defined $def; foreach my $src (@gitcfgsources, 'internal') { my $kl = $src eq 'internal' ? \%defcfg : $gitcfgs{$src}; next unless $kl; foreach my $k (keys %$kl) { next unless $k =~ m#^dgit-suite\.(.*)\.distro$#; my $dpat = $1; next unless match_glob $dpat, $isuite; return $kl->{$k}; } } return cfg("dgit.default.distro"); } } sub access_basedistro () { my $noalias = access_basedistro__noalias(); my $canon = cfg("dgit-distro.$noalias.alias-canon",'RETURN-UNDEF'); return $canon // $noalias; } sub access_nomdistro () { my $base = access_basedistro(); my $r = cfg("dgit-distro.$base.nominal-distro",'RETURN-UNDEF') // $base; $r =~ m/^$distro_re$/ or badcfg f_ "bad syntax for (nominal) distro \`%s' (does not match %s)", $r, "/^$distro_re$/"; return $r; } sub access_quirk () { # returns (quirk name, distro to use instead or undef, quirk-specific info) my $basedistro = access_basedistro(); my $backports_quirk = cfg("dgit-distro.$basedistro.backports-quirk", 'RETURN-UNDEF'); if (defined $backports_quirk) { my $re = $backports_quirk; $re =~ s/[^-0-9a-z_\%*()]/\\$&/ig; $re =~ s/\*/.*/g; $re =~ s/\%/([-0-9a-z_]+)/ or $re =~ m/[()]/ or badcfg __ "backports-quirk needs \% or ( )"; if ($isuite =~ m/^$re$/) { return ('backports',"$basedistro-backports",$1); } } return ('none',undef); } our $access_forpush; sub parse_cfg_bool ($$$) { my ($what,$def,$v) = @_; $v //= $def; return $v =~ m/^[ty1]/ ? 1 : $v =~ m/^[fn0]/ ? 0 : badcfg f_ "%s needs t (true, y, 1) or f (false, n, 0) not \`%s'", $what, $v; } sub access_forpush_config () { my $d = access_basedistro(); return 1 if $new_package && parse_cfg_bool('new-private-pushers', 0, cfg("dgit-distro.$d.new-private-pushers", 'RETURN-UNDEF')); my $v = cfg("dgit-distro.$d.readonly", 'RETURN-UNDEF'); $v //= 'a'; return $v =~ m/^[ty1]/ ? 0 : # force readonly, forpush = 0 $v =~ m/^[fn0]/ ? 1 : # force nonreadonly, forpush = 1 $v =~ m/^[a]/ ? '' : # auto, forpush = '' badcfg __ "readonly needs t (true, y, 1) or f (false, n, 0) or a (auto)"; } sub access_forpush () { $access_forpush //= access_forpush_config(); return $access_forpush; } sub default_from_access_cfg ($$$;$) { my ($var, $keybase, $defval, $permit_re) = @_; return if defined $$var; $$var = access_cfg("$keybase-newer", 'RETURN-UNDEF'); $$var = undef if $$var && $$var !~ m/^$permit_re$/; $$var //= access_cfg($keybase, 'RETURN-UNDEF'); $$var //= $defval; badcfg f_ "unknown %s \`%s'", $keybase, $$var if defined $permit_re and $$var !~ m/$permit_re/; } sub pushing () { confess +(__ 'internal error').' '.Dumper($access_forpush)," ?" if defined $access_forpush and !$access_forpush; badcfg __ "pushing but distro is configured readonly" if access_forpush_config() eq '0'; $access_forpush = 1; $supplementary_message = __ <<'END' unless $we_are_responder; Push failed, before we got started. You can retry the push, after fixing the problem, if you like. 
END parseopts_late_defaults(); } sub notpushing () { parseopts_late_defaults(); } sub determine_whether_split_brain ($) { my ($format) = @_; { local $access_forpush; default_from_access_cfg(\$splitview_mode, 'split-view', 'auto', $splitview_modes_re); $do_split_brain = 1 if $splitview_mode eq 'always'; } printdebug "format $format, quilt mode $quilt_mode\n"; if (format_quiltmode_splitting $format) { $splitview_mode ne 'never' or fail f_ "dgit: quilt mode \`%s' (for format \`%s')". " implies split view, but split-view set to \`%s'", $quilt_mode, $format, $splitview_mode; $do_split_brain = 1; } $do_split_brain //= 0; } sub supplementary_message ($) { my ($msg) = @_; if (!$we_are_responder) { $supplementary_message = $msg; return; } else { responder_send_command "supplementary-message ".length($msg) or confess "$!"; print PO $msg or confess "$!"; } } sub access_distros () { # Returns list of distros to try, in order # # We want to try: # 0. `instead of' distro name(s) we have been pointed to # 1. the access_quirk distro, if any # 2a. the user's specified distro, or failing that } basedistro # 2b. the distro calculated from the suite } my @l = access_basedistro(); my (undef,$quirkdistro) = access_quirk(); unshift @l, $quirkdistro; unshift @l, $instead_distro; @l = grep { defined } @l; push @l, access_nomdistro(); if (access_forpush()) { @l = map { ("$_/push", $_) } @l; } @l; } sub access_cfg_cfgs (@) { my (@keys) = @_; my @cfgs; # The nesting of these loops determines the search order. We put # the key loop on the outside so that we search all the distros # for each key, before going on to the next key. That means that # if access_cfg is called with a more specific, and then a less # specific, key, an earlier distro can override the less specific # without necessarily overriding any more specific keys. (If the # distro wants to override the more specific keys it can simply do # so; whereas if we did the loop the other way around, it would be # impossible to for an earlier distro to override a less specific # key but not the more specific ones without restating the unknown # values of the more specific keys. my @realkeys; my @rundef; # We have to deal with RETURN-UNDEF specially, so that we don't # terminate the search prematurely. foreach (@keys) { if (m/RETURN-UNDEF/) { push @rundef, $_; last; } push @realkeys, $_ } foreach my $d (access_distros()) { push @cfgs, map { "dgit-distro.$d.$_" } @realkeys; } push @cfgs, map { "dgit.default.$_" } @realkeys; push @cfgs, @rundef; return @cfgs; } sub access_cfg (@) { my (@keys) = @_; my (@cfgs) = access_cfg_cfgs(@keys); my $value = cfg(@cfgs); return $value; } sub access_cfg_bool ($$) { my ($def, @keys) = @_; parse_cfg_bool($keys[0], $def, access_cfg(@keys, 'RETURN-UNDEF')); } sub string_to_ssh ($) { my ($spec) = @_; if ($spec =~ m/\s/) { return qw(sh -ec), 'exec '.$spec.' "$@"', 'x'; } else { return ($spec); } } sub access_cfg_ssh () { my $gitssh = access_cfg('ssh', 'RETURN-UNDEF'); if (!defined $gitssh) { return @ssh; } else { return string_to_ssh $gitssh; } } sub access_runeinfo ($) { my ($info) = @_; return ": dgit ".access_basedistro()." $info ;"; } sub access_someuserhost ($) { my ($some) = @_; my $user = access_cfg("$some-user-force", 'RETURN-UNDEF'); defined($user) && length($user) or $user = access_cfg("$some-user",'username'); my $host = access_cfg("$some-host"); return length($user) ? 
"$user\@$host" : $host; } sub access_gituserhost () { return access_someuserhost('git'); } sub access_giturl (;$) { my ($optional) = @_; my $url = access_cfg('git-url','RETURN-UNDEF'); my $suffix; if (!length $url) { my $proto = access_cfg('git-proto', 'RETURN-UNDEF'); return undef unless defined $proto; $url = $proto. access_gituserhost(). access_cfg('git-path'); } else { $suffix = access_cfg('git-url-suffix','RETURN-UNDEF'); } $suffix //= '.git'; return "$url/$package$suffix"; } sub commit_getclogp ($) { # Returns the parsed changelog hashref for a particular commit my ($objid) = @_; our %commit_getclogp_memo; my $memo = $commit_getclogp_memo{$objid}; return $memo if $memo; my $mclog = dgit_privdir()."clog"; runcmd shell_cmd "exec >$mclog", @git, qw(cat-file blob), "$objid:debian/changelog"; $commit_getclogp_memo{$objid} = parsechangelog("-l$mclog"); } sub parse_dscdata () { my $dscfh = new IO::File \$dscdata, '<' or confess "$!"; printdebug Dumper($dscdata) if $debuglevel>1; $dsc = parsecontrolfh($dscfh,$dscurl,1); printdebug Dumper($dsc) if $debuglevel>1; } our %rmad; sub archive_query ($;@) { my ($method) = shift @_; fail __ "this operation does not support multiple comma-separated suites" if $isuite =~ m/,/; my $query = access_cfg('archive-query','RETURN-UNDEF'); $query =~ s/^(\w+):// or badcfg "invalid archive-query method \`$query'"; my $proto = $1; my $data = $'; #'; { no strict qw(refs); &{"${method}_${proto}"}($proto,$data,@_); } } sub archive_query_prepend_mirror { my $m = access_cfg('mirror'); return map { [ $_->[0], $m.$_->[1], @$_[2..$#$_] ] } @_; } sub pool_dsc_subpath ($$) { my ($vsn,$component) = @_; # $package is implict arg my $prefix = substr($package, 0, $package =~ m/^l/ ? 4 : 1); return "/pool/$component/$prefix/$package/".dscfn($vsn); } sub cfg_apply_map ($$$) { my ($varref, $what, $mapspec) = @_; return unless $mapspec; printdebug "config $what EVAL{ $mapspec; }\n"; $_ = $$varref; eval "package Dgit::Config; $mapspec;"; die $@ if $@; $$varref = $_; } #---------- `ftpmasterapi' archive query method (nascent) ---------- sub archive_api_query_cmd ($) { my ($subpath) = @_; my @cmd = (@curl, qw(-sS)); my $url = access_cfg('archive-query-url'); if ($url =~ m#^https://([-.0-9a-z]+)/#) { my $host = $1; my $keys = access_cfg('archive-query-tls-key','RETURN-UNDEF') //''; foreach my $key (split /\:/, $keys) { $key =~ s/\%HOST\%/$host/g; if (!stat $key) { fail "for $url: stat $key: $!" unless $!==ENOENT; next; } fail f_ "config requested specific TLS key but do not know". " how to get curl to use exactly that EE key (%s)", $key; # push @cmd, "--cacert", $key, "--capath", "/dev/enoent"; # # Sadly the above line does not work because of changes # # to gnutls. The real fix for #790093 may involve # # new curl options. last; } # Fixing #790093 properly will involve providing a value # for this on clients. 
my $kargs = access_cfg('archive-query-tls-curl-ca-args','RETURN-UNDEF'); push @cmd, split / /, $kargs if defined $kargs; } push @cmd, $url.$subpath; return @cmd; } sub api_query ($$;$) { use JSON; my ($data, $subpath, $ok404) = @_; badcfg __ "ftpmasterapi archive query method takes no data part" if length $data; my @cmd = archive_api_query_cmd($subpath); my $url = $cmd[$#cmd]; push @cmd, qw(-w %{http_code}); my $json = cmdoutput @cmd; unless ($json =~ s/\d+\d+\d$//) { failedcmd_report_cmd undef, @cmd; fail __ "curl failed to print 3-digit HTTP code"; } my $code = $&; return undef if $code eq '404' && $ok404; fail f_ "fetch of %s gave HTTP code %s", $url, $code unless $url =~ m#^file://# or $code =~ m/^2/; return decode_json($json); } sub canonicalise_suite_ftpmasterapi { my ($proto,$data) = @_; my $suites = api_query($data, 'suites'); my @matched; foreach my $entry (@$suites) { next unless grep { my $v = $entry->{$_}; defined $v && $v eq $isuite; } qw(codename name); push @matched, $entry; } fail f_ "unknown suite %s, maybe -d would help", $isuite unless @matched; my $cn; eval { @matched==1 or die f_ "multiple matches for suite %s\n", $isuite; $cn = "$matched[0]{codename}"; defined $cn or die f_ "suite %s info has no codename\n", $isuite; $cn =~ m/^$suite_re$/ or die f_ "suite %s maps to bad codename\n", $isuite; }; die +(__ "bad ftpmaster api response: ")."$@\n".Dumper(\@matched) if length $@; return $cn; } sub archive_query_ftpmasterapi { my ($proto,$data) = @_; my $info = api_query($data, "dsc_in_suite/$isuite/$package"); my @rows; my $digester = Digest::SHA->new(256); foreach my $entry (@$info) { eval { my $vsn = "$entry->{version}"; my ($ok,$msg) = version_check $vsn; die f_ "bad version: %s\n", $msg unless $ok; my $component = "$entry->{component}"; $component =~ m/^$component_re$/ or die __ "bad component"; my $filename = "$entry->{filename}"; $filename && $filename !~ m#[^-+:._~0-9a-zA-Z/]|^[/.]|/[/.]# or die __ "bad filename"; my $sha256sum = "$entry->{sha256sum}"; $sha256sum =~ m/^[0-9a-f]+$/ or die __ "bad sha256sum"; push @rows, [ $vsn, "/pool/$component/$filename", $digester, $sha256sum ]; }; die +(__ "bad ftpmaster api response: ")."$@\n".Dumper($entry) if length $@; } @rows = sort { -version_compare($a->[0],$b->[0]) } @rows; return archive_query_prepend_mirror @rows; } sub file_in_archive_ftpmasterapi { my ($proto,$data,$filename) = @_; my $pat = $filename; $pat =~ s/_/\\_/g; $pat = "%/$pat"; $pat =~ s#[^-+_.0-9a-z/]# sprintf '%%%02x', ord $& #ge; my $info = api_query($data, "file_in_archive/$pat", 1); } sub package_not_wholly_new_ftpmasterapi { my ($proto,$data,$pkg) = @_; my $info = api_query($data,"madison?package=${pkg}&f=json"); return !!@$info; } #---------- `aptget' archive query method ---------- our $aptget_base; our $aptget_releasefile; our $aptget_configpath; sub aptget_aptget () { return @aptget, qw(-c), $aptget_configpath; } sub aptget_aptcache () { return @aptcache, qw(-c), $aptget_configpath; } sub aptget_cache_clean { runcmd_ordryrun_local qw(sh -ec), 'cd "$1"; find -atime +30 -type f -print0 | xargs -0r rm --', 'x', $aptget_base; } sub aptget_lock_acquire () { my $lockfile = "$aptget_base/lock"; open APTGET_LOCK, '>', $lockfile or confess "open $lockfile: $!"; flock APTGET_LOCK, LOCK_EX or confess "lock $lockfile: $!"; } sub aptget_prep ($) { my ($data) = @_; return if defined $aptget_base; badcfg __ "aptget archive query method takes no data part" if length $data; my $cache = $ENV{XDG_CACHE_DIR} // "$ENV{HOME}/.cache"; ensuredir $cache; ensuredir 
"$cache/dgit"; my $cachekey = access_cfg('aptget-cachekey','RETURN-UNDEF') // access_nomdistro(); $aptget_base = "$cache/dgit/aptget"; ensuredir $aptget_base; my $quoted_base = $aptget_base; confess "$quoted_base contains bad chars, cannot continue" if $quoted_base =~ m/["\\]/; # apt.conf(5) says no escaping :-/ ensuredir $aptget_base; aptget_lock_acquire(); aptget_cache_clean(); $aptget_configpath = "$aptget_base/apt.conf#$cachekey"; my $sourceslist = "source.list#$cachekey"; my $aptsuites = $isuite; cfg_apply_map(\$aptsuites, 'suite map', access_cfg('aptget-suite-map', 'RETURN-UNDEF')); open SRCS, ">", "$aptget_base/$sourceslist" or confess "$!"; printf SRCS "deb-src %s %s %s\n", access_cfg('mirror'), $aptsuites, access_cfg('aptget-components') or confess "$!"; ensuredir "$aptget_base/cache"; ensuredir "$aptget_base/lists"; open CONF, ">", $aptget_configpath or confess "$!"; print CONF <) { next unless stat_exists $oldlist; my ($mtime) = (stat _)[9]; utime $oldatime, $mtime, $oldlist or die "$oldlist $!"; } runcmd_ordryrun_local aptget_aptget(), qw(update); my @releasefiles; foreach my $oldlist (<$aptget_base/lists/*Release>) { next unless stat_exists $oldlist; my ($atime) = (stat _)[8]; next if $atime == $oldatime; push @releasefiles, $oldlist; } my @inreleasefiles = grep { m#/InRelease$# } @releasefiles; @releasefiles = @inreleasefiles if @inreleasefiles; if (!@releasefiles) { fail f_ <{$name}; if (defined $val) { printdebug "release file $name: $val\n"; $val =~ m/^$suite_re$/o or fail f_ "Release file (%s) specifies intolerable %s", $aptget_releasefile, $name; cfg_apply_map(\$val, 'suite rmap', access_cfg('aptget-suite-rmap', 'RETURN-UNDEF')); return $val } } return $isuite; } sub archive_query_aptget { my ($proto,$data) = @_; aptget_prep($data); ensuredir "$aptget_base/source"; foreach my $old (<$aptget_base/source/*.dsc>) { unlink $old or die "$old: $!"; } my $showsrc = cmdoutput aptget_aptcache(), qw(showsrc), $package; return () unless $showsrc =~ m/^package:\s*\Q$package\E\s*$/mi; # avoids apt-get source failing with ambiguous error code runcmd_ordryrun_local shell_cmd 'cd "$1"/source; shift', $aptget_base, aptget_aptget(), qw(--download-only --only-source source), $package; my @dscs = <$aptget_base/source/*.dsc>; fail __ "apt-get source did not produce a .dsc" unless @dscs; fail f_ "apt-get source produced several .dscs (%s)", "@dscs" unless @dscs==1; my $pre_dsc = parsecontrol $dscs[0], $dscs[0], 1; use URI::Escape; my $uri = "file://". uri_escape $dscs[0]; $uri =~ s{\%2f}{/}gi; return [ (getfield $pre_dsc, 'Version'), $uri ]; } sub file_in_archive_aptget () { return undef; } sub package_not_wholly_new_aptget () { return undef; } #---------- `dummyapicat' archive query method ---------- # (untranslated, because this is for testing purposes etc.) 
sub archive_query_dummycatapi { archive_query_ftpmasterapi @_; } sub canonicalise_suite_dummycatapi { canonicalise_suite_ftpmasterapi @_; } sub dummycatapi_run_in_mirror ($@) { # runs $fn with FIA open onto rune my ($rune, $argl, $fn) = @_; my $mirror = access_cfg('mirror'); $mirror =~ s#^file://#/# or die "$mirror ?"; my @cmd = (qw(sh -ec), 'cd "$1"; shift'."\n".$rune, qw(x), $mirror, @$argl); debugcmd "-|", @cmd; open FIA, "-|", @cmd or confess "$!"; my $r = $fn->(); close FIA or ($!==0 && $?==141) or die failedcmd @cmd; return $r; } sub file_in_archive_dummycatapi ($$$) { my ($proto,$data,$filename) = @_; my @out; dummycatapi_run_in_mirror ' find -name "$1" -print0 | xargs -0r sha256sum ', [$filename], sub { while () { chomp or die; printdebug "| $_\n"; m/^(\w+) (\S+)$/ or die "$_ ?"; push @out, { sha256sum => $1, filename => $2 }; } }; return \@out; } sub package_not_wholly_new_dummycatapi { my ($proto,$data,$pkg) = @_; dummycatapi_run_in_mirror " find -name ${pkg}_*.dsc ", [], sub { local $/ = undef; !!; }; } #---------- `madison' archive query method ---------- sub archive_query_madison { return archive_query_prepend_mirror map { [ @$_[0..1] ] } madison_get_parse(@_); } sub madison_get_parse { my ($proto,$data) = @_; die unless $proto eq 'madison'; if (!length $data) { $data= access_cfg('madison-distro','RETURN-UNDEF'); $data //= access_basedistro(); } $rmad{$proto,$data,$package} ||= cmdoutput qw(rmadison -asource),"-s$isuite","-u$data",$package; my $rmad = $rmad{$proto,$data,$package}; my @out; foreach my $l (split /\n/, $rmad) { $l =~ m{^ \s*( [^ \t|]+ )\s* \| \s*( [^ \t|]+ )\s* \| \s*( [^ \t|/]+ )(?:/([^ \t|/]+))? \s* \| \s*( [^ \t|]+ )\s* }x or die "$rmad ?"; $1 eq $package or die "$rmad $package ?"; my $vsn = $2; my $newsuite = $3; my $component; if (defined $4) { $component = $4; } else { $component = access_cfg('archive-query-default-component'); } $5 eq 'source' or die "$rmad ?"; push @out, [$vsn,pool_dsc_subpath($vsn,$component),$newsuite]; } return sort { -version_compare($a->[0],$b->[0]); } @out; } sub canonicalise_suite_madison { # madison canonicalises for us my @r = madison_get_parse(@_); @r or fail f_ "unable to canonicalise suite using package %s". " which does not appear to exist in suite %s;". " --existing-package may help", $package, $isuite; return $r[0][2]; } sub file_in_archive_madison { return undef; } sub package_not_wholly_new_madison { return undef; } #---------- `sshpsql' archive query method ---------- # (untranslated, because this is obsolete) sub sshpsql ($$$) { my ($data,$runeinfo,$sql) = @_; if (!length $data) { $data= access_someuserhost('sshpsql').':'. access_cfg('sshpsql-dbname'); } $data =~ m/:/ or badcfg "invalid sshpsql method string \`$data'"; my ($userhost,$dbname) = ($`,$'); #'; my @rows; my @cmd = (access_cfg_ssh, $userhost, access_runeinfo("ssh-psql $runeinfo"). " export LC_MESSAGES=C; export LC_CTYPE=C;". " ".shellquote qw(psql -A), $dbname, qw(-c), $sql); debugcmd "|",@cmd; open P, "-|", @cmd or confess "$!"; while (
<P>
) { chomp or die; printdebug(">|$_|\n"); push @rows, $_; } $!=0; $?=0; close P or failedcmd @cmd; @rows or die; my $nrows = pop @rows; $nrows =~ s/^\((\d+) rows?\)$/$1/ or die "$nrows ?"; @rows == $nrows+1 or die "$nrows ".(scalar @rows)." ?"; @rows = map { [ split /\|/, $_ ] } @rows; my $ncols = scalar @{ shift @rows }; die if grep { scalar @$_ != $ncols } @rows; return @rows; } sub sql_injection_check { foreach (@_) { die "$_ $& ?" if m{[^-+=:_.,/0-9a-zA-Z]}; } } sub archive_query_sshpsql ($$) { my ($proto,$data) = @_; sql_injection_check $isuite, $package; my @rows = sshpsql($data, "archive-query $isuite $package", <[0],$b->[0]) } @rows; my $digester = Digest::SHA->new(256); @rows = map { my ($vsn,$component,$filename,$sha256sum) = @$_; [ $vsn, "/pool/$component/$filename",$digester,$sha256sum ]; } @rows; return archive_query_prepend_mirror @rows; } sub canonicalise_suite_sshpsql ($$) { my ($proto,$data) = @_; sql_injection_check $isuite; my @rows = sshpsql($data, "canonicalise-suite $isuite", <[0] } @rows; fail "unknown suite $isuite" unless @rows; die "ambiguous $isuite: @rows ?" if @rows>1; return $rows[0]; } sub file_in_archive_sshpsql ($$$) { return undef; } sub package_not_wholly_new_sshpsql ($$$) { return undef; } #---------- `dummycat' archive query method ---------- # (untranslated, because this is for testing purposes etc.) sub canonicalise_suite_dummycat ($$) { my ($proto,$data) = @_; my $dpath = "$data/suite.$isuite"; if (!open C, "<", $dpath) { $!==ENOENT or die "$dpath: $!"; printdebug "dummycat canonicalise_suite $isuite $dpath ENOENT\n"; return $isuite; } $!=0; $_ = ; chomp or die "$dpath: $!"; close C; printdebug "dummycat canonicalise_suite $isuite $dpath = $_\n"; return $_; } sub archive_query_dummycat ($$) { my ($proto,$data) = @_; canonicalise_suite(); my $dpath = "$data/package.$csuite.$package"; if (!open C, "<", $dpath) { $!==ENOENT or die "$dpath: $!"; printdebug "dummycat query $csuite $package $dpath ENOENT\n"; return (); } my @rows; while () { next if m/^\#/; next unless m/\S/; die unless chomp; printdebug "dummycat query $csuite $package $dpath | $_\n"; my @row = split /\s+/, $_; @row==2 or die "$dpath: $_ ?"; push @rows, \@row; } C->error and die "$dpath: $!"; close C; return archive_query_prepend_mirror sort { -version_compare($a->[0],$b->[0]); } @rows; } sub file_in_archive_dummycat () { return undef; } sub package_not_wholly_new_dummycat () { return undef; } #---------- archive query entrypoints and rest of program ---------- sub canonicalise_suite () { return if defined $csuite; fail f_ "cannot operate on %s suite", $isuite if $isuite eq 'UNRELEASED'; $csuite = archive_query('canonicalise_suite'); if ($isuite ne $csuite) { progress f_ "canonical suite name for %s is %s", $isuite, $csuite; } else { progress f_ "canonical suite name is %s", $csuite; } } sub get_archive_dsc () { canonicalise_suite(); my @vsns = archive_query('archive_query'); foreach my $vinfo (@vsns) { my ($vsn,$vsn_dscurl,$digester,$digest) = @$vinfo; $dscurl = $vsn_dscurl; $dscdata = url_get($dscurl); if (!$dscdata) { $skew_warning_vsn = $vsn if !defined $skew_warning_vsn; next; } if ($digester) { $digester->reset(); $digester->add($dscdata); my $got = $digester->hexdigest(); $got eq $digest or fail f_ "%s has hash %s but archive told us to expect %s", $dscurl, $got, $digest; } parse_dscdata(); my $fmt = getfield $dsc, 'Format'; $format_ok{$fmt} or forceable_fail [qw(unsupported-source-format)], f_ "unsupported source format %s, sorry", $fmt; $dsc_checked = !!$digester; printdebug 
"get_archive_dsc: Version ".(getfield $dsc, 'Version')."\n"; return; } $dsc = undef; printdebug "get_archive_dsc: nothing in archive, returning undef\n"; } sub check_for_git (); sub check_for_git () { # returns 0 or 1 my $how = access_cfg('git-check'); if ($how eq 'ssh-cmd') { my @cmd = (access_cfg_ssh, access_gituserhost(), access_runeinfo("git-check $package"). " set -e; cd ".access_cfg('git-path').";". " if test -d $package.git; then echo 1; else echo 0; fi"); my $r= cmdoutput @cmd; if (defined $r and $r =~ m/^divert (\w+)$/) { my $divert=$1; my ($usedistro,) = access_distros(); # NB that if we are pushing, $usedistro will be $distro/push $instead_distro= cfg("dgit-distro.$usedistro.diverts.$divert"); $instead_distro =~ s{^/}{ access_basedistro()."/" }e; progress f_ "diverting to %s (using config for %s)", $divert, $instead_distro; return check_for_git(); } failedcmd @cmd unless defined $r and $r =~ m/^[01]$/; return $r+0; } elsif ($how eq 'url') { my $prefix = access_cfg('git-check-url','git-url'); my $suffix = access_cfg('git-check-suffix','git-suffix', 'RETURN-UNDEF') // '.git'; my $url = "$prefix/$package$suffix"; my @cmd = (@curl, qw(-sS -I), $url); my $result = cmdoutput @cmd; $result =~ s/^\S+ 200 .*\n\r?\n//; # curl -sS -I with https_proxy prints # HTTP/1.0 200 Connection established $result =~ m/^\S+ (404|200) /s or fail +(__ "unexpected results from git check query - "). Dumper($prefix, $result); my $code = $1; if ($code eq '404') { return 0; } elsif ($code eq '200') { return 1; } else { die; } } elsif ($how eq 'true') { return 1; } elsif ($how eq 'false') { return 0; } else { badcfg f_ "unknown git-check \`%s'", $how; } } sub create_remote_git_repo () { my $how = access_cfg('git-create'); if ($how eq 'ssh-cmd') { runcmd_ordryrun (access_cfg_ssh, access_gituserhost(), access_runeinfo("git-create $package"). "set -e; cd ".access_cfg('git-path').";". " cp -a _template $package.git"); } elsif ($how eq 'true') { # nothing to do } else { badcfg f_ "unknown git-create \`%s'", $how; } } our ($dsc_hash,$lastpush_mergeinput); our ($dsc_distro, $dsc_hint_tag, $dsc_hint_url); sub prep_ud () { dgit_privdir(); # ensures that $dgit_privdir_made is based on $maindir $playground = fresh_playground 'dgit/unpack'; } sub mktree_in_ud_here () { playtree_setup $gitcfgs{local}; } sub git_write_tree () { my $tree = cmdoutput @git, qw(write-tree); $tree =~ m/^\w+$/ or die "$tree ?"; return $tree; } sub git_add_write_tree () { runcmd @git, qw(add -Af .); return git_write_tree(); } sub remove_stray_gits ($) { my ($what) = @_; my @gitscmd = qw(find -name .git -prune -print0); debugcmd "|",@gitscmd; open GITS, "-|", @gitscmd or confess "$!"; { local $/="\0"; while () { chomp or die; print STDERR f_ "%s: warning: removing from %s: %s\n", $us, $what, (messagequote $_); rmtree $_; } } $!=0; $?=0; close GITS or failedcmd @gitscmd; } sub mktree_in_ud_from_only_subdir ($;$) { my ($what,$raw) = @_; # changes into the subdir my (@dirs) = <*/.>; confess "expected one subdir but found @dirs ?" 
unless @dirs==1; $dirs[0] =~ m#^([^/]+)/\.$# or die; my $dir = $1; changedir $dir; remove_stray_gits($what); mktree_in_ud_here(); if (!$raw) { my ($format, $fopts) = get_source_format(); if (madformat($format)) { rmtree '.pc'; } } my $tree=git_add_write_tree(); return ($tree,$dir); } our @files_csum_info_fields = (['Checksums-Sha256','Digest::SHA', 'new(256)', 'sha256sum'], ['Checksums-Sha1', 'Digest::SHA', 'new(1)', 'sha1sum'], ['Files', 'Digest::MD5', 'new()', 'md5sum']); sub dsc_files_info () { foreach my $csumi (@files_csum_info_fields) { my ($fname, $module, $method) = @$csumi; my $field = $dsc->{$fname}; next unless defined $field; eval "use $module; 1;" or die $@; my @out; foreach (split /\n/, $field) { next unless m/\S/; m/^(\w+) (\d+) (\S+)$/ or fail f_ "could not parse .dsc %s line \`%s'", $fname, $_; my $digester = eval "$module"."->$method;" or die $@; push @out, { Hash => $1, Bytes => $2, Filename => $3, Digester => $digester, }; } return @out; } fail f_ "missing any supported Checksums-* or Files field in %s", $dsc->get_option('name'); } sub dsc_files () { map { $_->{Filename} } dsc_files_info(); } sub files_compare_inputs (@) { my $inputs = \@_; my %record; my %fchecked; my $showinputs = sub { return join "; ", map { $_->get_option('name') } @$inputs; }; foreach my $in (@$inputs) { my $expected_files; my $in_name = $in->get_option('name'); printdebug "files_compare_inputs $in_name\n"; foreach my $csumi (@files_csum_info_fields) { my ($fname) = @$csumi; printdebug "files_compare_inputs $in_name $fname\n"; my $field = $in->{$fname}; next unless defined $field; my @files; foreach (split /\n/, $field) { next unless m/\S/; my ($info, $f) = m/^(\w+ \d+) (?:\S+ \S+ )?(\S+)$/ or fail "could not parse $in_name $fname line \`$_'"; printdebug "files_compare_inputs $in_name $fname $f\n"; push @files, $f; my $re = \ $record{$f}{$fname}; if (defined $$re) { $fchecked{$f}{$in_name} = 1; $$re eq $info or fail f_ "hash or size of %s varies in %s fields (between: %s)", $f, $fname, $showinputs->(); } else { $$re = $info; } } @files = sort @files; $expected_files //= \@files; "@$expected_files" eq "@files" or fail f_ "file list in %s varies between hash fields!", $in_name; } $expected_files or fail f_ "%s has no files list field(s)", $in_name; } printdebug "files_compare_inputs ".Dumper(\%fchecked, \%record) if $debuglevel>=2; grep { keys %$_ == @$inputs-1 } values %fchecked or fail f_ "no file appears in all file lists (looked in: %s)", $showinputs->(); } sub is_orig_file_in_dsc ($$) { my ($f, $dsc_files_info) = @_; return 0 if @$dsc_files_info <= 1; # One file means no origs, and the filename doesn't have a "what # part of dsc" component. (Consider versions ending `.orig'.) return 0 unless $f =~ m/\.$orig_f_tail_re$/o; return 1; } # This function determines whether a .changes file is source-only from # the point of view of dak. Thus, it permits *_source.buildinfo # files. # # It does not, however, permit any other buildinfo files. After a # source-only upload, the buildds will try to upload files like # foo_1.2.3_amd64.buildinfo. If the package maintainer included files # named like this in their (otherwise) source-only upload, the uploads # of the buildd can be rejected by dak. Fixing the resultant # situation can require manual intervention. So we block such # .buildinfo files when the user tells us to perform a source-only # upload (such as when using the push-source subcommand with the -C # option, which calls this function). 
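#
# Illustrative example (not from dak documentation): for an upload of
# foo 1.2-3, files such as foo_1.2-3.dsc, foo_1.2.orig.tar.xz,
# foo_1.2-3.debian.tar.xz and foo_1.2-3_source.buildinfo all match the
# pattern in test_source_only_changes below and so count as
# source-only; foo_1.2-3_amd64.deb or foo_1.2-3_amd64.buildinfo would
# make it return 0.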
# # Note, though, that when dgit is told to prepare a source-only # upload, such as when subcommands like build-source and push-source # without -C are used, dgit has a more restrictive notion of # source-only .changes than dak: such uploads will never include # *_source.buildinfo files. This is because there is no use for such # files when using a tool like dgit to produce the source package, as # dgit ensures the source is identical to git HEAD. sub test_source_only_changes ($) { my ($changes) = @_; foreach my $l (split /\n/, getfield $changes, 'Files') { $l =~ m/\S+$/ or next; # \.tar\.[a-z0-9]+ covers orig.tar and the tarballs in native packages unless ($& =~ m/(?:\.dsc|\.diff\.gz|\.tar\.[a-z0-9]+|_source\.buildinfo)$/) { print f_ "purportedly source-only changes polluted by %s\n", $&; return 0; } } return 1; } sub changes_update_origs_from_dsc ($$$$) { my ($dsc, $changes, $upstreamvsn, $changesfile) = @_; my %changes_f; printdebug "checking origs needed ($upstreamvsn)...\n"; $_ = getfield $changes, 'Files'; m/^\w+ \d+ (\S+ \S+) \S+$/m or fail __ "cannot find section/priority from .changes Files field"; my $placementinfo = $1; my %changed; printdebug "checking origs needed placement '$placementinfo'...\n"; foreach my $l (split /\n/, getfield $dsc, 'Files') { $l =~ m/\S+$/ or next; my $file = $&; printdebug "origs $file | $l\n"; next unless is_orig_file_of_vsn $file, $upstreamvsn; printdebug "origs $file is_orig\n"; my $have = archive_query('file_in_archive', $file); if (!defined $have) { print STDERR __ <{$archivefield}; $_ = $dsc->{$fname}; next unless defined; m/^(\w+) .* \Q$file\E$/m or fail f_ ".dsc %s missing entry for %s", $fname, $file; if ($h->{$archivefield} eq $1) { $same++; } else { push @differ, f_ "%s: %s (archive) != %s (local .dsc)", $archivefield, $h->{$archivefield}, $1; } } confess "$file ".Dumper($h)." ?!" if $same && @differ; $found_same++ if $same; push @found_differ, f_ "archive %s: %s", $h->{filename}, join "; ", @differ if @differ; } printdebug "origs $file f.same=$found_same". " #f._differ=$#found_differ\n"; if (@found_differ && !$found_same) { fail join "\n", (f_ "archive contains %s with different checksum", $file), @found_differ; } # Now we edit the changes file to add or remove it foreach my $csumi (@files_csum_info_fields) { my ($fname, $module, $method, $archivefield) = @$csumi; next unless defined $changes->{$fname}; if ($found_same) { # in archive, delete from .changes if it's there $changed{$file} = "removed" if $changes->{$fname} =~ s/\n.* \Q$file\E$(?:)$//m; } elsif ($changes->{$fname} =~ m/^.* \Q$file\E$(?:)$/m) { # not in archive, but it's here in the .changes } else { my $dsc_data = getfield $dsc, $fname; $dsc_data =~ m/^(.* \Q$file\E$)$/m or die "$dsc_data $file ?"; my $extra = $1; $extra =~ s/ \d+ /$&$placementinfo / or confess "$fname $extra >$dsc_data< ?" if $fname eq 'Files'; $changes->{$fname} .= "\n". 
$extra; $changed{$file} = "added"; } } } if (%changed) { foreach my $file (keys %changed) { progress f_ "edited .changes for archive .orig contents: %s %s", $changed{$file}, $file; } my $chtmp = "$changesfile.tmp"; $changes->save($chtmp); if (act_local()) { rename $chtmp,$changesfile or die "$changesfile $!"; } else { progress f_ "[new .changes left in %s]", $changesfile; } } else { progress f_ "%s already has appropriate .orig(s) (if any)", $changesfile; } } sub clogp_authline ($) { my ($clogp) = @_; my $author = getfield $clogp, 'Maintainer'; if ($author =~ m/^[^"\@]+\,/) { # single entry Maintainer field with unquoted comma $author = ($& =~ y/,//rd).$'; # strip the comma } # git wants a single author; any remaining commas in $author # are by now preceded by @ (or "). It seems safer to punt on # "..." for now rather than attempting to dequote or something. $author =~ s#,.*##ms unless $author =~ m/"/; my $date = cmdoutput qw(date), '+%s %z', qw(-d), getfield($clogp,'Date'); my $authline = "$author $date"; $authline =~ m/$git_authline_re/o or fail f_ "unexpected commit author line format \`%s'". " (was generated from changelog Maintainer field)", $authline; return ($1,$2,$3) if wantarray; return $authline; } sub vendor_patches_distro ($$) { my ($checkdistro, $what) = @_; return unless defined $checkdistro; my $series = "debian/patches/\L$checkdistro\E.series"; printdebug "checking for vendor-specific $series ($what)\n"; if (!open SERIES, "<", $series) { confess "$series $!" unless $!==ENOENT; return; } while () { next unless m/\S/; next if m/^\s+\#/; print STDERR __ <error; close SERIES; } sub check_for_vendor_patches () { # This dpkg-source feature doesn't seem to be documented anywhere! # But it can be found in the changelog (reformatted): # commit 4fa01b70df1dc4458daee306cfa1f987b69da58c # Author: Raphael Hertzog # Date: Sun Oct 3 09:36:48 2010 +0200 # dpkg-source: correctly create .pc/.quilt_series with alternate # series files # # If you have debian/patches/ubuntu.series and you were # unpacking the source package on ubuntu, quilt was still # directed to debian/patches/series instead of # debian/patches/ubuntu.series. # # debian/changelog | 3 +++ # scripts/Dpkg/Source/Package/V3/quilt.pm | 4 +++- # 2 files changed, 6 insertions(+), 1 deletion(-) use Dpkg::Vendor; vendor_patches_distro($ENV{DEB_VENDOR}, "DEB_VENDOR"); vendor_patches_distro(Dpkg::Vendor::get_current_vendor(), __ "Dpkg::Vendor \`current vendor'"); vendor_patches_distro(access_basedistro(), __ "(base) distro being accessed"); vendor_patches_distro(access_nomdistro(), __ "(nominal) distro being accessed"); } sub check_bpd_exists () { stat $buildproductsdir or fail f_ "build-products-dir %s is not accessible: %s\n", $buildproductsdir, $!; } sub dotdot_bpd_transfer_origs ($$$) { my ($bpd_abs, $upstreamversion, $wanted) = @_; # checks is_orig_file_of_vsn and if # calls $wanted->{$leaf} and expects boolish return if $buildproductsdir eq '..'; my $warned; my $dotdot = $maindir; $dotdot =~ s{/[^/]+$}{}; opendir DD, $dotdot or fail "opendir .. ($dotdot): $!"; while ($!=0, defined(my $leaf = readdir DD)) { { local ($debuglevel) = $debuglevel-1; printdebug "DD_BPD $leaf ?\n"; } next unless is_orig_file_of_vsn $leaf, $upstreamversion; next unless $wanted->($leaf); next if lstat "$bpd_abs/$leaf"; print STDERR f_ "%s: found orig(s) in .. missing from build-products-dir, transferring:\n", $us unless $warned++; $! 
== &ENOENT or fail f_ "check orig file %s in bpd %s: %s", $leaf, $bpd_abs, $!; lstat "$dotdot/$leaf" or fail f_ "check orig file %s in ..: %s", $leaf, $!; if (-l _) { stat "$dotdot/$leaf" or fail f_ "check target of orig symlink %s in ..: %s", $leaf, $!; my $ltarget = readlink "$dotdot/$leaf" or die "readlink $dotdot/$leaf: $!"; if ($ltarget !~ m{^/}) { $ltarget = "$dotdot/$ltarget"; } symlink $ltarget, "$bpd_abs/$leaf" or die "$ltarget $bpd_abs $leaf: $!"; print STDERR f_ "%s: cloned orig symlink from ..: %s\n", $us, $leaf; } elsif (link "$dotdot/$leaf", "$bpd_abs/$leaf") { print STDERR f_ "%s: hardlinked orig from ..: %s\n", $us, $leaf; } elsif ($! != EXDEV) { fail f_ "failed to make %s a hardlink to %s: %s", "$bpd_abs/$leaf", "$dotdot/$leaf", $!; } else { symlink "$bpd_abs/$leaf", "$dotdot/$leaf" or die "$bpd_abs $dotdot $leaf $!"; print STDERR f_ "%s: symmlinked orig from .. on other filesystem: %s\n", $us, $leaf; } } die "$dotdot; $!" if $!; closedir DD; } sub import_tarball_tartrees ($$) { my ($upstreamv, $dfi) = @_; # cwd should be the playground # We unpack and record the orig tarballs first, so that we only # need disk space for one private copy of the unpacked source. # But we can't make them into commits until we have the metadata # from the debian/changelog, so we record the tree objects now and # make them into commits later. my @tartrees; my $orig_f_base = srcfn $upstreamv, ''; foreach my $fi (@$dfi) { # We actually import, and record as a commit, every tarball # (unless there is only one file, in which case there seems # little point. my $f = $fi->{Filename}; printdebug "import considering $f "; (printdebug "not tar\n"), next unless $f =~ m/\.tar(\.\w+)?$/; (printdebug "signature\n"), next if $f =~ m/$orig_f_sig_re$/o; my $compr_ext = $1; my ($orig_f_part) = $f =~ m/^\Q$orig_f_base\E\.([^._]+)?\.tar(?:\.\w+)?$/; printdebug "Y ", (join ' ', map { $_//"(none)" } $compr_ext, $orig_f_part ), "\n"; my $path = $fi->{Path} // $f; my $input = new IO::File $f, '<' or die "$f $!"; my $compr_pid; my @compr_cmd; if (defined $compr_ext) { my $cname = Dpkg::Compression::compression_guess_from_filename $f; fail "Dpkg::Compression cannot handle file $f in source package" if defined $compr_ext && !defined $cname; my $compr_proc = new Dpkg::Compression::Process compression => $cname; @compr_cmd = $compr_proc->get_uncompress_cmdline(); my $compr_fh = new IO::Handle; my $compr_pid = open $compr_fh, "-|" // confess "$!"; if (!$compr_pid) { open STDIN, "<&", $input or confess "$!"; exec @compr_cmd; die "dgit (child): exec $compr_cmd[0]: $!\n"; } $input = $compr_fh; } rmtree "_unpack-tar"; mkdir "_unpack-tar" or confess "$!"; my @tarcmd = qw(tar -x -f - --no-same-owner --no-same-permissions --no-acls --no-xattrs --no-selinux); my $tar_pid = fork // confess "$!"; if (!$tar_pid) { chdir "_unpack-tar" or confess "$!"; open STDIN, "<&", $input or confess "$!"; exec @tarcmd; die f_ "dgit (child): exec %s: %s", $tarcmd[0], $!; } $!=0; (waitpid $tar_pid, 0) == $tar_pid or confess "$!"; !$? or failedcmd @tarcmd; close $input or (@compr_cmd ? 
($?==SIGPIPE || failedcmd @compr_cmd) : confess "$!"); # finally, we have the results in "tarball", but maybe # with the wrong permissions runcmd qw(chmod -R +rwX _unpack-tar); changedir "_unpack-tar"; remove_stray_gits($f); mktree_in_ud_here(); my ($tree) = git_add_write_tree(); my $tentries = cmdoutput @git, qw(ls-tree -z), $tree; if ($tentries =~ m/^\d+ tree (\w+)\t[^\000]+\000$/s) { $tree = $1; printdebug "one subtree $1\n"; } else { printdebug "multiple subtrees\n"; } changedir ".."; rmtree "_unpack-tar"; my $ent = [ $f, $tree ]; push @tartrees, { Orig => !!$orig_f_part, Sort => (!$orig_f_part ? 2 : $orig_f_part =~ m/-/g ? 1 : 0), OrigPart => $orig_f_part, # 'orig', 'orig-XXX', or undef F => $f, Tree => $tree, }; } @tartrees = sort { # put any without "_" first (spec is not clear whether files # are always in the usual order). Tarballs without "_" are # the main orig or the debian tarball. $a->{Sort} <=> $b->{Sort} or $a->{F} cmp $b->{F} } @tartrees; @tartrees; } sub import_tarball_commits ($$) { my ($tartrees, $upstreamv) = @_; # cwd should be a playtree which has a relevant debian/changelog # fills in $tt->{Commit} for each one my $any_orig = grep { $_->{Orig} } @$tartrees; my @clogcmd = qw(dpkg-parsechangelog --format rfc822 --all); my $clogp; my $r1clogp; printdebug "import clog search...\n"; parsechangelog_loop \@clogcmd, (__ "package changelog"), sub { my ($thisstanza, $desc) = @_; no warnings qw(exiting); $clogp //= $thisstanza; printdebug "import clog $thisstanza->{version} $desc...\n"; last if !$any_orig; # we don't need $r1clogp # We look for the first (most recent) changelog entry whose # version number is lower than the upstream version of this # package. Then the last (least recent) previous changelog # entry is treated as the one which introduced this upstream # version and used for the synthetic commits for the upstream # tarballs. # One might think that a more sophisticated algorithm would be # necessary. But: we do not want to scan the whole changelog # file. Stopping when we see an earlier version, which # necessarily then is an earlier upstream version, is the only # realistic way to do that. Then, either the earliest # changelog entry we have seen so far is indeed the earliest # upload of this upstream version; or there are only changelog # entries relating to later upstream versions (which is not # possible unless the changelog and .dsc disagree about the # version). Then it remains to choose between the physically # last entry in the file, and the one with the lowest version # number. If these are not the same, we guess that the # versions were created in a non-monotonic order rather than # that the changelog entries have been misordered. printdebug "import clog $thisstanza->{version} vs $upstreamv...\n"; last if version_compare($thisstanza->{version}, $upstreamv) < 0; $r1clogp = $thisstanza; printdebug "import clog $r1clogp->{version} becomes r1\n"; }; $clogp or fail __ "package changelog has no entries!"; my $authline = clogp_authline $clogp; my $changes = getfield $clogp, 'Changes'; $changes =~ s/^\n//; # Changes: \n my $cversion = getfield $clogp, 'Version'; my $r1authline; if (@$tartrees) { $r1clogp //= $clogp; # maybe there's only one entry; $r1authline = clogp_authline $r1clogp; # Strictly, r1authline might now be wrong if it's going to be # unused because !$any_orig. Whatever. 
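    # Each entry of @$tartrees is turned into a synthetic commit for
    # that tarball's tree object.  Orig tarballs are attributed to
    # $r1authline (the changelog entry treated as having introduced
    # this upstream version) and get a "[dgit import orig ...]"
    # trailer; any other tarball uses $authline from the current
    # changelog entry and a "[dgit import tarball ...]" trailer.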
printdebug "import tartrees authline $authline\n"; printdebug "import tartrees r1authline $r1authline\n"; foreach my $tt (@$tartrees) { printdebug "import tartree $tt->{F} $tt->{Tree}\n"; # untranslated so that different people's imports are identical my $mbody = sprintf "Import %s", $tt->{F}; $tt->{Commit} = hash_commit_text($tt->{Orig} ? <{Tree} author $r1authline committer $r1authline $mbody [dgit import orig $tt->{F}] END_O tree $tt->{Tree} author $authline committer $authline $mbody [dgit import tarball $package $cversion $tt->{F}] END_T } } return ($authline, $r1authline, $clogp, $changes); } sub generate_commits_from_dsc () { # See big comment in fetch_from_archive, below. # See also README.dsc-import. prep_ud(); changedir $playground; my $bpd_abs = bpd_abs(); my $upstreamv = upstreamversion $dsc->{version}; my @dfi = dsc_files_info(); dotdot_bpd_transfer_origs $bpd_abs, $upstreamv, sub { grep { $_->{Filename} eq $_[0] } @dfi }; foreach my $fi (@dfi) { my $f = $fi->{Filename}; die "$f ?" if $f =~ m#/|^\.|\.dsc$|\.tmp$#; my $upper_f = "$bpd_abs/$f"; printdebug "considering reusing $f: "; if (link_ltarget "$upper_f,fetch", $f) { printdebug "linked (using ...,fetch).\n"; } elsif ((printdebug "($!) "), $! != ENOENT) { fail f_ "accessing %s: %s", "$buildproductsdir/$f,fetch", $!; } elsif (link_ltarget $upper_f, $f) { printdebug "linked.\n"; } elsif ((printdebug "($!) "), $! != ENOENT) { fail f_ "accessing %s: %s", "$buildproductsdir/$f", $!; } else { printdebug "absent.\n"; } my $refetched; complete_file_from_dsc('.', $fi, \$refetched) or next; printdebug "considering saving $f: "; if (rename_link_xf 1, $f, $upper_f) { printdebug "linked.\n"; } elsif ((printdebug "($@) "), $! != EEXIST) { fail f_ "saving %s: %s", "$buildproductsdir/$f", $@; } elsif (!$refetched) { printdebug "no need.\n"; } elsif (rename_link_xf 1, $f, "$upper_f,fetch") { printdebug "linked (using ...,fetch).\n"; } elsif ((printdebug "($@) "), $! != EEXIST) { fail f_ "saving %s: %s", "$buildproductsdir/$f,fetch", $@; } else { printdebug "cannot.\n"; } } my @tartrees; @tartrees = import_tarball_tartrees($upstreamv, \@dfi) unless @dfi == 1; # only one file in .dsc my $dscfn = "$package.dsc"; my $treeimporthow = 'package'; open D, ">", $dscfn or die "$dscfn: $!"; print D $dscdata or die "$dscfn: $!"; close D or die "$dscfn: $!"; my @cmd = qw(dpkg-source); push @cmd, '--no-check' if $dsc_checked; if (madformat $dsc->{format}) { push @cmd, '--skip-patches'; $treeimporthow = 'unpatched'; } push @cmd, qw(-x --), $dscfn; runcmd @cmd; my ($tree,$dir) = mktree_in_ud_from_only_subdir(__ "source package"); if (madformat $dsc->{format}) { check_for_vendor_patches(); } my $dappliedtree; if (madformat $dsc->{format}) { my @pcmd = qw(dpkg-source --before-build .); runcmd shell_cmd 'exec >/dev/null', @pcmd; rmtree '.pc'; $dappliedtree = git_add_write_tree(); } my ($authline, $r1authline, $clogp, $changes) = import_tarball_commits(\@tartrees, $upstreamv); my $cversion = getfield $clogp, 'Version'; printdebug "import main commit\n"; open C, ">../commit.tmp" or confess "$!"; print C <{Commit} END print C <{format}) { printdebug "import apply patches...\n"; # regularise the state of the working tree so that # the checkout of $rawimport_hash works nicely. 
my $dappliedcommit = hash_commit_text(</dev/null 2>>../../gbp-pq-output', @showcmd; debugcmd "+",@realcmd; if (system @realcmd) { die f_ "%s failed: %s\n", +(shellquote @showcmd), failedcmd_waitstatus(); } my $gapplied = git_rev_parse('HEAD'); my $gappliedtree = cmdoutput @git, qw(rev-parse HEAD:); $gappliedtree eq $dappliedtree or fail f_ < $rawimport_hash, Info => __ "Import of source package", }; my @output = ($rawimport_mergeinput); if ($lastpush_mergeinput) { my $oldclogp = mergeinfo_getclogp($lastpush_mergeinput); my $oversion = getfield $oldclogp, 'Version'; my $vcmp = version_compare($oversion, $cversion); if ($vcmp < 0) { @output = ($rawimport_mergeinput, $lastpush_mergeinput, { ReverseParents => 1, # untranslated so that different people's pseudomerges # are not needlessly different (although they will # still differ if the series of pulls is different) Message => (sprintf < 0) { print STDERR f_ <{Filename}" # and will set $$refetched=1 if it did so (or tried to). my $f = $fi->{Filename}; my $tf = "$dstdir/$f"; my $downloaded = 0; my $got; my $checkhash = sub { open F, "<", "$tf" or die "$tf: $!"; $fi->{Digester}->reset(); $fi->{Digester}->addfile(*F); F->error and confess "$!"; $got = $fi->{Digester}->hexdigest(); return $got eq $fi->{Hash}; }; if (stat_exists $tf) { if ($checkhash->()) { progress f_ "using existing %s", $f; return 1; } if (!$refetched) { fail f_ "file %s has hash %s but .dsc demands hash %s". " (perhaps you should delete this file?)", $f, $got, $fi->{Hash}; } progress f_ "need to fetch correct version of %s", $f; unlink $tf or die "$tf $!"; $$refetched = 1; } else { printdebug "$tf does not exist, need to fetch\n"; } my $furl = $dscurl; $furl =~ s{/[^/]+$}{}; $furl .= "/$f"; die "$f ?" unless $f =~ m/^\Q${package}\E_/; die "$f ?" if $f =~ m#/#; runcmd_ordryrun_local @curl,qw(-f -o),$tf,'--',"$furl"; return 0 if !act_local(); $checkhash->() or fail f_ "file %s has hash %s but .dsc demands hash %s". " (got wrong file from archive!)", $f, $got, $fi->{Hash}; return 1; } sub ensure_we_have_orig () { my @dfi = dsc_files_info(); foreach my $fi (@dfi) { my $f = $fi->{Filename}; next unless is_orig_file_in_dsc($f, \@dfi); complete_file_from_dsc($buildproductsdir, $fi) or next; } } #---------- git fetch ---------- sub lrfetchrefs () { return "refs/dgit-fetch/".access_basedistro(); } sub lrfetchref () { return lrfetchrefs.'/'.server_branch($csuite); } # We fetch some parts of lrfetchrefs/*. Ideally we delete these # locally fetched refs because they have unhelpful names and clutter # up gitk etc. So we track whether we have "used up" head ref (ie, # whether we have made another local ref which refers to this object). # # (If we deleted them unconditionally, then we might end up # re-fetching the same git objects each time dgit fetch was run.) # # So, each use of lrfetchrefs needs to be accompanied by arrangements # in git_fetch_us to fetch the refs in question, and possibly a call # to lrfetchref_used. our (%lrfetchrefs_f, %lrfetchrefs_d); # $lrfetchrefs_X{lrfetchrefs."/heads/whatever"} = $objid sub lrfetchref_used ($) { my ($fullrefname) = @_; my $objid = $lrfetchrefs_f{$fullrefname}; $lrfetchrefs_d{$fullrefname} = $objid if defined $objid; } sub git_lrfetch_sane { my ($url, $supplementary, @specs) = @_; # Make a 'refs/'.lrfetchrefs.'/*' be just like on server, # at least as regards @specs. Also leave the results in # %lrfetchrefs_f, and arrange for lrfetchref_used to be # able to clean these up. 
# # With $supplementary==1, @specs must not contain wildcards # and we add to our previous fetches (non-atomically). # This is rather miserable: # When git fetch --prune is passed a fetchspec ending with a *, # it does a plausible thing. If there is no * then: # - it matches subpaths too, even if the supplied refspec # starts refs, and behaves completely madly if the source # has refs/refs/something. (See, for example, Debian #NNNN.) # - if there is no matching remote ref, it bombs out the whole # fetch. # We want to fetch a fixed ref, and we don't know in advance # if it exists, so this is not suitable. # # Our workaround is to use git ls-remote. git ls-remote has its # own quirks. Notably, it has the absurd multi-tail-matching # behaviour: git ls-remote R refs/foo can report refs/foo AND # refs/refs/foo etc. # # Also, we want an idempotent snapshot, but we have to make two # calls to the remote: one to git ls-remote and one to git fetch. The # solution is to use git ls-remote to obtain a target state, and # git fetch to try to generate it. If we don't manage to generate # the target state, we try again. printdebug "git_lrfetch_sane suppl=$supplementary specs @specs\n"; my $specre = join '|', map { my $x = $_; $x =~ s/\W/\\$&/g; my $wildcard = $x =~ s/\\\*$/.*/; die if $wildcard && $supplementary; "(?:refs/$x)"; } @specs; printdebug "git_lrfetch_sane specre=$specre\n"; my $wanted_rref = sub { local ($_) = @_; return m/^(?:$specre)$/; }; my $fetch_iteration = 0; FETCH_ITERATION: for (;;) { printdebug "git_lrfetch_sane iteration $fetch_iteration\n"; if (++$fetch_iteration > 10) { fail __ "too many iterations trying to get sane fetch!"; } my @look = map { "refs/$_" } @specs; my @lcmd = (@git, qw(ls-remote -q --refs), $url, @look); debugcmd "|",@lcmd; my %wantr; open GITLS, "-|", @lcmd or confess "$!"; while (<GITLS>) { printdebug "=> ", $_; m/^(\w+)\s+(\S+)\n/ or die "ls-remote $_ ?"; my ($objid,$rrefname) = ($1,$2); if (!$wanted_rref->($rrefname)) { print STDERR f_ <($rrefname)) { printdebug <'; my $want = $wantr{$rrefname}; next if $got eq $want; if (!defined $objgot{$want}) { fail __ <{Clogp} exists and returns it my ($mi) = @_; $mi->{Clogp} = commit_getclogp($mi->{Commit}); } sub mergeinfo_version ($) { return getfield( (mergeinfo_getclogp $_[0]), 'Version' ); } sub fetch_from_archive_record_1 ($) { my ($hash) = @_; runcmd git_update_ref_cmd "dgit fetch $csuite", 'DGIT_ARCHIVE', $hash; cmdoutput @git, qw(log -n2), $hash; # ...
gives git a chance to complain if our commit is malformed } sub fetch_from_archive_record_2 ($) { my ($hash) = @_; my @upd_cmd = (git_update_ref_cmd 'dgit fetch', lrref(), $hash); if (act_local()) { cmdoutput @upd_cmd; } else { dryrun_report @upd_cmd; } } sub parse_dsc_field_def_dsc_distro () { $dsc_distro //= cfg qw(dgit.default.old-dsc-distro dgit.default.distro); } sub parse_dsc_field ($$) { my ($dsc, $what) = @_; my $f; foreach my $field (@ourdscfield) { $f = $dsc->{$field}; last if defined $f; } if (!defined $f) { progress f_ "%s: NO git hash", $what; parse_dsc_field_def_dsc_distro(); } elsif (($dsc_hash, $dsc_distro, $dsc_hint_tag, $dsc_hint_url) = $f =~ m/^(\w+)\s+($distro_re)\s+($versiontag_re)\s+(\S+)(?:\s|$)/) { progress f_ "%s: specified git info (%s)", $what, $dsc_distro; $dsc_hint_tag = [ $dsc_hint_tag ]; } elsif ($f =~ m/^\w+\s*$/) { $dsc_hash = $&; parse_dsc_field_def_dsc_distro(); $dsc_hint_tag = [ debiantags +(getfield $dsc, 'Version'), $dsc_distro ]; progress f_ "%s: specified git hash", $what; } else { fail f_ "%s: invalid Dgit info", $what; } } sub resolve_dsc_field_commit ($$) { my ($already_distro, $already_mapref) = @_; return unless defined $dsc_hash; my $mapref = defined $already_mapref && ($already_distro eq $dsc_distro || !$chase_dsc_distro) ? $already_mapref : undef; my $do_fetch; $do_fetch = sub { my ($what, @fetch) = @_; local $idistro = $dsc_distro; my $lrf = lrfetchrefs; if (!$chase_dsc_distro) { progress f_ "not chasing .dsc distro %s: not fetching %s", $dsc_distro, $what; return 0; } progress f_ ".dsc names distro %s: fetching %s", $dsc_distro, $what; my $url = access_giturl(); if (!defined $url) { defined $dsc_hint_url or fail f_ <((__ "rewrite map"), $rewritemap) or return; $mapref = $lrf.'/'.$rewritemap; } my $rewritemapdata = git_cat_file $mapref.':map'; if (defined $rewritemapdata && $rewritemapdata =~ m/^$dsc_hash(?:[ \t](\w+))/m) { progress __ "server's git history rewrite map contains a relevant entry!"; $dsc_hash = $1; if (defined $dsc_hash) { progress __ "using rewritten git hash in place of .dsc value"; } else { progress __ "server data says .dsc hash is to be disregarded"; } } } if (!defined git_cat_file $dsc_hash) { my @tags = map { "tags/".$_ } @$dsc_hint_tag; my $lrf = $do_fetch->((__ "additional commits"), @tags) && defined git_cat_file $dsc_hash or fail f_ < $lastpush_hash, Info => (__ "dgit suite branch on dgit git server"), }; my $lastfetch_hash = git_get_ref(lrref()); printdebug "fetch_from_archive: lastfetch=$lastfetch_hash\n"; my $lastfetch_mergeinput = $lastfetch_hash && { Commit => $lastfetch_hash, Info => (__ "dgit client's archive history view"), }; my $dsc_mergeinput = $dsc_hash && { Commit => $dsc_hash, Info => (__ "Dgit field in .dsc from archive"), }; my $cwd = getcwd(); my $del_lrfetchrefs = sub { changedir $cwd; my $gur; printdebug "del_lrfetchrefs...\n"; foreach my $fullrefname (sort keys %lrfetchrefs_d) { my $objid = $lrfetchrefs_d{$fullrefname}; printdebug "del_lrfetchrefs: $objid $fullrefname\n"; if (!$gur) { $gur ||= new IO::Handle; open $gur, "|-", qw(git update-ref --stdin) or confess "$!"; } printf $gur "delete %s %s\n", $fullrefname, $objid; } if ($gur) { close $gur or failedcmd "git update-ref delete lrfetchrefs"; } }; if (defined $dsc_hash) { ensure_we_have_orig(); if (!$lastpush_hash || $dsc_hash eq $lastpush_hash) { @mergeinputs = $dsc_mergeinput } elsif (is_fast_fwd($dsc_hash,$lastpush_hash)) { print STDERR f_ <{Commit}; $h and is_fast_fwd($lastfetch_hash, $h); # If true, one of the existing parents of this 
commit # is a descendant of the $lastfetch_hash, so we'll # be ff from that automatically. } @mergeinputs ) { # Otherwise: push @mergeinputs, $lastfetch_mergeinput; } printdebug "fetch mergeinfos:\n"; foreach my $mi (@mergeinputs) { if ($mi->{Info}) { printdebug " commit $mi->{Commit} $mi->{Info}\n"; } else { printdebug sprintf " ReverseParents=%d Message=%s", $mi->{ReverseParents}, $mi->{Message}; } } my $compat_info= pop @mergeinputs if $mergeinputs[$#mergeinputs]{Message}; @mergeinputs = grep { defined $_->{Commit} } @mergeinputs; my $hash; if (@mergeinputs > 1) { # here we go, then: my $tree_commit = $mergeinputs[0]{Commit}; my $tree = get_tree_of_commit $tree_commit; # We use the changelog author of the package in question as the # author of this pseudo-merge. This is (roughly) correct if # this commit is simply representing a non-dgit upload. # (Roughly because it does not record sponsorship - but we # don't have sponsorship info because that's in the .changes, # which isn't in the archive.) # # But, it might be that we are representing archive history # updates (including in-archive copies). These are not really # the responsibility of the person who created the .dsc, but # there is no-one whose name we should better use. (The # author of the .dsc-named commit is clearly worse.) my $useclogp = mergeinfo_getclogp $mergeinputs[0]; my $author = clogp_authline $useclogp; my $cversion = getfield $useclogp, 'Version'; my $mcf = dgit_privdir()."/mergecommit"; open MC, ">", $mcf or die "$mcf $!"; print MC <{Commit} } @mergeinputs; @parents = reverse @parents if $compat_info->{ReverseParents}; print MC <{Commit} END print MC <{Message}) { print MC $compat_info->{Message} or confess "$!"; } else { print MC f_ <{Info} or confess "$!"; }; $message_add_info->($mergeinputs[0]); print MC __ <($_) foreach @mergeinputs[1..$#mergeinputs]; } close MC or confess "$!"; $hash = hash_commit $mcf; } else { $hash = $mergeinputs[0]{Commit}; } printdebug "fetch hash=$hash\n"; my $chkff = sub { my ($lasth, $what) = @_; return unless $lasth; confess "$lasth $hash $what ?"
unless is_fast_fwd($lasth, $hash); }; $chkff->($lastpush_hash, __ 'dgit repo server tip (last push)') if $lastpush_hash; $chkff->($lastfetch_hash, __ 'local tracking tip (last fetch)'); fetch_from_archive_record_1($hash); if (defined $skew_warning_vsn) { printdebug "SKEW CHECK WANT $skew_warning_vsn\n"; my $gotclogp = commit_getclogp($hash); my $got_vsn = getfield $gotclogp, 'Version'; printdebug "SKEW CHECK GOT $got_vsn\n"; if (version_compare($got_vsn, $skew_warning_vsn) < 0) { print STDERR f_ <", "$attrs.new" or die "$attrs.new $!"; if (!open ATTRS, "<", $attrs) { $!==ENOENT or die "$attrs: $!"; } else { while (<ATTRS>) { chomp; next if m{^debian/changelog\s}; print NATTRS $_, "\n" or confess "$!"; } ATTRS->error and confess "$!"; close ATTRS; } print NATTRS "debian/changelog merge=$driver\n" or confess "$!"; close NATTRS; set_local_git_config "$cb.name", __ 'debian/changelog merge driver'; set_local_git_config "$cb.driver", 'dpkg-mergechangelogs -m %O %A %B %A'; rename "$attrs.new", "$attrs" or die "$attrs: $!"; } sub setup_useremail (;$) { my ($always) = @_; return unless $always || access_cfg_bool(1, 'setup-useremail'); my $setup = sub { my ($k, $envvar) = @_; my $v = access_cfg("user-$k", 'RETURN-UNDEF') // $ENV{$envvar}; return unless defined $v; set_local_git_config "user.$k", $v; }; $setup->('email', 'DEBEMAIL'); $setup->('name', 'DEBFULLNAME'); } sub ensure_setup_existing_tree () { my $k = "remote.$remotename.skipdefaultupdate"; my $c = git_get_config $k; return if defined $c; set_local_git_config $k, 'true'; } sub open_main_gitattrs () { confess 'internal error no maindir' unless defined $maindir; my $gai = new IO::File "$maindir_gitcommon/info/attributes" or $!==ENOENT or die "open $maindir_gitcommon/info/attributes: $!"; return $gai; } our $gitattrs_ourmacro_re = qr{^\[attr\]dgit-defuse-attrs\s}; sub is_gitattrs_setup () { # return values: # trueish # 1: gitattributes set up and should be left alone # falseish # 0: there is a dgit-defuse-attrs but it needs fixing # undef: there is none my $gai = open_main_gitattrs(); return 0 unless $gai; while (<$gai>) { next unless m{$gitattrs_ourmacro_re}; return 1 if m{\s-working-tree-encoding\s}; printdebug "is_gitattrs_setup: found old macro\n"; return 0; } $gai->error and confess "$!"; printdebug "is_gitattrs_setup: found nothing\n"; return undef; } sub setup_gitattrs (;$) { my ($always) = @_; return unless $always || access_cfg_bool(1, 'setup-gitattributes'); my $already = is_gitattrs_setup(); if ($already) { progress __ < $af.new" or confess "$!"; print GAO <) { if (m{$gitattrs_ourmacro_re}) { die unless defined $already; $_ = $new; } chomp; print GAO $_, "\n" or confess "$!"; } $gai->error and confess "$!"; } close GAO or confess "$!"; rename "$af.new", "$af" or fail f_ "install %s: %s", $af, $!; } sub setup_new_tree () { setup_mergechangelogs(); setup_useremail(); setup_gitattrs(); } sub check_gitattrs ($$) { my ($treeish, $what) = @_; return if is_gitattrs_setup; local $/="\0"; my @cmd = (@git, qw(ls-tree -lrz --), "${treeish}:"); debugcmd "|",@cmd; my $gafl = new IO::File; open $gafl, "-|", @cmd or confess "$!"; while (<$gafl>) { chomp or die; s/^\d+\s+\w+\s+\w+\s+(\d+)\t// or die; next if $1 == 0; next unless m{(?:^|/)\.gitattributes$}; # oh dear, found one print STDERR f_ <(), and returns undef # in parent, returns canonical suite name for $tsuite my $canonsuitefh = IO::File::new_tmpfile; my $pid = fork // confess "$!"; if (!$pid) { forkcheck_setup(); $isuite = $tsuite; $us .= " [$isuite]"; $debugprefix .= " "; progress f_ "fetching
%s...", $tsuite; canonicalise_suite(); print $canonsuitefh $csuite, "\n" or confess "$!"; close $canonsuitefh or confess "$!"; $fn->(); return undef; } waitpid $pid,0 == $pid or confess "$!"; fail f_ "failed to obtain %s: %s", $tsuite, waitstatusmsg() if $? && $?!=256*4; seek $canonsuitefh,0,0 or confess "$!"; local $csuite = <$canonsuitefh>; confess "$!" unless defined $csuite && chomp $csuite; if ($? == 256*4) { printdebug "multisuite $tsuite missing\n"; return $csuite; } printdebug "multisuite $tsuite ok (canon=$csuite)\n"; push @$mergeinputs, { Ref => lrref, Info => $csuite, }; return $csuite; } sub fork_for_multisuite ($) { my ($before_fetch_merge) = @_; # if nothing unusual, just returns '' # # if multisuite: # returns 0 to caller in child, to do first of the specified suites # in child, $csuite is not yet set # # returns 1 to caller in parent, to finish up anything needed after # in parent, $csuite is set to canonicalised portmanteau my $org_isuite = $isuite; my @suites = split /\,/, $isuite; return '' unless @suites > 1; printdebug "fork_for_multisuite: @suites\n"; my @mergeinputs; my $cbasesuite = multisuite_suite_child($suites[0], \@mergeinputs, sub { }); return 0 unless defined $cbasesuite; fail f_ "package %s missing in (base suite) %s", $package, $cbasesuite unless @mergeinputs; my @csuites = ($cbasesuite); $before_fetch_merge->(); foreach my $tsuite (@suites[1..$#suites]) { $tsuite =~ s/^-/$cbasesuite-/; my $csubsuite = multisuite_suite_child($tsuite, \@mergeinputs, sub { @end = (); fetch_one(); finish 0; }); $csubsuite =~ s/^\Q$cbasesuite\E-/-/; push @csuites, $csubsuite; } foreach my $mi (@mergeinputs) { my $ref = git_get_ref $mi->{Ref}; die "$mi->{Ref} ?" unless length $ref; $mi->{Commit} = $ref; } $csuite = join ",", @csuites; my $previous = git_get_ref lrref; if ($previous) { unshift @mergeinputs, { Commit => $previous, Info => (__ "local combined tracking branch"), Warning => (__ "archive seems to have rewound: local tracking branch is ahead!"), }; } foreach my $ix (0..$#mergeinputs) { $mergeinputs[$ix]{Index} = $ix; } @mergeinputs = sort { -version_compare(mergeinfo_version $a, mergeinfo_version $b) # highest version first or $a->{Index} <=> $b->{Index}; # earliest in spec first } @mergeinputs; my @needed; NEEDED: foreach my $mi (@mergeinputs) { printdebug "multisuite merge check $mi->{Info}\n"; foreach my $previous (@needed) { next unless is_fast_fwd $mi->{Commit}, $previous->{Commit}; printdebug "multisuite merge un-needed $previous->{Info}\n"; next NEEDED; } push @needed, $mi; printdebug "multisuite merge this-needed\n"; $mi->{Character} = '+'; } $needed[0]{Character} = '*'; my $output = $needed[0]{Commit}; if (@needed > 1) { printdebug "multisuite merge nontrivial\n"; my $tree = cmdoutput qw(git rev-parse), $needed[0]{Commit}.':'; my $commit = "tree $tree\n"; my $msg = f_ "Combine archive branches %s [dgit]\n\n". "Input branches:\n", $csuite; foreach my $mi (sort { $a->{Index} <=> $b->{Index} } @mergeinputs) { printdebug "multisuite merge include $mi->{Info}\n"; $mi->{Character} //= ' '; $commit .= "parent $mi->{Commit}\n"; $msg .= sprintf " %s %-25s %s\n", $mi->{Character}, (mergeinfo_version $mi), $mi->{Info}; } my $authline = clogp_authline mergeinfo_getclogp $needed[0]; $msg .= __ "\nKey\n". " * marks the highest version branch, which choose to use\n". " + marks each branch which was not already an ancestor\n\n"; $msg .= "[dgit multi-suite $csuite]\n"; $commit .= "author $authline\n". 
"committer $authline\n\n"; $output = hash_commit_text $commit.$msg; printdebug "multisuite merge generated $output\n"; } fetch_from_archive_record_1($output); fetch_from_archive_record_2($output); progress f_ "calculated combined tracking suite %s", $csuite; return 1; } sub clone_set_head () { open H, "> .git/HEAD" or confess "$!"; print H "ref: ".lref()."\n" or confess "$!"; close H or confess "$!"; } sub clone_finish ($) { my ($dstdir) = @_; runcmd @git, qw(reset --hard), lrref(); runcmd qw(bash -ec), <<'END'; set -o pipefail git ls-tree -r --name-only -z HEAD | \ xargs -0r touch -h -r . -- END printdone f_ "ready for work in %s", $dstdir; } sub clone ($) { # in multisuite, returns twice! # once in parent after first suite fetched, # and then again in child after everything is finished my ($dstdir) = @_; badusage __ "dry run makes no sense with clone" unless act_local(); my $multi_fetched = fork_for_multisuite(sub { printdebug "multi clone before fetch merge\n"; changedir $dstdir; record_maindir(); }); if ($multi_fetched) { printdebug "multi clone after fetch merge\n"; clone_set_head(); clone_finish($dstdir); return; } printdebug "clone main body\n"; mkdir $dstdir or fail f_ "create \`%s': %s", $dstdir, $!; changedir $dstdir; check_bpd_exists(); canonicalise_suite(); my $hasgit = check_for_git(); runcmd @git, qw(init -q); record_maindir(); setup_new_tree(); clone_set_head(); my $giturl = access_giturl(1); if (defined $giturl) { runcmd @git, qw(remote add), 'origin', $giturl; } if ($hasgit) { progress __ "fetching existing git history"; git_fetch_us(); runcmd_ordryrun_local @git, qw(fetch origin); } else { progress __ "starting new git history"; } fetch_from_archive() or no_such_package; my $vcsgiturl = $dsc->{'Vcs-Git'}; if (length $vcsgiturl) { $vcsgiturl =~ s/\s+-b\s+\S+//g; runcmd @git, qw(remote add vcs-git), $vcsgiturl; } clone_finish($dstdir); } sub fetch_one () { canonicalise_suite(); if (check_for_git()) { git_fetch_us(); } fetch_from_archive() or no_such_package(); my $vcsgiturl = $dsc && $dsc->{'Vcs-Git'}; if (length $vcsgiturl and (grep { $csuite eq $_ } split /\;/, cfg 'dgit.vcs-git.suites')) { my $current = cfg 'remote.vcs-git.url', 'RETURN-UNDEF'; if (defined $current && $current ne $vcsgiturl) { print STDERR f_ <) { next if m/^\s*\#/; next unless m/\S/; s/\s+$//; # ignore missing final newline if (m/\s*\#\s*/) { my ($k, $v) = ($`, $'); #'); $v =~ s/^"(.*)"$/$1/; $options{$k} = $v; } else { $options{$_} = 1; } } F->error and confess "$!"; close F; } else { confess "$!" unless $!==&ENOENT; } if (!open F, "debian/source/format") { confess "$!" unless $!==&ENOENT; return ''; } $_ = ; F->error and confess "$!"; chomp; return ($_, \%options); } sub madformat_wantfixup ($) { my ($format) = @_; return 0 unless $format eq '3.0 (quilt)'; our $quilt_mode_warned; if ($quilt_mode eq 'nocheck') { progress f_ "Not doing any fixup of \`%s'". 
" due to ----no-quilt-fixup or --quilt=nocheck", $format unless $quilt_mode_warned++; return 0; } progress f_ "Format \`%s', need to check/update patch stack", $format unless $quilt_mode_warned++; return 1; } sub maybe_split_brain_save ($$$) { my ($headref, $dgitview, $msg) = @_; # => message fragment "$saved" describing disposition of $dgitview # (used inside parens, in the English texts) my $save = $internal_object_save{'dgit-view'}; return f_ "commit id %s", $dgitview unless defined $save; my @cmd = (shell_cmd 'cd "$1"; shift', $maindir, git_update_ref_cmd "dgit --dgit-view-save $msg HEAD=$headref", $save, $dgitview); runcmd @cmd; return f_ "and left in %s", $save; } # An "infopair" is a tuple [ $thing, $what ] # (often $thing is a commit hash; $what is a description) sub infopair_cond_equal ($$) { my ($x,$y) = @_; $x->[0] eq $y->[0] or fail <[1] ($x->[0]) not equal to $y->[1] ($y->[0]) END }; sub infopair_lrf_tag_lookup ($$) { my ($tagnames, $what) = @_; # $tagname may be an array ref my @tagnames = ref $tagnames ? @$tagnames : ($tagnames); printdebug "infopair_lrfetchref_tag_lookup $what @tagnames\n"; foreach my $tagname (@tagnames) { my $lrefname = lrfetchrefs."/tags/$tagname"; my $tagobj = $lrfetchrefs_f{$lrefname}; next unless defined $tagobj; printdebug "infopair_lrfetchref_tag_lookup $tagobj $tagname $what\n"; return [ git_rev_parse($tagobj), $what ]; } fail @tagnames==1 ? (f_ <[0], $desc->[0]) or fail f_ <[1], $anc->[0], $desc->[1], $desc->[0]; %s (%s) .. %s (%s) is not fast forward END }; sub pseudomerge_version_check ($$) { my ($clogp, $archive_hash) = @_; my $arch_clogp = commit_getclogp $archive_hash; my $i_arch_v = [ (getfield $arch_clogp, 'Version'), __ 'version currently in archive' ]; if (defined $overwrite_version) { if (length $overwrite_version) { infopair_cond_equal([ $overwrite_version, '--overwrite= version' ], $i_arch_v); } else { my $v = $i_arch_v->[0]; progress f_ "Checking package changelog for archive version %s ...", $v; my $cd; eval { my @xa = ("-f$v", "-t$v"); my $vclogp = parsechangelog @xa; my $gf = sub { my ($fn) = @_; [ (getfield $vclogp, $fn), (f_ "%s field from dpkg-parsechangelog %s", $fn, "@xa") ]; }; my $cv = $gf->('Version'); infopair_cond_equal($i_arch_v, $cv); $cd = $gf->('Distribution'); }; if ($@) { $@ =~ s/^\n//s; $@ =~ s/^dgit: //gm; fail "$@". f_ "Perhaps debian/changelog does not mention %s ?", $v; } fail f_ <[1], $cd->[0], $v %s is %s Your tree seems to based on earlier (not uploaded) %s. END if $cd->[0] =~ m/UNRELEASED/; } } printdebug "pseudomerge_version_check i_arch_v @$i_arch_v\n"; return $i_arch_v; } sub pseudomerge_hash_commit ($$$$ $$) { my ($clogp, $dgitview, $archive_hash, $i_arch_v, $msg_cmd, $msg_msg) = @_; progress f_ "Declaring that HEAD includes all changes in %s...", $i_arch_v->[0]; my $tree = cmdoutput qw(git rev-parse), "${dgitview}:"; my $authline = clogp_authline $clogp; chomp $msg_msg; $msg_cmd .= !defined $overwrite_version ? "" : !length $overwrite_version ? " --overwrite" : " --overwrite=".$overwrite_version; # Contributing parent is the first parent - that makes # git rev-list --first-parent DTRT. 
my $pmf = dgit_privdir()."/pseudomerge"; open MC, ">", $pmf or die "$pmf $!"; print MC < $merged_dgitview printdebug "splitbrain_pseudomerge...\n"; # # We: debian/PREVIOUS HEAD($maintview) # expect: o ----------------- o # \ \ # o o # a/d/PREVIOUS $dgitview # $archive_hash \ # If so, \ \ # we do: `------------------ o # this: $dgitview' # return $dgitview unless defined $archive_hash; return $dgitview if deliberately_not_fast_forward(); printdebug "splitbrain_pseudomerge...\n"; my $i_arch_v = pseudomerge_version_check($clogp, $archive_hash); if (!defined $overwrite_version) { progress __ "Checking that HEAD includes all changes in archive..."; } return $dgitview if is_fast_fwd $archive_hash, $dgitview; if (defined $overwrite_version) { } elsif (!eval { my $t_dep14 = debiantag_maintview $i_arch_v->[0], access_nomdistro; my $i_dep14 = infopair_lrf_tag_lookup($t_dep14, __ "maintainer view tag"); my $t_dgit = debiantag_new $i_arch_v->[0], access_nomdistro; my $i_dgit = infopair_lrf_tag_lookup($t_dgit, __ "dgit view tag"); my $i_archive = [ $archive_hash, __ "current archive contents" ]; printdebug "splitbrain_pseudomerge i_archive @$i_archive\n"; infopair_cond_equal($i_dgit, $i_archive); infopair_cond_ff($i_dep14, $i_dgit); infopair_cond_ff($i_dep14, [ $maintview, 'HEAD' ]); 1; }) { $@ =~ s/^\n//; chomp $@; print STDERR <[0]; my $r = pseudomerge_hash_commit $clogp, $dgitview, $archive_hash, $i_arch_v, "dgit --quilt=$quilt_mode", (defined $overwrite_version ? f_ "Declare fast forward from %s\n", $arch_v : f_ "Make fast forward from %s\n", $arch_v); maybe_split_brain_save $maintview, $r, "pseudomerge"; progress f_ "Made pseudo-merge of %s into dgit view.", $arch_v; return $r; } sub plain_overwrite_pseudomerge ($$$) { my ($clogp, $head, $archive_hash) = @_; printdebug "plain_overwrite_pseudomerge..."; my $i_arch_v = pseudomerge_version_check($clogp, $archive_hash); return $head if is_fast_fwd $archive_hash, $head; my $m = f_ "Declare fast forward from %s", $i_arch_v->[0]; my $r = pseudomerge_hash_commit $clogp, $head, $archive_hash, $i_arch_v, "dgit", $m; runcmd git_update_ref_cmd $m, 'HEAD', $r, $head; progress f_ "Make pseudo-merge of %s into your HEAD.", $i_arch_v->[0]; return $r; } sub push_parse_changelog ($) { my ($clogpfn) = @_; my $clogp = Dpkg::Control::Hash->new(); $clogp->load($clogpfn) or die; my $clogpackage = getfield $clogp, 'Source'; $package //= $clogpackage; fail f_ "-p specified %s but changelog specified %s", $package, $clogpackage unless $package eq $clogpackage; my $cversion = getfield $clogp, 'Version'; if (!$we_are_initiator) { # rpush initiator can't do this because it doesn't have $isuite yet my $tag = debiantag_new($cversion, access_nomdistro); runcmd @git, qw(check-ref-format), $tag; } my $dscfn = dscfn($cversion); return ($clogp, $cversion, $dscfn); } sub push_parse_dsc ($$$) { my ($dscfn,$dscfnwhat, $cversion) = @_; $dsc = parsecontrol($dscfn,$dscfnwhat); my $dversion = getfield $dsc, 'Version'; my $dscpackage = getfield $dsc, 'Source'; ($dscpackage eq $package && $dversion eq $cversion) or fail f_ "%s is for %s %s but debian/changelog is for %s %s", $dscfn, $dscpackage, $dversion, $package, $cversion; } sub push_tagwants ($$$$) { my ($cversion, $dgithead, $maintviewhead, $tfbase) = @_; my @tagwants; push @tagwants, { TagFn => \&debiantag_new, Objid => $dgithead, TfSuffix => '', View => 'dgit', }; if (defined $maintviewhead) { push @tagwants, { TagFn => \&debiantag_maintview, Objid => $maintviewhead, TfSuffix => '-maintview', View => 'maint', }; } elsif ($dodep14tag 
ne 'no') { push @tagwants, { TagFn => \&debiantag_maintview, Objid => $dgithead, TfSuffix => '-dgit', View => 'dgit', }; }; foreach my $tw (@tagwants) { $tw->{Tag} = $tw->{TagFn}($cversion, access_nomdistro); $tw->{Tfn} = sub { $tfbase.$tw->{TfSuffix}.$_[0]; }; } printdebug 'push_tagwants: ', Dumper(\@_, \@tagwants); return @tagwants; } sub push_mktags ($$ $$ $) { my ($clogp,$dscfn, $changesfile,$changesfilewhat, $tagwants) = @_; die unless $tagwants->[0]{View} eq 'dgit'; my $declaredistro = access_nomdistro(); my $reader_giturl = do { local $access_forpush=0; access_giturl(); }; $dsc->{$ourdscfield[0]} = join " ", $tagwants->[0]{Objid}, $declaredistro, $tagwants->[0]{Tag}, $reader_giturl; $dsc->save("$dscfn.tmp") or confess "$!"; my $changes = parsecontrol($changesfile,$changesfilewhat); foreach my $field (qw(Source Distribution Version)) { $changes->{$field} eq $clogp->{$field} or fail f_ "changes field %s \`%s' does not match changelog \`%s'", $field, $changes->{$field}, $clogp->{$field}; } my $cversion = getfield $clogp, 'Version'; my $clogsuite = getfield $clogp, 'Distribution'; my $format = getfield $dsc, 'Format'; # We make the git tag by hand because (a) that makes it easier # to control the "tagger" (b) we can do remote signing my $authline = clogp_authline $clogp; my $mktag = sub { my ($tw) = @_; my $tfn = $tw->{Tfn}; my $head = $tw->{Objid}; my $tag = $tw->{Tag}; open TO, '>', $tfn->('.tmp') or confess "$!"; print TO <{View} eq 'dgit') { print TO sprintf <{View} eq 'maint') { print TO sprintf <('.tmp'); if ($sign) { if (!defined $keyid) { $keyid = access_cfg('keyid','RETURN-UNDEF'); } if (!defined $keyid) { $keyid = getfield $clogp, 'Maintainer'; } unlink $tfn->('.tmp.asc') or $!==&ENOENT or confess "$!"; my @sign_cmd = (@gpg, qw(--detach-sign --armor)); push @sign_cmd, qw(-u),$keyid if defined $keyid; push @sign_cmd, $tfn->('.tmp'); runcmd_ordryrun @sign_cmd; if (act_scary()) { $tagobjfn = $tfn->('.signed.tmp'); runcmd shell_cmd "exec >$tagobjfn", qw(cat --), $tfn->('.tmp'), $tfn->('.tmp.asc'); } } return $tagobjfn; }; my @r = map { $mktag->($_); } @$tagwants; return @r; } sub sign_changes ($) { my ($changesfile) = @_; if ($sign) { my @debsign_cmd = @debsign; push @debsign_cmd, "-k$keyid" if defined $keyid; push @debsign_cmd, "-p$gpg[0]" if $gpg[0] ne 'gpg'; push @debsign_cmd, $changesfile; runcmd_ordryrun @debsign_cmd; } } sub dopush () { printdebug "actually entering push\n"; supplementary_message(__ <<'END'); Push failed, while checking state of the archive. You can retry the push, after fixing the problem, if you like. END if (check_for_git()) { git_fetch_us(); } my $archive_hash = fetch_from_archive(); if (!$archive_hash) { $new_package or fail __ "package appears to be new in this suite;". " if this is intentional, use --new"; } supplementary_message(__ <<'END'); Push failed, while preparing your push. You can retry the push, after fixing the problem, if you like. 
END prep_ud(); access_giturl(); # check that success is vaguely likely rpush_handle_protovsn_bothends() if $we_are_initiator; my $clogpfn = dgit_privdir()."/changelog.822.tmp"; runcmd shell_cmd "exec >$clogpfn", qw(dpkg-parsechangelog); responder_send_file('parsed-changelog', $clogpfn); my ($clogp, $cversion, $dscfn) = push_parse_changelog("$clogpfn"); my $dscpath = "$buildproductsdir/$dscfn"; stat_exists $dscpath or fail f_ "looked for .dsc %s, but %s; maybe you forgot to build", $dscpath, $!; responder_send_file('dsc', $dscpath); push_parse_dsc($dscpath, $dscfn, $cversion); my $format = getfield $dsc, 'Format'; my $symref = git_get_symref(); my $actualhead = git_rev_parse('HEAD'); if (branch_is_gdr_unstitched_ff($symref, $actualhead, $archive_hash)) { if (quiltmode_splitting()) { my ($ffq_prev, $gdrlast) = branch_gdr_info($symref, $actualhead); fail f_ <{Version}; if (madformat_wantfixup($format)) { # user might have not used dgit build, so maybe do this now: if (do_split_brain()) { changedir $playground; my $cachekey; ($dgithead, $cachekey) = quilt_check_splitbrain_cache($actualhead, $upstreamversion); $dgithead or fail f_ "--quilt=%s but no cached dgit view: perhaps HEAD changed since dgit build[-source] ?", $quilt_mode; } if (!do_split_brain()) { # In split brain mode, do not attempt to incorporate dirty # stuff from the user's working tree. That would be mad. commit_quilty_patch(); } } if (do_split_brain()) { $made_split_brain = 1; $dgithead = splitbrain_pseudomerge($clogp, $actualhead, $dgithead, $archive_hash); $maintviewhead = $actualhead; changedir $maindir; prep_ud(); # so _only_subdir() works, below } if (defined $overwrite_version && !defined $maintviewhead && $archive_hash) { $dgithead = plain_overwrite_pseudomerge($clogp, $dgithead, $archive_hash); } check_not_dirty(); my $forceflag = ''; if ($archive_hash) { if (is_fast_fwd($archive_hash, $dgithead)) { # ok } elsif (deliberately_not_fast_forward) { $forceflag = '+'; } else { fail __ "dgit push: HEAD is not a descendant". " of the archive's version.\n". "To overwrite the archive's contents,". " pass --overwrite[=VERSION].\n". "To rewind history, if permitted by the archive,". " use --deliberately-not-fast-forward."; } } confess unless !!$made_split_brain == do_split_brain(); changedir $playground; progress f_ "checking that %s corresponds to HEAD", $dscfn; runcmd qw(dpkg-source -x --), $dscpath =~ m#^/# ? $dscpath : "$maindir/$dscpath"; my ($tree,$dir) = mktree_in_ud_from_only_subdir("source package"); check_for_vendor_patches() if madformat($dsc->{format}); changedir $maindir; my @diffcmd = (@git, qw(diff --quiet), $tree, $dgithead); debugcmd "+",@diffcmd; $!=0; $?=-1; my $r = system @diffcmd; if ($r) { if ($r==256) { my $referent = $made_split_brain ? 
$dgithead : 'HEAD'; my $diffs = cmdoutput @git, qw(diff --stat), $tree, $dgithead; my @mode_changes; my $raw = cmdoutput @git, qw(diff --no-renames -z -r --raw), $tree, $dgithead; my $changed; foreach (split /\0/, $raw) { if (defined $changed) { push @mode_changes, "$changed: $_\n" if $changed; $changed = undef; next; } elsif (m/^:0+ 0+ /) { $changed = ''; } elsif (m/^:(?:10*)?(\d+) (?:10*)?(\d+) /) { $changed = "Mode change from $1 to $2" } else { die "$_ ?"; } } if (@mode_changes) { fail +(f_ <{Files} =~ m{\.deb$}m; my $sourceonlypolicy = access_cfg 'source-only-uploads'; if ($sourceonlypolicy eq 'ok') { } elsif ($sourceonlypolicy eq 'always') { forceable_fail [qw(uploading-binaries)], __ "uploading binaries, although distro policy is source only" if $hasdebs; } elsif ($sourceonlypolicy eq 'never') { forceable_fail [qw(uploading-source-only)], __ "source-only upload, although distro policy requires .debs" if !$hasdebs; } elsif ($sourceonlypolicy eq 'not-wholly-new') { forceable_fail [qw(uploading-source-only)], f_ "source-only upload, even though package is entirely NEW\n". "(this is contrary to policy in %s)", access_nomdistro() if !$hasdebs && $new_package && !(archive_query('package_not_wholly_new', $package) // 1); } else { badcfg f_ "unknown source-only-uploads policy \`%s'", $sourceonlypolicy; } # Perhaps adjust .dsc to contain right set of origs changes_update_origs_from_dsc($dsc, $changes, $upstreamversion, $changesfile) unless forceing [qw(changes-origs-exactly)]; # Checks complete, we're going to try and go ahead: responder_send_file('changes',$changesfile); responder_send_command("param head $dgithead"); responder_send_command("param csuite $csuite"); responder_send_command("param isuite $isuite"); responder_send_command("param tagformat new"); # needed in $protovsn==4 responder_send_command("param splitbrain $do_split_brain"); if (defined $maintviewhead) { responder_send_command("param maint-view $maintviewhead"); } # Perhaps send buildinfo(s) for signing my $changes_files = getfield $changes, 'Files'; my @buildinfos = ($changes_files =~ m/ .* (\S+\.buildinfo)$/mg); foreach my $bi (@buildinfos) { responder_send_command("param buildinfo-filename $bi"); responder_send_file('buildinfo', "$buildproductsdir/$bi"); } if (deliberately_not_fast_forward) { git_for_each_ref(lrfetchrefs, sub { my ($objid,$objtype,$lrfetchrefname,$reftail) = @_; my $rrefname= substr($lrfetchrefname, length(lrfetchrefs) + 1); responder_send_command("previously $rrefname=$objid"); $previously{$rrefname} = $objid; }); } my @tagwants = push_tagwants($cversion, $dgithead, $maintviewhead, dgit_privdir()."/tag"); my @tagobjfns; supplementary_message(__ <<'END'); Push failed, while signing the tag. You can retry the push, after fixing the problem, if you like. END # If we manage to sign but fail to record it anywhere, it's fine. if ($we_are_responder) { @tagobjfns = map { $_->{Tfn}('.signed-tmp') } @tagwants; responder_receive_files('signed-tag', @tagobjfns); } else { @tagobjfns = push_mktags($clogp,$dscpath, $changesfile,$changesfile, \@tagwants); } supplementary_message(__ <<'END'); Push failed, *after* signing the tag. If you want to try again, you should use a new version number. 
END pairwise { $a->{TagObjFn} = $b } @tagwants, @tagobjfns; foreach my $tw (@tagwants) { my $tag = $tw->{Tag}; my $tagobjfn = $tw->{TagObjFn}; my $tag_obj_hash = cmdoutput @git, qw(hash-object -w -t tag), $tagobjfn; runcmd_ordryrun @git, qw(verify-tag), $tag_obj_hash; runcmd_ordryrun_local @git, qw(update-ref), "refs/tags/$tag", $tag_obj_hash; } supplementary_message(__ <<'END'); Push failed, while updating the remote git repository - see messages above. If you want to try again, you should use a new version number. END if (!check_for_git()) { create_remote_git_repo(); } my @pushrefs = $forceflag.$dgithead.":".rrref(); foreach my $tw (@tagwants) { push @pushrefs, $forceflag."refs/tags/$tw->{Tag}"; } runcmd_ordryrun @git, qw(-c push.followTags=false push), access_giturl(), @pushrefs; runcmd_ordryrun git_update_ref_cmd 'dgit push', lrref(), $dgithead; supplementary_message(__ <<'END'); Push failed, while obtaining signatures on the .changes and .dsc. If it was just that the signature failed, you may try again by using debsign by hand to sign the changes file (see the command dgit tried, above), and then dput that changes file to complete the upload. If you need to change the package, you must use a new version number. END if ($we_are_responder) { my $dryrunsuffix = act_local() ? "" : ".tmp"; my @rfiles = ($dscpath, $changesfile); push @rfiles, map { "$buildproductsdir/$_" } @buildinfos; responder_receive_files('signed-dsc-changes', map { "$_$dryrunsuffix" } @rfiles); } else { if (act_local()) { rename "$dscpath.tmp",$dscpath or die "$dscfn $!"; } else { progress f_ "[new .dsc left in %s.tmp]", $dscpath; } sign_changes $changesfile; } supplementary_message(f_ <&STDOUT" or confess "$!"; autoflush PO 1; open STDOUT, ">&STDERR" or confess "$!"; autoflush STDOUT 1; $vsnwant //= 1; ($protovsn) = grep { $vsnwant =~ m{^(?:.*,)?$_(?:,.*)?$} } @rpushprotovsn_support; fail f_ "build host has dgit rpush protocol versions %s". " but invocation host has %s", (join ",", @rpushprotovsn_support), $vsnwant unless defined $protovsn; changedir $dir; } sub cmd_remote_push_build_host { responder_send_command("dgit-remote-push-ready $protovsn"); &cmd_push; } sub pre_remote_push_responder { pre_remote_push_build_host(); } sub cmd_remote_push_responder { cmd_remote_push_build_host(); } # ... 
for compatibility with proto vsn.1 dgit (just so that user gets # a good error message) sub rpush_handle_protovsn_bothends () { } our $i_tmp; sub i_cleanup { local ($@, $?); my $report = i_child_report(); if (defined $report) { printdebug "($report)\n"; } elsif ($i_child_pid) { printdebug "(killing build host child $i_child_pid)\n"; kill 15, $i_child_pid; } if (defined $i_tmp && !defined $initiator_tempdir) { changedir "/"; eval { rmtree $i_tmp; }; } } END { return unless forkcheck_mainprocess(); i_cleanup(); } sub i_method { my ($base,$selector,@args) = @_; $selector =~ s/\-/_/g; { no strict qw(refs); &{"${base}_${selector}"}(@args); } } sub pre_rpush () { not_necessarily_a_tree(); } sub cmd_rpush { my $host = nextarg; my $dir; if ($host =~ m/^((?:[^][]|\[[^][]*\])*)\:/) { $host = $1; $dir = $'; #'; } else { $dir = nextarg; } $dir =~ s{^-}{./-}; my @rargs = ($dir); push @rargs, join ",", @rpushprotovsn_support; my @rdgit; push @rdgit, @dgit; push @rdgit, @ropts; push @rdgit, qw(remote-push-build-host), (scalar @rargs), @rargs; push @rdgit, @ARGV; my @cmd = (@ssh, $host, shellquote @rdgit); debugcmd "+",@cmd; $we_are_initiator=1; if (defined $initiator_tempdir) { rmtree $initiator_tempdir; mkdir $initiator_tempdir, 0700 or fail f_ "create %s: %s", $initiator_tempdir, $!; $i_tmp = $initiator_tempdir; } else { $i_tmp = tempdir(); } $i_child_pid = open2(\*RO, \*RI, @cmd); changedir $i_tmp; ($protovsn) = initiator_expect { m/^dgit-remote-push-ready (\S+)/ }; die "$protovsn ?" unless grep { $_ eq $protovsn } @rpushprotovsn_support; for (;;) { my ($icmd,$iargs) = initiator_expect { m/^(\S+)(?: (.*))?$/; ($1,$2); }; i_method "i_resp", $icmd, $iargs; } } sub i_resp_progress ($) { my ($rhs) = @_; my $msg = protocol_read_bytes \*RO, $rhs; progress $msg; } sub i_resp_supplementary_message ($) { my ($rhs) = @_; $supplementary_message = protocol_read_bytes \*RO, $rhs; } sub i_resp_complete { my $pid = $i_child_pid; $i_child_pid = undef; # prevents killing some other process with same pid printdebug "waiting for build host child $pid...\n"; my $got = waitpid $pid, 0; confess "$!" unless $got == $pid; fail f_ "build host child failed: %s", waitstatusmsg() if $?; i_cleanup(); printdebug __ "all done\n"; finish 0; } sub i_resp_file ($) { my ($keyword) = @_; my $localname = i_method "i_localname", $keyword; my $localpath = "$i_tmp/$localname"; stat_exists $localpath and badproto \*RO, f_ "file %s (%s) twice", $keyword, $localpath; protocol_receive_file \*RO, $localpath; i_method "i_file", $keyword; } our %i_param; sub i_resp_param ($) { $_[0] =~ m/^(\S+) (.*)$/ or badproto \*RO, __ "bad param spec"; $i_param{$1} = $2; } sub i_resp_previously ($) { $_[0] =~ m#^(refs/tags/\S+)=(\w+)$# or badproto \*RO, __ "bad previously spec"; my $r = system qw(git check-ref-format), $1; confess "bad previously ref spec ($r)" if $r; $previously{$1} = $2; } our %i_wanted; our ($i_clogp, $i_version, $i_dscfn, $i_changesfn, @i_buildinfos); sub i_resp_want ($) { my ($keyword) = @_; die "$keyword ?" 
if $i_wanted{$keyword}++; defined $i_param{'csuite'} or badproto \*RO, "premature desire, no csuite"; $isuite = $i_param{'isuite'} // $i_param{'csuite'}; die unless $isuite =~ m/^$suite_re$/; if (!defined $dsc) { pushing(); rpush_handle_protovsn_bothends(); push_parse_dsc $i_dscfn, 'remote dsc', $i_version; if ($protovsn >= 6) { determine_whether_split_brain getfield $dsc, 'Format'; $do_split_brain eq ($i_param{'splitbrain'} // '') or badproto \*RO, "split brain mismatch, $do_split_brain != $i_param{'split_brain'}"; printdebug "rpush split brain $do_split_brain\n"; } } my @localpaths = i_method "i_want", $keyword; printdebug "[[ $keyword @localpaths\n"; foreach my $localpath (@localpaths) { protocol_send_file \*RI, $localpath; } print RI "files-end\n" or confess "$!"; } sub i_localname_parsed_changelog { return "remote-changelog.822"; } sub i_file_parsed_changelog { ($i_clogp, $i_version, $i_dscfn) = push_parse_changelog "$i_tmp/remote-changelog.822"; die if $i_dscfn =~ m#/|^\W#; } sub i_localname_dsc { defined $i_dscfn or badproto \*RO, "dsc (before parsed-changelog)"; return $i_dscfn; } sub i_file_dsc { } sub i_localname_buildinfo ($) { my $bi = $i_param{'buildinfo-filename'}; defined $bi or badproto \*RO, "buildinfo before filename"; defined $i_changesfn or badproto \*RO, "buildinfo before changes"; $bi =~ m{^\Q$package\E_[!-.0-~]*\.buildinfo$}s or badproto \*RO, "improper buildinfo filename"; return $&; } sub i_file_buildinfo { my $bi = $i_param{'buildinfo-filename'}; my $bd = parsecontrol "$i_tmp/$bi", $bi; my $ch = parsecontrol "$i_tmp/$i_changesfn", 'changes'; if (!forceing [qw(buildinfo-changes-mismatch)]) { files_compare_inputs($bd, $ch); (getfield $bd, $_) eq (getfield $ch, $_) or fail f_ "buildinfo mismatch in field %s", $_ foreach qw(Source Version); !defined $bd->{$_} or fail f_ "buildinfo contains forbidden field %s", $_ foreach qw(Changes Changed-by Distribution); } push @i_buildinfos, $bi; delete $i_param{'buildinfo-filename'}; } sub i_localname_changes { defined $i_dscfn or badproto \*RO, "dsc (before parsed-changelog)"; $i_changesfn = $i_dscfn; $i_changesfn =~ s/\.dsc$/_dgit.changes/ or die; return $i_changesfn; } sub i_file_changes { } sub i_want_signed_tag { printdebug Dumper(\%i_param, $i_dscfn); defined $i_param{'head'} && defined $i_dscfn && defined $i_clogp && defined $i_param{'csuite'} or badproto \*RO, "premature desire for signed-tag"; my $head = $i_param{'head'}; die if $head =~ m/[^0-9a-f]/ || $head !~ m/^../; my $maintview = $i_param{'maint-view'}; die if defined $maintview && $maintview =~ m/[^0-9a-f]/; if ($protovsn == 4) { my $p = $i_param{'tagformat'} // ''; $p eq 'new' or badproto \*RO, "tag format mismatch: $p vs. new"; } die unless $i_param{'csuite'} =~ m/^$suite_re$/; $csuite = $&; defined $dsc or badproto \*RO, "dsc (before parsed-changelog)"; my @tagwants = push_tagwants $i_version, $head, $maintview, "tag"; return push_mktags $i_clogp, $i_dscfn, $i_changesfn, (__ 'remote changes file'), \@tagwants; } sub i_want_signed_dsc_changes { rename "$i_dscfn.tmp","$i_dscfn" or die "$i_dscfn $!"; sign_changes $i_changesfn; return ($i_dscfn, $i_changesfn, @i_buildinfos); } #---------- building etc. 
---------- our $version; our $sourcechanges; our $dscfn; #----- `3.0 (quilt)' handling ----- our $fakeeditorenv = 'DGIT_FAKE_EDITOR_QUILT'; sub quiltify_dpkg_commit ($$$;$) { my ($patchname,$author,$msg, $xinfo) = @_; $xinfo //= ''; mkpath '.git/dgit'; # we are in playtree my $descfn = ".git/dgit/quilt-description.tmp"; open O, '>', $descfn or confess "$descfn: $!"; $msg =~ s/\n+/\n\n/; print O <{$fn} # is set for each modified .gitignore filename $fn # if $unrepres is defined, array ref to which is appeneded # a list of unrepresentable changes (removals of upstream files # (as messages) local $/=undef; my @cmd = (@git, qw(diff-tree -z --no-renames)); push @cmd, qw(--name-only) unless $unrepres; push @cmd, qw(-r) if $finegrained || $unrepres; push @cmd, $x, $y; my $diffs= cmdoutput @cmd; my $r = 0; my @lmodes; foreach my $f (split /\0/, $diffs) { if ($unrepres && !@lmodes) { @lmodes = $f =~ m/^\:(\w+) (\w+) \w+ \w+ / or die "$_ ?"; next; } my ($oldmode,$newmode) = @lmodes; @lmodes = (); next if $f =~ m#^debian(?:/.*)?$#s; if ($unrepres) { eval { die __ "not a plain file or symlink\n" unless $newmode =~ m/^(?:10|12)\d{4}$/ || $oldmode =~ m/^(?:10|12)\d{4}$/; if ($oldmode =~ m/[^0]/ && $newmode =~ m/[^0]/) { # both old and new files exist die __ "mode or type changed\n" if $oldmode ne $newmode; die __ "modified symlink\n" unless $newmode =~ m/^10/; } elsif ($oldmode =~ m/[^0]/) { # deletion die __ "deletion of symlink\n" unless $oldmode =~ m/^10/; } else { # creation die __ "creation with non-default mode\n" unless $newmode =~ m/^100644$/ or $newmode =~ m/^120000$/; } }; if ($@) { local $/="\n"; chomp $@; push @$unrepres, [ $f, "$@ ($oldmode->$newmode)" ]; } } my $isignore = $f =~ m#^(?:.*/)?.gitignore$#s; $r |= $isignore ? 02 : 01; $ignorenamesr->{$f}=1 if $ignorenamesr && $isignore; } printdebug "quiltify_trees_differ $x $y => $r\n"; return $r; } sub quiltify_tree_sentinelfiles ($) { # lists the `sentinel' files present in the tree my ($x) = @_; my $r = cmdoutput @git, qw(ls-tree --name-only), $x, qw(-- debian/rules debian/control); $r =~ s/\n/,/g; return $r; } sub quiltify_splitting ($$$$$$$) { my ($clogp, $unapplied, $headref, $oldtiptree, $diffbits, $editedignores, $cachekey) = @_; my $gitignore_special = 1; if ($quilt_mode !~ m/gbp|dpm|baredebian/) { # treat .gitignore just like any other upstream file $diffbits = { %$diffbits }; $_ = !!$_ foreach values %$diffbits; $gitignore_special = 0; } # We would like any commits we generate to be reproducible my @authline = clogp_authline($clogp); local $ENV{GIT_COMMITTER_NAME} = $authline[0]; local $ENV{GIT_COMMITTER_EMAIL} = $authline[1]; local $ENV{GIT_COMMITTER_DATE} = $authline[2]; local $ENV{GIT_AUTHOR_NAME} = $authline[0]; local $ENV{GIT_AUTHOR_EMAIL} = $authline[1]; local $ENV{GIT_AUTHOR_DATE} = $authline[2]; confess unless do_split_brain(); my $fulldiffhint = sub { my ($x,$y) = @_; my $cmd = "git diff $x $y -- :/ ':!debian'"; $cmd .= " ':!/.gitignore' ':!*/.gitignore'" if $gitignore_special; return f_ "\nFor full diff showing the problem(s), type:\n %s\n", $cmd; }; if ($quilt_mode =~ m/gbp|unapplied|baredebian/ && ($diffbits->{O2H} & 01)) { my $msg = f_ "--quilt=%s specified, implying patches-unapplied git tree\n". " but git tree differs from orig in upstream files.", $quilt_mode; $msg .= $fulldiffhint->($unapplied, 'HEAD'); if (!stat_exists "debian/patches" and $quilt_mode !~ m/baredebian/) { $msg .= __ "\n ... 
debian/patches is missing; perhaps this is a patch queue branch?"; } fail $msg; } if ($quilt_mode =~ m/dpm/ && ($diffbits->{H2A} & 01)) { fail +(f_ <($oldtiptree,'HEAD'); --quilt=%s specified, implying patches-applied git tree but git tree differs from result of applying debian/patches to upstream END } if ($quilt_mode =~ m/baredebian/) { # We need to construct a merge which has upstream files from # upstream and debian/ files from HEAD. read_tree_upstream $quilt_upstream_commitish, 1, $headref; my $version = getfield $clogp, 'Version'; my $upsversion = upstreamversion $version; my $merge = make_commit [ $headref, $quilt_upstream_commitish ], [ +(f_ <{O2A} & 01)) { # some patches progress __ "dgit view: creating patches-applied version using gbp pq"; runcmd shell_cmd 'exec >/dev/null', gbp_pq, qw(import); # gbp pq import creates a fresh branch; push back to dgit-view runcmd @git, qw(update-ref refs/heads/dgit-view HEAD); runcmd @git, qw(checkout -q dgit-view); } if ($quilt_mode =~ m/gbp|dpm/ && ($diffbits->{O2A} & 02)) { fail f_ <{O2H} & 02) && # user has modified .gitignore !($diffbits->{O2A} & 02)) { # patches do not change .gitignore progress __ "dgit view: creating patch to represent .gitignore changes"; ensuredir "debian/patches"; my $gipatch = "debian/patches/auto-gitignore"; open GIPATCH, ">>", "$gipatch" or confess "$gipatch: $!"; stat GIPATCH or confess "$gipatch: $!"; fail f_ "%s already exists; but want to create it". " to record .gitignore changes", $gipatch if (stat _)[7]; print GIPATCH +(__ <>$gipatch", @git, qw(diff), $unapplied, $headref, "--", sort keys %$editedignores; open SERIES, "+>>", "debian/patches/series" or confess "$!"; defined seek SERIES, -1, 2 or $!==EINVAL or confess "$!"; my $newline; defined read SERIES, $newline, 1 or confess "$!"; print SERIES "\n" or confess "$!" unless $newline eq "\n"; print SERIES "auto-gitignore\n" or confess "$!"; close SERIES or die $!; runcmd @git, qw(add -f -- debian/patches/series), $gipatch; commit_admin +(__ < $git_commit_id, # Child => $c, # or undef if P=T # Whynot => $reason_edge_PC_unsuitable, # in @nots only # Nontrivial => true iff $p..$c has relevant changes # }; my @todo; my @nots; my $sref_S; my $max_work=100; my %considered; # saves being exponential on some weird graphs my $t_sentinels = quiltify_tree_sentinelfiles $target; my $not = sub { my ($search,$whynot) = @_; printdebug " search NOT $search->{Commit} $whynot\n"; $search->{Whynot} = $whynot; push @nots, $search; no warnings qw(exiting); next; }; push @todo, { Commit => $target, }; while (@todo) { my $c = shift @todo; next if $considered{$c->{Commit}}++; $not->($c, __ "maximum search space exceeded") if --$max_work <= 0; printdebug "quiltify investigate $c->{Commit}\n"; # are we done? 
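    # (i.e. is there a candidate commit whose tree is already identical
    #  to the patches-applied tree $oldtiptree?  If so it can serve as
    #  the base of the patch stack and the search stops.  Candidates
    #  which cannot form part of a linear patch series -- merges with
    #  more than one nontrivial parent, commits which change
    #  debian/patches, .pc or debian/source/format, commits whose
    #  sentinel files debian/rules and debian/control differ, root
    #  commits -- are recorded in @nots via $not->(), together with the
    #  reason, so that a useful message can be shown if linearisation
    #  turns out to be impossible.)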
if (!quiltify_trees_differ $c->{Commit}, $oldtiptree) { printdebug " search finished hooray!\n"; $sref_S = $c; last; } quiltify_nofix_bail " $c->{Commit}", " (tree object $oldtiptree)"; if ($quilt_mode eq 'smash') { printdebug " search quitting smash\n"; last; } my $c_sentinels = quiltify_tree_sentinelfiles $c->{Commit}; $not->($c, f_ "has %s not %s", $c_sentinels, $t_sentinels) if $c_sentinels ne $t_sentinels; my $commitdata = cmdoutput @git, qw(cat-file commit), $c->{Commit}; $commitdata =~ m/\n\n/; $commitdata =~ $`; my @parents = ($commitdata =~ m/^parent (\w+)$/gm); @parents = map { { Commit => $_, Child => $c } } @parents; $not->($c, __ "root commit") if !@parents; foreach my $p (@parents) { $p->{Nontrivial}= quiltify_trees_differ $p->{Commit},$c->{Commit}; } my $ndiffers = grep { $_->{Nontrivial} } @parents; $not->($c, f_ "merge (%s nontrivial parents)", $ndiffers) if $ndiffers > 1; foreach my $p (@parents) { printdebug "considering C=$c->{Commit} P=$p->{Commit}\n"; my @cmd= (@git, qw(diff-tree -r --name-only), $p->{Commit},$c->{Commit}, qw(-- debian/patches .pc debian/source/format)); my $patchstackchange = cmdoutput @cmd; if (length $patchstackchange) { $patchstackchange =~ s/\n/,/g; $not->($p, f_ "changed %s", $patchstackchange); } printdebug " search queue P=$p->{Commit} ", ($p->{Nontrivial} ? "NT" : "triv"),"\n"; push @todo, $p; } } if (!$sref_S) { printdebug "quiltify want to smash\n"; my $abbrev = sub { my $x = $_[0]{Commit}; $x =~ s/(.*?[0-9a-z]{8})[0-9a-z]*$/$1/; return $x; }; if ($quilt_mode eq 'linear') { print STDERR f_ "\n%s: error: quilt fixup cannot be linear. Stopped at:\n", $us; my $all_gdr = !!@nots; foreach my $notp (@nots) { my $c = $notp->{Child}; my $cprange = $abbrev->($notp); $cprange .= "..".$abbrev->($c) if $c; print STDERR f_ "%s: %s: %s\n", $us, $cprange, $notp->{Whynot}; $all_gdr &&= $notp->{Child} && (git_cat_file $notp->{Child}{Commit}, 'commit') =~ m{^\[git-debrebase(?! split[: ]).*\]$}m; } print STDERR "\n"; $failsuggestion = [ grep { $_->[0] ne 'quilt-mode' } @$failsuggestion ] if $all_gdr; print STDERR "$us: $_->[1]\n" foreach @$failsuggestion; fail __ "quilt history linearisation failed. Search \`quilt fixup' in dgit(7).\n"; } elsif ($quilt_mode eq 'smash') { } elsif ($quilt_mode eq 'auto') { progress __ "quilt fixup cannot be linear, smashing..."; } else { confess "$quilt_mode ?"; } my $time = $ENV{'GIT_COMMITTER_DATE'} || time; $time =~ s/\s.*//; # trim timezone from GIT_COMMITTER_DATE my $ncommits = 3; my $msg = cmdoutput @git, qw(log), "-n$ncommits"; quiltify_dpkg_commit "auto-$version-$target-$time", (getfield $clogp, 'Maintainer'), (f_ "Automatically generated patch (%s)\n". "Last (up to) %s git changes, FYI:\n\n", $clogp->{Version}, $ncommits). 
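    # ($msg, appended here, is the output of `git log' for the last
    #  $ncommits (3) commits; it ends up in the auto-generated patch's
    #  description purely as an FYI for whoever reads the patch, as the
    #  format string above says)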
$msg; return; } progress __ "quiltify linearisation planning successful, executing..."; for (my $p = $sref_S; my $c = $p->{Child}; $p = $p->{Child}) { printdebug "quiltify traverse $p->{Commit}..$c->{Commit}\n"; next unless $p->{Nontrivial}; my $cc = $c->{Commit}; my $commitdata = cmdoutput @git, qw(cat-file commit), $cc; $commitdata =~ m/\n\n/ or die "$c ?"; $commitdata = $`; my $msg = $'; #'; $commitdata =~ m/^author (.*) \d+ [-+0-9]+$/m or die "$cc ?"; my $author = $1; my $commitdate = cmdoutput @git, qw(log -n1 --pretty=format:%aD), $cc; $msg =~ s/^(.*)\n*/$1\n/ or die "$cc $msg ?"; my $strip_nls = sub { $msg =~ s/\n+$//; $msg .= "\n"; }; $strip_nls->(); my $title = $1; my $patchname; my $patchdir; my $gbp_check_suitable = sub { $_ = shift; my ($what) = @_; eval { die __ "contains unexpected slashes\n" if m{//} || m{/$}; die __ "contains leading punctuation\n" if m{^\W} || m{/\W}; die __ "contains bad character(s)\n" if m{[^-a-z0-9_.+=~/]}i; die __ "is series file\n" if m{$series_filename_re}o; die __ "too long\n" if length > 200; }; return $_ unless $@; print STDERR f_ "quiltifying commit %s: ignoring/dropping Gbp-Pq %s: %s", $cc, $what, $@; return undef; }; if ($msg =~ s/^ (?: gbp(?:-pq)? : \s* name \s+ | gbp-pq-name: \s* ) (\S+) \s* \n //ixm) { $patchname = $gbp_check_suitable->($1, 'Name'); } if ($msg =~ s/^ (?: gbp(?:-pq)? : \s* topic \s+ | gbp-pq-topic: \s* ) (\S+) \s* \n //ixm) { $patchdir = $gbp_check_suitable->($1, 'Topic'); } $strip_nls->(); if (!defined $patchname) { $patchname = $title; $patchname =~ s/[.:]$//; use Text::Iconv; eval { my $converter = new Text::Iconv qw(UTF-8 ASCII//TRANSLIT); my $translitname = $converter->convert($patchname); die unless defined $translitname; $patchname = $translitname; }; print STDERR +(f_ "dgit: patch title transliteration error: %s", $@) if $@; $patchname =~ y/ A-Z/-a-z/; $patchname =~ y/-a-z0-9_.+=~//cd; $patchname =~ s/^\W/x-$&/; $patchname = substr($patchname,0,40); $patchname .= ".patch"; } if (!defined $patchdir) { $patchdir = ''; } if (length $patchdir) { $patchname = "$patchdir/$patchname"; } if ($patchname =~ m{^(.*)/}) { mkpath "debian/patches/$1"; } my $index; for ($index=''; stat "debian/patches/$patchname$index"; $index++) { } $!==ENOENT or confess "$patchname$index $!"; runcmd @git, qw(checkout -q), $cc; # We use the tip's changelog so that dpkg-source doesn't # produce complaining messages from dpkg-parsechangelog. None # of the information dpkg-source gets from the changelog is # actually relevant - it gets put into the original message # which dpkg-source provides our stunt editor, and then # overwritten. runcmd @git, qw(checkout -q), $target, qw(debian/changelog); quiltify_dpkg_commit "$patchname$index", $author, $msg, "Date: $commitdate\n". 
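    # (the X-Dgit-Generated trailer below records the changelog version
    #  and the commit this particular patch was generated from)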
"X-Dgit-Generated: $clogp->{Version} $cc\n"; runcmd @git, qw(checkout -q), $cc, qw(debian/changelog); } } sub build_maybe_quilt_fixup () { my ($format,$fopts) = get_source_format; return unless madformat_wantfixup $format; # sigh check_for_vendor_patches(); my $clogp = parsechangelog(); my $headref = git_rev_parse('HEAD'); my $symref = git_get_symref(); my $upstreamversion = upstreamversion $version; prep_ud(); changedir $playground; my $splitbrain_cachekey; if (do_split_brain()) { my $cachehit; ($cachehit, $splitbrain_cachekey) = quilt_check_splitbrain_cache($headref, $upstreamversion); if ($cachehit) { changedir $maindir; return; } } unpack_playtree_need_cd_work($headref); if (do_split_brain()) { runcmd @git, qw(checkout -q -b dgit-view); # so long as work is not deleted, its current branch will # remain dgit-view, rather than master, so subsequent calls to # unpack_playtree_need_cd_work # will DTRT, resetting dgit-view. confess if $made_split_brain; $made_split_brain = 1; } chdir '..'; if ($fopts->{'single-debian-patch'}) { fail f_ "quilt mode %s does not make sense (or is not supported) with single-debian-patch", $quilt_mode if quiltmode_splitting(); quilt_fixup_singlepatch($clogp, $headref, $upstreamversion); } else { quilt_fixup_multipatch($clogp, $headref, $upstreamversion, $splitbrain_cachekey); } if (do_split_brain()) { my $dgitview = git_rev_parse 'HEAD'; changedir $maindir; reflog_cache_insert "refs/$splitbraincache", $splitbrain_cachekey, $dgitview; changedir "$playground/work"; my $saved = maybe_split_brain_save $headref, $dgitview, __ "converted"; progress f_ "dgit view: created (%s)", $saved; } changedir $maindir; runcmd_ordryrun_local @git, qw(pull --ff-only -q), "$playground/work", qw(master); } sub build_check_quilt_splitbrain () { build_maybe_quilt_fixup(); } sub unpack_playtree_need_cd_work ($) { my ($headref) = @_; # prep_ud() must have been called already. if (!chdir "work") { # Check in the filesystem because sometimes we run prep_ud # in between multiple calls to unpack_playtree_need_cd_work. confess "$!" unless $!==ENOENT; mkdir "work" or confess "$!"; changedir "work"; mktree_in_ud_here(); } runcmd @git, qw(reset -q --hard), $headref; } sub unpack_playtree_linkorigs ($$) { my ($upstreamversion, $fn) = @_; # calls $fn->($leafname); my $bpd_abs = bpd_abs(); dotdot_bpd_transfer_origs $bpd_abs, $upstreamversion, sub { 1 }; opendir QFD, $bpd_abs or fail "buildproductsdir: $bpd_abs: $!"; while ($!=0, defined(my $leaf = readdir QFD)) { my $f = bpd_abs()."/".$leaf; { local ($debuglevel) = $debuglevel-1; printdebug "QF linkorigs bpd $leaf, $f ?\n"; } next unless is_orig_file_of_vsn $leaf, $upstreamversion; printdebug "QF linkorigs $leaf, $f Y\n"; link_ltarget $f, $leaf or die "$leaf $!"; $fn->($leaf); } die "$buildproductsdir: $!" if $!; closedir QFD; } sub quilt_fixup_delete_pc () { runcmd @git, qw(rm -rqf .pc); commit_admin +(__ <' or confess "$!"; print $fakedsc <addfile($fh); print $fakedsc " ".$md->hexdigest." 
$size $leaf\n" or confess "$!"; }; unpack_playtree_linkorigs($upstreamversion, $dscaddfile); my @files=qw(debian/source/format debian/rules debian/control debian/changelog); foreach my $maybe (qw(debian/patches debian/source/options debian/tests/control)) { next unless stat_exists "$maindir/$maybe"; push @files, $maybe; } my $debtar= srcfn $fakeversion,'.debian.tar.gz'; runcmd qw(env GZIP=-1n tar -zcf), "./$debtar", qw(-C), $maindir, @files; $dscaddfile->($debtar); close $fakedsc or confess "$!"; } sub quilt_fakedsc2unapplied ($$) { my ($headref, $upstreamversion) = @_; # must be run in the playground # quilt_need_fake_dsc must have been called quilt_need_fake_dsc($upstreamversion); runcmd qw(sh -ec), 'exec dpkg-source --no-check --skip-patches -x fake.dsc >/dev/null'; my $fakexdir= $package.'-'.(stripepoch $upstreamversion); rename $fakexdir, "fake" or die "$fakexdir $!"; changedir 'fake'; remove_stray_gits(__ "source package"); mktree_in_ud_here(); rmtree '.pc'; rmtree 'debian'; # git checkout commitish paths does not delete! runcmd @git, qw(checkout -f), $headref, qw(-- debian); my $unapplied=git_add_write_tree(); printdebug "fake orig tree object $unapplied\n"; return $unapplied; } sub quilt_check_splitbrain_cache ($$) { my ($headref, $upstreamversion) = @_; # Called only if we are in (potentially) split brain mode. # Called in playground. # Computes the cache key and looks in the cache. # Returns ($dgit_view_commitid, $cachekey) or (undef, $cachekey) quilt_need_fake_dsc($upstreamversion); my $splitbrain_cachekey; progress f_ "dgit: split brain (separate dgit view) may be needed (--quilt=%s).", $quilt_mode; # we look in the reflog of dgit-intern/quilt-cache # we look for an entry whose message is the key for the cache lookup my @cachekey = (qw(dgit), $our_version); push @cachekey, $upstreamversion; push @cachekey, $quilt_mode; push @cachekey, $headref; push @cachekey, $quilt_upstream_commitish // '-'; push @cachekey, hashfile('fake.dsc'); my $srcshash = Digest::SHA->new(256); my %sfs = ( %INC, '$0(dgit)' => $0 ); foreach my $sfk (sort keys %sfs) { next unless $sfk =~ m/^\$0\b/ || $sfk =~ m{^Debian/Dgit\b}; $srcshash->add($sfk," "); $srcshash->add(hashfile($sfs{$sfk})); $srcshash->add("\n"); } push @cachekey, $srcshash->hexdigest(); $splitbrain_cachekey = "@cachekey"; printdebug "splitbrain cachekey $splitbrain_cachekey\n"; my $cachehit = reflog_cache_lookup "refs/$splitbraincache", $splitbrain_cachekey; if ($cachehit) { unpack_playtree_need_cd_work($headref); my $saved = maybe_split_brain_save $headref, $cachehit, "cache-hit"; if ($cachehit ne $headref) { progress f_ "dgit view: found cached (%s)", $saved; runcmd @git, qw(checkout -q -b dgit-view), $cachehit; $made_split_brain = 1; return ($cachehit, $splitbrain_cachekey); } progress __ "dgit view: found cached, no changes required"; return ($headref, $splitbrain_cachekey); } printdebug "splitbrain cache miss\n"; return (undef, $splitbrain_cachekey); } sub baredebian_origtarballs_scan ($$$) { my ($fakedfi, $upstreamversion, $dir) = @_; if (!opendir OD, $dir) { return if $! == ENOENT; fail "opendir $dir (origs): $!"; } while ($!=0, defined(my $leaf = readdir OD)) { { local ($debuglevel) = $debuglevel-1; printdebug "BDOS $dir $leaf ?\n"; } next unless is_orig_file_of_vsn $leaf, $upstreamversion; next if grep { $_->{Filename} eq $leaf } @$fakedfi; push @$fakedfi, { Filename => $leaf, Path => "$dir/$leaf", }; } die "$dir; $!" 
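    # ($! was reset at the top of each readdir iteration above, so a
    #  nonzero value here means readdir itself failed, not merely that
    #  it reached the end of the directory)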
if $!;
    closedir OD;
}

sub quilt_fixup_multipatch ($$$$) {
    my ($clogp, $headref, $upstreamversion, $splitbrain_cachekey) = @_;

    progress f_ "examining quilt state (multiple patches, %s mode)",
                $quilt_mode;
    # Our objective is:
    #  - honour any existing .pc in case it has any strangeness
    #  - determine the git commit corresponding to the tip of
    #    the patch stack (if there is one)
    #  - if there is such a git commit, convert each subsequent
    #    git commit into a quilt patch with dpkg-source --commit
    #  - otherwise convert all the differences in the tree into
    #    a single git commit
    #
    # To do this we:
    # Our git tree doesn't necessarily contain .pc.  (Some versions of
    # dgit would include the .pc in the git tree.)  If there isn't
    # one, we need to generate one by unpacking the patches that we
    # have.
    #
    # We first look for a .pc in the git tree.  If there is one, we
    # will use it.  (This is not the normal case.)
    #
    # Otherwise we need to regenerate .pc so that dpkg-source --commit
    # can work.  We do this as follows:
    #     1. Collect all relevant .orig from parent directory
    #     2. Generate a debian.tar.gz out of
    #          debian/{patches,rules,source/format,source/options}
    #     3. Generate a fake .dsc containing just these fields:
    #          Format Source Version Files
    #     4. Extract the fake .dsc
    #        Now the fake .dsc has a .pc directory.
    # (In fact we do this in every case, because in future we will
    # want to search for a good base commit for generating patches.)
    #
    # Then we can actually do the dpkg-source --commit
    #     1. Make a new working tree with the same object
    #        store as our main tree and check out the main
    #        tree's HEAD.
    #     2. Copy .pc from the fake's extraction, if necessary
    #     3. Run dpkg-source --commit
    #     4. If the result has changes to debian/, then
    #          - git add them
    #          - git add .pc if we had a .pc in-tree
    #          - git commit
    #     5. If we had a .pc in-tree, delete it, and git commit
    #     6. Back in the main tree, fast forward to the new HEAD

    # Another situation we may have to cope with is gbp-style
    # patches-unapplied trees.
    #
    # We would want to detect these, so we know to escape into
    # quilt_fixup_gbp.  However, this is in general not possible.
    # Consider a package with one patch which the dgit user reverts
    # (with git revert or the moral equivalent).
    #
    # That is indistinguishable in contents from a patches-unapplied
    # tree.  And looking at the history to distinguish them is not
    # useful because the user might have made a confusing-looking git
    # history structure (which ought to produce an error if dgit can't
    # cope, not a silent reintroduction of an unwanted patch).
    #
    # So gbp users will have to pass an option.  But we can usually
    # detect their failure to do so: if the tree is not a clean
    # patches-applied tree, quilt linearisation fails; if the tree
    # _is_ a clean patches-unapplied tree, we can suggest that maybe
    # they want --quilt=unapplied.
    #
    # To help detect this, when we are extracting the fake dsc, we
    # first extract it with --skip-patches, and then apply the patches
    # afterwards with dpkg-source --before-build.  That lets us save a
    # tree object corresponding to .origs.

    if ($quilt_mode eq 'linear' && branch_is_gdr($headref)) {
        # This is much faster.  It also makes patches that gdr
        # likes better for future updates without laundering.
        #
        # However, it can fail in some cases where we would
        # succeed: if there are existing patches, which correspond
        # to a prefix of the branch, but are not in gbp/gdr
        # format, gdr will fail (exiting status 7), but we might
        # be able to figure out where to start linearising.
That # will be slower so hopefully there's not much to do. unpack_playtree_need_cd_work $headref; my @cmd = (@git_debrebase, qw(--noop-ok -funclean-mixed -funclean-ordering make-patches --quiet-would-amend)); # We tolerate soe snags that gdr wouldn't, by default. if (act_local()) { debugcmd "+",@cmd; $!=0; $?=-1; failedcmd @cmd if system @cmd and not ($? == 7*256 or $? == -1 && $!==ENOENT); } else { dryrun_report @cmd; } $headref = git_rev_parse('HEAD'); chdir '..'; } my $unapplied=quilt_fakedsc2unapplied($headref, $upstreamversion); ensuredir '.pc'; my @bbcmd = (qw(sh -ec), 'exec dpkg-source --before-build . >/dev/null'); $!=0; $?=-1; if (system @bbcmd) { failedcmd @bbcmd if $? < 0; fail __ <{Commit}; if ($ti->{OrigPart} eq 'orig') { runcmd qw(git read-tree), $c; } elsif ($ti->{OrigPart} =~ m/orig-/) { read_tree_subdir $', $c; } else { confess "$ti->OrigPart} ?" } $parents .= "parent $c\n"; } my $tree = git_write_tree(); my $mbody = f_ 'Combine orig tarballs for %s %s', $package, $upstreamversion; $uheadref = hash_commit_text < quiltify_trees_differ($unapplied,$uheadref, 1, \%editedignores, \@unrepres), H2A => quiltify_trees_differ($uheadref, $oldtiptree,1), O2A => quiltify_trees_differ($unapplied,$oldtiptree,1), }; my @dl; foreach my $bits (qw(01 02)) { foreach my $v (qw(O2H O2A H2A)) { push @dl, ($diffbits->{$v} & $bits) ? '##' : '=='; } } printdebug "differences \@dl @dl.\n"; progress f_ "%s: base trees orig=%.20s o+d/p=%.20s", $us, $unapplied, $oldtiptree; # TRANSLATORS: Try to keep this ascii-art layout right. The 0s in # %9.00009s will be ignored and are there to make the format the # same length (9 characters) as the output it generates. If you # change the value 9, your translations of "upstream" and # 'tarball' must fit into the new length, and you should change # the number of 0s. Do not reduce it below 4 as HEAD has to fit # too. progress f_ "%s: quilt differences: src: %s orig %s gitignores: %s orig %s\n". "%s: quilt differences: %9.00009s %s o+d/p %9.00009s %s o+d/p", $us, $dl[0], $dl[1], $dl[3], $dl[4], $us, $uhead_whatshort, $dl[2], $uhead_whatshort, $dl[5]; if (@unrepres && $quilt_mode !~ m/baredebian/) { # With baredebian, even if the upstream commitish has this # problem, we don't want to print this message, as nothing # is going to try to make a patch out of it anyway. print STDERR f_ "dgit: cannot represent change: %s: %s\n", $_->[1], $_->[0] foreach @unrepres; forceable_fail [qw(unrepresentable)], __ <{O2H} & $diffbits->{O2A})) { push @failsuggestion, [ 'unapplied', __ "This might be a patches-unapplied branch." ]; } elsif (!($diffbits->{H2A} & $diffbits->{O2A})) { push @failsuggestion, [ 'applied', __ "This might be a patches-applied branch." ]; } push @failsuggestion, [ 'quilt-mode', __ "Maybe you need one of --[quilt=]gbp --[quilt=]dpm --quilt=unapplied ?" ]; push @failsuggestion, [ 'gitattrs', __ "Warning: Tree has .gitattributes. See GITATTRIBUTES in dgit(7)." ] if stat_exists '.gitattributes'; push @failsuggestion, [ 'origs', __ "Maybe orig tarball(s) are not identical to git representation?" 
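# (none of these hints is printed here; quiltify() below emits them,
#  possibly filtered by their keyword, only if it cannot linearise
#  the history)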
] unless $onlydebian && $quilt_mode !~ m/baredebian/; # ^ in that case, we didn't really look properly if (quiltmode_splitting()) { quiltify_splitting($clogp, $unapplied, $headref, $oldtiptree, $diffbits, \%editedignores, $splitbrain_cachekey); return; } progress f_ "starting quiltify (multiple patches, %s mode)", $quilt_mode; quiltify($clogp,$headref,$oldtiptree,\@failsuggestion); runcmd @git, qw(checkout -q), (qw(master dgit-view)[do_split_brain()]); if (!open P, '>>', ".pc/applied-patches") { $!==&ENOENT or confess "$!"; } else { close P; } commit_quilty_patch(); if ($mustdeletepc) { quilt_fixup_delete_pc(); } } sub quilt_fixup_editor () { my $descfn = $ENV{$fakeeditorenv}; my $editing = $ARGV[$#ARGV]; open I1, '<', $descfn or confess "$descfn: $!"; open I2, '<', $editing or confess "$editing: $!"; unlink $editing or confess "$editing: $!"; open O, '>', $editing or confess "$editing: $!"; while () { print O or confess "$!"; } I1->error and confess "$!"; my $copying = 0; while () { $copying ||= m/^\-\-\- /; next unless $copying; print O or confess "$!"; } I2->error and confess "$!"; close O or die $1; finish 0; } sub maybe_apply_patches_dirtily () { return unless $quilt_mode =~ m/gbp|unapplied|baredebian/; print STDERR __ <[0] } @vsns; @vsns = sort { -version_compare($a, $b) } @vsns; $changes_since_version = $vsns[0]; progress f_ "changelog will contain changes since %s", $vsns[0]; } else { $changes_since_version = '_'; progress __ "package seems new, not specifying -v"; } } if ($changes_since_version ne '_') { return ("-v$changes_since_version"); } else { return (); } } sub changesopts () { return (changesopts_initial(), changesopts_version()); } sub massage_dbp_args ($;$) { my ($cmd,$xargs) = @_; # Since we split the source build out so we can do strange things # to it, massage the arguments to dpkg-buildpackage so that the # main build doessn't build source (or add an argument to stop it # building source by default). debugcmd '#massaging#', @$cmd if $debuglevel>1; # -nc has the side effect of specifying -b if nothing else specified # and some combinations of -S, -b, et al, are errors, rather than # later simply overriding earlie. So we need to: # - search the command line for these options # - pick the last one # - perhaps add our own as a default # - perhaps adjust it to the corresponding non-source-building version my $dmode = '-F'; foreach my $l ($cmd, $xargs) { next unless $l; @$l = grep { !(m/^-[SgGFABb]$|^--build=/s and $dmode=$_) } @$l; } push @$cmd, '-nc'; #print STDERR "MASS1 ",Dumper($cmd, $xargs, $dmode); my $r = WANTSRC_BUILDER; printdebug "massage split $dmode.\n"; if ($dmode =~ s/^--build=//) { $r = 0; my @d = split /,/, $dmode; $r |= WANTSRC_SOURCE if grep { s/^full$/binary/ } @d; $r |= WANTSRC_SOURCE if grep { s/^source$// } @d; $r |= WANTSRC_BUILDER if grep { m/./ } @d; fail __ "Wanted to build nothing!" unless $r; $dmode = '--build='. join ',', grep m/./, @d; } else { $r = $dmode =~ m/[S]/ ? WANTSRC_SOURCE : $dmode =~ y/gGF/ABb/ ? WANTSRC_SOURCE | WANTSRC_BUILDER : $dmode =~ m/[ABb]/ ? 
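# (for example: plain -b, -B or -A ends up as WANTSRC_BUILDER only,
#  -S as WANTSRC_SOURCE only, and the default -F -- like -g/-G -- as
#  both, with $dmode rewritten by the y/// above to its binary-only
#  equivalent because dgit builds the source package itself)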
WANTSRC_BUILDER : confess "$dmode ?"; } printdebug "massage done $r $dmode.\n"; push @$cmd, $dmode; #print STDERR "MASS2 ",Dumper($cmd, $xargs, $r); return $r; } sub in_bpd (&) { my ($fn) = @_; my $wasdir = must_getcwd(); changedir $buildproductsdir; $fn->(); changedir $wasdir; } # this sub must run with CWD=$buildproductsdir (eg in in_bpd) sub postbuild_mergechanges ($) { my ($msg_if_onlyone) = @_; # If there is only one .changes file, fail with $msg_if_onlyone, # or if that is undef, be a no-op. # Returns the changes file to report to the user. my $pat = changespat $version; my @changesfiles = grep { !m/_multi\.changes/ } glob $pat; @changesfiles = sort { ($b =~ m/_source\.changes$/ <=> $a =~ m/_source\.changes$/) or $a cmp $b } @changesfiles; my $result; if (@changesfiles==1) { fail +(f_ < !$includedirty return !$includedirty; } sub build_source { $sourcechanges = changespat $version,'source'; if (act_local()) { unlink "$buildproductsdir/$sourcechanges" or $!==ENOENT or fail f_ "remove %s: %s", $sourcechanges, $!; } # confess unless !!$made_split_brain == do_split_brain(); my @cmd = (@dpkgsource, qw(-b --)); my $leafdir; if (building_source_in_playtree()) { $leafdir = 'work'; my $headref = git_rev_parse('HEAD'); # If we are in split brain, there is already a playtree with # the thing we should package into a .dsc (thanks to quilt # fixup). If not, make a playtree prep_ud() unless $made_split_brain; changedir $playground; unless ($made_split_brain) { my $upstreamversion = upstreamversion $version; unpack_playtree_linkorigs($upstreamversion, sub { }); unpack_playtree_need_cd_work($headref); changedir '..'; } } else { $leafdir = basename $maindir; if ($buildproductsdir ne '..') { # Well, we are going to run dpkg-source -b which consumes # origs from .. and generates output there. To make this # work when the bpd is not .. , we would have to (i) link # origs from bpd to .. , (ii) check for files that # dpkg-source -b would/might overwrite, and afterwards # (iii) move all the outputs back to the bpd (iv) except # for the origs which should be deleted from .. if they # weren't there beforehand. And if there is an error and # we don't run to completion we would necessarily leave a # mess. This is too much. The real way to fix this # is for dpkg-source to have bpd support. confess unless $includedirty; fail __ "--include-dirty not supported with --build-products-dir, sorry"; } changedir '..'; } runcmd_ordryrun_local @cmd, $leafdir; changedir $leafdir; runcmd_ordryrun_local qw(sh -ec), 'exec >../$1; shift; exec "$@"','x', $sourcechanges, @dpkggenchanges, qw(-S), changesopts(); changedir '..'; printdebug "moving $dscfn, $sourcechanges, etc. to ".bpd_abs()."\n"; $dsc = parsecontrol($dscfn, "source package"); my $mv = sub { my ($why, $l) = @_; printdebug " renaming ($why) $l\n"; rename_link_xf 0, "$l", bpd_abs()."/$l" or fail f_ "put in place new built file (%s): %s", $l, $@; }; foreach my $l (split /\n/, getfield $dsc, 'Files') { $l =~ m/\S+$/ or next; $mv->('Files', $&); } $mv->('dsc', $dscfn); $mv->('changes', $sourcechanges); changedir $maindir; } sub cmd_build_source { badusage __ "build-source takes no additional arguments" if @ARGV; build_prep(WANTSRC_SOURCE); build_source(); maybe_unapply_patches_again(); printdone f_ "source built, results in %s and %s", $dscfn, $sourcechanges; } sub cmd_push_source { prep_push(); fail __ "dgit push-source: --include-dirty/--ignore-dirty does not make". "sense with push-source!" 
if $includedirty; build_check_quilt_splitbrain(); if ($changesfile) { my $changes = parsecontrol("$buildproductsdir/$changesfile", __ "source changes file"); unless (test_source_only_changes($changes)) { fail __ "user-specified changes file is not source-only"; } } else { # Building a source package is very fast, so just do it build_source(); confess "er, patches are applied dirtily but shouldn't be.." if $patches_applied_dirtily; $changesfile = $sourcechanges; } dopush(); } sub binary_builder { my ($bbuilder, $pbmc_msg, @args) = @_; build_prep(WANTSRC_SOURCE); build_source(); midbuild_checkchanges(); in_bpd { if (act_local()) { stat_exists $dscfn or fail f_ "%s (in build products dir): %s", $dscfn, $!; stat_exists $sourcechanges or fail f_ "%s (in build products dir): %s", $sourcechanges, $!; } runcmd_ordryrun_local @$bbuilder, @args; }; maybe_unapply_patches_again(); in_bpd { postbuild_mergechanges($pbmc_msg); }; } sub cmd_sbuild { build_prep_early(); binary_builder(\@sbuild, (__ <; }; D->error and fail f_ "read %s: %s", $dscfn, $!; close C; # we don't normally need this so import it here use Dpkg::Source::Package; my $dp = new Dpkg::Source::Package filename => $dscfn, require_valid_signature => $needsig; { local $SIG{__WARN__} = sub { print STDERR $_[0]; return unless $needsig; fail __ "import-dsc signature check failed"; }; if (!$dp->is_signed()) { warn f_ "%s: warning: importing unsigned .dsc\n", $us; } else { my $r = $dp->check_signature(); confess "->check_signature => $r" if $needsig && $r; } } parse_dscdata(); $package = getfield $dsc, 'Source'; parse_dsc_field($dsc, __ "Dgit metadata in .dsc") unless forceing [qw(import-dsc-with-dgit-field)]; parse_dsc_field_def_dsc_distro(); $isuite = 'DGIT-IMPORT-DSC'; $idistro //= $dsc_distro; notpushing(); if (defined $dsc_hash) { progress __ "dgit: import-dsc of .dsc with Dgit field, using git hash"; resolve_dsc_field_commit undef, undef; } if (defined $dsc_hash) { my @cmd = (qw(sh -ec), "echo $dsc_hash | git cat-file --batch-check"); my $objgot = cmdoutput @cmd; if ($objgot =~ m#^\w+ missing\b#) { fail f_ < 0) { progress __ "Not fast forward, forced update."; } else { fail f_ "Not fast forward to %s", $dsc_hash; } } import_dsc_result $dstbranch, $dsc_hash, "dgit import-dsc (Dgit): $info", f_ "updated git ref %s", $dstbranch; return 0; } fail f_ <{Filename}; # We transfer all the pieces of the dsc to the bpd, not just # origs. This is by analogy with dgit fetch, which wants to # keep them somewhere to avoid downloading them again. # We make symlinks, though. If the user wants copies, then # they can copy the parts of the dsc to the bpd using dcmd, # or something. my $here = "$buildproductsdir/$f"; if (lstat $here) { if (stat $here) { next; } fail f_ "lstat %s works but stat gives %s !", $here, $!; } fail f_ "stat %s: %s", $here, $! unless $! 
== ENOENT; printdebug "not in bpd, $f ...\n"; # $f does not exist in bpd, we need to transfer it my $there = $dscfn; $there =~ s{[^/]+$}{$f} or confess "$there ?"; # $there is file we want, relative to user's cwd, or abs printdebug "not in bpd, $f, test $there ...\n"; stat $there or fail f_ "import %s requires %s, but: %s", $dscfn, $there, $!; if ($there =~ m#^(?:\./+)?\.\./+#) { # $there is relative to user's cwd my $there_from_parent = $'; if ($buildproductsdir !~ m{^/}) { # abs2rel, despite its name, can take two relative paths $there = File::Spec->abs2rel($there,$buildproductsdir); # now $there is relative to bpd, great printdebug "not in bpd, $f, abs2rel, $there ...\n"; } else { $there = (dirname $maindir)."/$there_from_parent"; # now $there is absoute printdebug "not in bpd, $f, rel2rel, $there ...\n"; } } elsif ($there =~ m#^/#) { # $there is absolute already printdebug "not in bpd, $f, abs, $there ...\n"; } else { fail f_ "cannot import %s which seems to be inside working tree!", $dscfn; } symlink $there, $here or fail f_ "symlink %s to %s: %s", $there, $here, $!; progress f_ "made symlink %s -> %s", $here, $there; # print STDERR Dumper($fi); } my @mergeinputs = generate_commits_from_dsc(); die unless @mergeinputs == 1; my $newhash = $mergeinputs[0]{Commit}; if ($oldhash) { if ($force > 0) { progress __ "Import, forced update - synthetic orphan git history."; } elsif ($force < 0) { progress __ "Import, merging."; my $tree = cmdoutput @git, qw(rev-parse), "$newhash:"; my $version = getfield $dsc, 'Version'; my $clogp = commit_getclogp $newhash; my $authline = clogp_authline $clogp; $newhash = hash_commit_text <",@cmd; exec @cmd or fail f_ "exec curl: %s\n", $!; } sub repos_server_url () { $package = '_dgit-repos-server'; local $access_forpush = 1; local $isuite = 'DGIT-REPOS-SERVER'; my $url = access_giturl(); } sub pre_clone_dgit_repos_server () { not_necessarily_a_tree(); } sub cmd_clone_dgit_repos_server { badusage __ "need destination argument" unless @ARGV==1; my ($destdir) = @ARGV; my $url = repos_server_url(); my @cmd = (@git, qw(clone), $url, $destdir); debugcmd ">",@cmd; exec @cmd or fail f_ "exec git clone: %s\n", $!; } sub pre_print_dgit_repos_server_source_url () { not_necessarily_a_tree(); } sub cmd_print_dgit_repos_server_source_url { badusage __ "no arguments allowed to dgit print-dgit-repos-server-source-url" if @ARGV; my $url = repos_server_url(); print $url, "\n" or confess "$!"; } sub pre_print_dpkg_source_ignores { not_necessarily_a_tree(); } sub cmd_print_dpkg_source_ignores { badusage __ "no arguments allowed to dgit print-dpkg-source-ignores" if @ARGV; print "@dpkg_source_ignores\n" or confess "$!"; } sub cmd_setup_mergechangelogs { badusage __ "no arguments allowed to dgit setup-mergechangelogs" if @ARGV; local $isuite = 'DGIT-SETUP-TREE'; setup_mergechangelogs(1); } sub cmd_setup_useremail { badusage __ "no arguments allowed to dgit setup-useremail" if @ARGV; local $isuite = 'DGIT-SETUP-TREE'; setup_useremail(1); } sub cmd_setup_gitattributes { badusage __ "no arguments allowed to dgit setup-useremail" if @ARGV; local $isuite = 'DGIT-SETUP-TREE'; setup_gitattrs(1); } sub cmd_setup_new_tree { badusage __ "no arguments allowed to dgit setup-tree" if @ARGV; local $isuite = 'DGIT-SETUP-TREE'; setup_new_tree(); } #---------- argument parsing and main program ---------- sub cmd_version { print "dgit version $our_version\n" or confess "$!"; finish 0; } our (%valopts_long, %valopts_short); our (%funcopts_long); our @rvalopts; our (@modeopt_cfgs); sub defvalopt 
($$$$) { my ($long,$short,$val_re,$how) = @_; my $oi = { Long => $long, Short => $short, Re => $val_re, How => $how }; $valopts_long{$long} = $oi; $valopts_short{$short} = $oi; # $how subref should: # do whatever assignemnt or thing it likes with $_[0] # if the option should not be passed on to remote, @rvalopts=() # or $how can be a scalar ref, meaning simply assign the value } defvalopt '--since-version', '-v', '[^_]+|_', \$changes_since_version; defvalopt '--distro', '-d', '.+', \$idistro; defvalopt '', '-k', '.+', \$keyid; defvalopt '--existing-package','', '.*', \$existing_package; defvalopt '--build-products-dir','','.*', \$buildproductsdir; defvalopt '--clean', '', $cleanmode_re, \$cleanmode; defvalopt '--package', '-p', $package_re, \$package; defvalopt '--quilt', '', $quilt_modes_re, \$quilt_mode; defvalopt '', '-C', '.+', sub { ($changesfile) = (@_); if ($changesfile =~ s#^(.*)/##) { $buildproductsdir = $1; } }; defvalopt '--initiator-tempdir','','.*', sub { ($initiator_tempdir) = (@_); $initiator_tempdir =~ m#^/# or badusage __ "--initiator-tempdir must be used specify an". " absolute, not relative, directory." }; sub defoptmodes ($@) { my ($varref, $cfgkey, $default, %optmap) = @_; my %permit; while (my ($opt,$val) = each %optmap) { $funcopts_long{$opt} = sub { $$varref = $val; }; $permit{$val} = $val; } push @modeopt_cfgs, { Var => $varref, Key => $cfgkey, Default => $default, Vals => \%permit }; } defoptmodes \$dodep14tag, qw( dep14tag want --dep14tag want --no-dep14tag no --always-dep14tag always ); sub parseopts () { my $om; if (defined $ENV{'DGIT_SSH'}) { @ssh = string_to_ssh $ENV{'DGIT_SSH'}; } elsif (defined $ENV{'GIT_SSH'}) { @ssh = ($ENV{'GIT_SSH'}); } my $oi; my $val; my $valopt = sub { my ($what) = @_; @rvalopts = ($_); if (!defined $val) { badusage f_ "%s needs a value", $what unless @ARGV; $val = shift @ARGV; push @rvalopts, $val; } badusage f_ "bad value \`%s' for %s", $val, $what unless $val =~ m/^$oi->{Re}$(?!\n)/s; my $how = $oi->{How}; if (ref($how) eq 'SCALAR') { $$how = $val; } else { $how->($val); } push @ropts, @rvalopts; }; while (@ARGV) { last unless $ARGV[0] =~ m/^-/; $_ = shift @ARGV; last if m/^--?$/; if (m/^--/) { if (m/^--dry-run$/) { push @ropts, $_; $dryrun_level=2; } elsif (m/^--damp-run$/) { push @ropts, $_; $dryrun_level=1; } elsif (m/^--no-sign$/) { push @ropts, $_; $sign=0; } elsif (m/^--help$/) { cmd_help(); } elsif (m/^--version$/) { cmd_version(); } elsif (m/^--new$/) { push @ropts, $_; $new_package=1; } elsif (m/^--([-0-9a-z]+)=(.+)/s && ($om = $opts_opt_map{$1}) && length $om->[0]) { push @ropts, $_; $om->[0] = $2; } elsif (m/^--([-0-9a-z]+):(.*)/s && !$opts_opt_cmdonly{$1} && ($om = $opts_opt_map{$1})) { push @ropts, $_; push @$om, $2; } elsif (m/^--([-0-9a-z]+)\!:(.*)/s && !$opts_opt_cmdonly{$1} && ($om = $opts_opt_map{$1})) { push @ropts, $_; my $cmd = shift @$om; @$om = ($cmd, grep { $_ ne $2 } @$om); } elsif (m/^--($quilt_options_re)$/s) { push @ropts, "--quilt=$1"; $quilt_mode = $1; } elsif (m/^--(?:ignore|include)-dirty$/s) { push @ropts, $_; $includedirty = 1; } elsif (m/^--no-quilt-fixup$/s) { push @ropts, $_; $quilt_mode = 'nocheck'; } elsif (m/^--no-rm-on-error$/s) { push @ropts, $_; $rmonerror = 0; } elsif (m/^--no-chase-dsc-distro$/s) { push @ropts, $_; $chase_dsc_distro = 0; } elsif (m/^--overwrite$/s) { push @ropts, $_; $overwrite_version = ''; } elsif (m/^--split-(?:view|brain)$/s) { push @ropts, $_; $splitview_mode = 'always'; } elsif (m/^--split-(?:view|brain)=($splitview_modes_re)$/s) { push @ropts, $_; $splitview_mode 
= $1; } elsif (m/^--overwrite=(.+)$/s) { push @ropts, $_; $overwrite_version = $1; } elsif (m/^--delayed=(\d+)$/s) { push @ropts, $_; push @dput, $_; } elsif (m/^--upstream-commitish=(.+)$/s) { push @ropts, $_; $quilt_upstream_commitish = $1; } elsif (m/^--save-(dgit-view)=(.+)$/s || m/^--(dgit-view)-save=(.+)$/s ) { my ($k,$v) = ($1,$2); push @ropts, $_; $v =~ s#^(?!refs/)#refs/heads/#; $internal_object_save{$k} = $v; } elsif (m/^--(no-)?rm-old-changes$/s) { push @ropts, $_; $rmchanges = !$1; } elsif (m/^--deliberately-($deliberately_re)$/s) { push @ropts, $_; push @deliberatelies, $&; } elsif (m/^--force-(.*)/ && defined $forceopts{$1}) { push @ropts, $&; $forceopts{$1} = 1; $_=''; } elsif (m/^--force-/) { print STDERR f_ "%s: warning: ignoring unknown force option %s\n", $us, $_; $_=''; } elsif (m/^--config-lookup-explode=(.+)$/s) { # undocumented, for testing push @ropts, $_; $gitcfgs{cmdline}{$1} = 'CONFIG-LOOKUP-EXPLODE'; # ^ it's supposed to be an array ref } elsif (m/^(--[-0-9a-z]+)(=|$)/ && ($oi = $valopts_long{$1})) { $val = $2 ? $' : undef; #'; $valopt->($oi->{Long}); } elsif ($funcopts_long{$_}) { push @ropts, $_; $funcopts_long{$_}(); } else { badusage f_ "unknown long option \`%s'", $_; } } else { while (m/^-./s) { if (s/^-n/-/) { push @ropts, $&; $dryrun_level=2; } elsif (s/^-L/-/) { push @ropts, $&; $dryrun_level=1; } elsif (s/^-h/-/) { cmd_help(); } elsif (s/^-D/-/) { push @ropts, $&; $debuglevel++; enabledebug(); } elsif (s/^-N/-/) { push @ropts, $&; $new_package=1; } elsif (m/^-m/) { push @ropts, $&; push @changesopts, $_; $_ = ''; } elsif (s/^-wn$//s) { push @ropts, $&; $cleanmode = 'none'; } elsif (s/^-wg(f?)(a?)$//s) { push @ropts, $&; $cleanmode = 'git'; $cleanmode .= '-ff' if $1; $cleanmode .= ',always' if $2; } elsif (s/^-wd(d?)([na]?)$//s) { push @ropts, $&; $cleanmode = 'dpkg-source'; $cleanmode .= '-d' if $1; $cleanmode .= ',no-check' if $2 eq 'n'; $cleanmode .= ',all-check' if $2 eq 'a'; } elsif (s/^-wc$//s) { push @ropts, $&; $cleanmode = 'check'; } elsif (s/^-wci$//s) { push @ropts, $&; $cleanmode = 'check,ignores'; } elsif (s/^-c([^=]*)\=(.*)$//s) { push @git, '-c', $&; $gitcfgs{cmdline}{$1} = [ $2 ]; } elsif (s/^-c([^=]+)$//s) { push @git, '-c', $&; $gitcfgs{cmdline}{$1} = [ 'true' ]; } elsif (m/^-[a-zA-Z]/ && ($oi = $valopts_short{$&})) { $val = $'; #'; $val = undef unless length $val; $valopt->($oi->{Short}); $_ = ''; } else { badusage f_ "unknown short option \`%s'", $_; } } } } } sub check_env_sanity () { my $blocked = new POSIX::SigSet; sigprocmask SIG_UNBLOCK, $blocked, $blocked or confess "$!"; eval { foreach my $name (qw(PIPE CHLD)) { my $signame = "SIG$name"; my $signum = eval "POSIX::$signame" // die; die f_ "%s is set to something other than SIG_DFL\n", $signame if defined $SIG{$name} and $SIG{$name} ne 'DEFAULT'; $blocked->ismember($signum) and die f_ "%s is blocked\n", $signame; } }; return unless $@; chomp $@; fail f_ <[0]; $om->[0] = $v; } foreach my $c (access_cfg_cfgs("opts-$k")) { my @vl = map { $_ ? 
@$_ : () } map { $gitcfgs{$_}{$c} } reverse @gitcfgsources; printdebug "CL $c ", (join " ", map { shellquote } @vl), "\n" if $debuglevel >= 4; next unless @vl; badcfg f_ "cannot configure options for %s", $k if $opts_opt_cmdonly{$k}; my $insertpos = $opts_cfg_insertpos{$k}; @$om = ( @$om[0..$insertpos-1], @vl, @$om[$insertpos..$#$om] ); } } if (!defined $rmchanges) { local $access_forpush; $rmchanges = access_cfg_bool(0, 'rm-old-changes'); } if (!defined $quilt_mode) { local $access_forpush; $quilt_mode = cfg('dgit.force.quilt-mode', 'RETURN-UNDEF') // access_cfg('quilt-mode', 'RETURN-UNDEF') // 'linear'; $quilt_mode =~ m/^($quilt_modes_re)$/ or badcfg f_ "unknown quilt-mode \`%s'", $quilt_mode; $quilt_mode = $1; } $quilt_mode =~ s/^(baredebian)\+git$/$1/; foreach my $moc (@modeopt_cfgs) { local $access_forpush; my $vr = $moc->{Var}; next if defined $$vr; $$vr = access_cfg($moc->{Key}, 'RETURN-UNDEF') // $moc->{Default}; my $v = $moc->{Vals}{$$vr}; badcfg f_ "unknown %s setting \`%s'", $moc->{Key}, $$vr unless defined $v; $$vr = $v; } { local $access_forpush; default_from_access_cfg(\$cleanmode, 'clean-mode', 'dpkg-source', $cleanmode_re); } $buildproductsdir //= access_cfg('build-products-dir', 'RETURN-UNDEF'); $buildproductsdir //= '..'; $bpd_glob = $buildproductsdir; $bpd_glob =~ s#[][\\{}*?~]#\\$&#g; } setlocale(LC_MESSAGES, ""); textdomain("dgit"); if ($ENV{$fakeeditorenv}) { git_slurp_config(); quilt_fixup_editor(); } parseopts(); check_env_sanity(); print STDERR __ "DRY RUN ONLY\n" if $dryrun_level > 1; print STDERR __ "DAMP RUN - WILL MAKE LOCAL (UNSIGNED) CHANGES\n" if $dryrun_level == 1; if (!@ARGV) { print STDERR __ $helpmsg or confess "$!"; finish 8; } $cmd = $subcommand = shift @ARGV; $cmd =~ y/-/_/; my $pre_fn = ${*::}{"pre_$cmd"}; $pre_fn->() if $pre_fn; if ($invoked_in_git_tree) { changedir_git_toplevel(); record_maindir(); } git_slurp_config(); my $fn = ${*::}{"cmd_$cmd"}; $fn or badusage f_ "unknown operation %s", $cmd; $fn->(); finish 0;
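# Subcommand dispatch works by mapping `-' to `_' in the command name
# and looking the result up in the main package symbol table: e.g.
# `dgit build-source' runs cmd_build_source().  An optional pre_<cmd>
# hook, looked up the same way, runs first, before we change to the
# git toplevel and slurp the git config.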