Diffstat (limited to 'dh_compress')
-rwxr-xr-x  dh_compress  275
1 file changed, 154 insertions(+), 121 deletions(-)
diff --git a/dh_compress b/dh_compress
index e908dd82..ac726879 100755
--- a/dh_compress
+++ b/dh_compress
@@ -1,4 +1,4 @@
-#!/usr/bin/perl -w
+#!/usr/bin/perl
=head1 NAME
@@ -7,9 +7,13 @@ dh_compress - compress files and fix symlinks in package build directories
=cut
use strict;
-use Cwd;
+use warnings;
+use Cwd qw(getcwd abs_path);
+use File::Spec::Functions qw(abs2rel);
use Debian::Debhelper::Dh_Lib;
+our $VERSION = DH_BUILTIN_VERSION;
+
=head1 SYNOPSIS
B<dh_compress> [S<I<debhelper options>>] [B<-X>I<item>] [B<-A>] [S<I<file> ...>]
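As a usage note (an assumed invocation, not part of this diff): extra files can be named directly on the command line, and unwanted matches skipped with B<-X>, e.g.

	dh_compress -X.pdf usr/share/doc/foo/manual.txt

where "foo" and the manual path are hypothetical; B<-X> excludes any gathered file whose name contains the given substring.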
@@ -75,136 +79,165 @@ Debian policy, version 3.0
init();
-foreach my $package (@{$dh{DOPACKAGES}}) {
- my $tmp=tmpdir($package);
+on_pkgs_in_parallel {
+ my $olddir;
- my $compress=pkgfile($package,"compress");
+ foreach my $package (@_) {
+ my $tmp=tmpdir($package);
- # Run the file name gathering commands from within the directory
- # structure that will be effected.
- next unless -d $tmp;
- my $olddir=getcwd();
- verbose_print("cd $tmp");
- chdir($tmp) || error("Can't cd to $tmp: $!");
+ my $compress=pkgfile($package,"compress");
- # Figure out what files to compress.
- my @files;
- # First of all, deal with any files specified right on the command line.
- if (($package eq $dh{FIRSTPACKAGE} || $dh{PARAMS_ALL}) && @ARGV) {
- push @files, @ARGV;
- }
- if ($compress) {
- # The compress file is a sh script that outputs the files to be compressed
- # (typically using find).
- warning("$compress is deprecated; use -X or avoid calling dh_compress instead");
- push @files, split(/\n/,`sh $olddir/$compress 2>/dev/null`);
- }
- else {
- # Note that all the excludes of odd things like _z
- # are because gzip refuses to compress such files, assuming
- # they are zip files. I looked at the gzip source to get the
- # complete list of such extensions: ".gz", ".z", ".taz",
- # ".tgz", "-gz", "-z", "_z"
- push @files, split(/\n/,`
- find usr/info usr/share/info usr/man usr/share/man usr/X11*/man -type f ! -iname "*.gz" \\
- ! -iname "*.gif" ! -iname "*.png" ! -iname "*.jpg" \\
- ! -iname "*.jpeg" \\
- 2>/dev/null || true;
- find usr/share/doc \\
- \\( -type d -name _sources -prune -false \\) -o \\
- -type f \\( -size +4k -or -name "changelog*" -or -name "NEWS*" \\) \\
- \\( -name changelog.html -or ! -iname "*.htm*" \\) \\
- ! -iname "*.gif" ! -iname "*.png" ! -iname "*.jpg" \\
- ! -iname "*.jpeg" ! -iname "*.gz" ! -iname "*.taz" \\
- ! -iname "*.tgz" ! -iname "*.z" ! -iname "*.bz2" \\
- ! -iname "*-gz" ! -iname "*-z" ! -iname "*_z" \\
- ! -iname "*.epub" ! -iname "*.jar" ! -iname "*.zip" \\
- ! -iname "*.odg" ! -iname "*.odp" ! -iname "*.odt" \\
- ! -iname ".htaccess" ! -iname "*.css" \\
- ! -iname "*.svg" ! -iname "*.svgz" ! -iname "*.js" \\
- ! -name "index.sgml" ! -name "objects.inv" ! -name "*.map" \\
- ! -name "copyright" 2>/dev/null || true;
- find usr/share/fonts/X11 -type f -name "*.pcf" 2>/dev/null || true;
- `);
- }
+ # Run the file name gathering commands from within the directory
+	# structure that will be affected.
+ next unless -d $tmp;
+ my $ignore_doc_dirs = '-name _sources';
+ if (not compat(11)) {
+ my $target_package = compute_doc_main_package($package);
+ $ignore_doc_dirs .= qq{ -o -path "usr/share/doc/${package}/examples"};
+ $ignore_doc_dirs .= qq{ -o -path "usr/share/doc/${target_package}/examples"}
+ if $target_package and $target_package ne $package;
+ }
+ $olddir = getcwd() if not defined $olddir;
+ verbose_print("cd $tmp");
+ chdir($tmp) || error("Can't cd to $tmp: $!");
+
+ # Figure out what files to compress.
+ my @files;
+ # First of all, deal with any files specified right on the command line.
+ if (($package eq $dh{FIRSTPACKAGE} || $dh{PARAMS_ALL}) && @ARGV) {
+ push @files, map { s{^/+}{}; $_ } @ARGV;
+ }
+ if ($compress) {
+ # The compress file is a sh script that outputs the files to be compressed
+ # (typically using find).
+ warning("$compress is deprecated; use -X or avoid calling dh_compress instead");
+ push @files, split(/\n/,`sh $olddir/$compress 2>/dev/null`);
+ } else {
+ # Note that all the excludes of odd things like _z
+ # are because gzip refuses to compress such files, assuming
+ # they are zip files. I looked at the gzip source to get the
+ # complete list of such extensions: ".gz", ".z", ".taz",
+ # ".tgz", "-gz", "-z", "_z"
+ push @files, split(/\n/,`
+ find usr/share/info usr/share/man -type f ! -iname "*.gz" \\
+ ! -iname "*.gif" ! -iname "*.png" ! -iname "*.jpg" \\
+ ! -iname "*.jpeg" \\
+ 2>/dev/null || true;
+ find usr/share/doc \\
+ \\( -type d \\( $ignore_doc_dirs \\) -prune -false \\) -o \\
+ -type f \\( -size +4k -o -name "changelog*" -o -name "NEWS*" \\) \\
+ \\( -name changelog.html -o ! -iname "*.htm*" \\) \\
+ ! -iname "*.xhtml" \\
+ ! -iname "*.gif" ! -iname "*.png" ! -iname "*.jpg" \\
+ ! -iname "*.jpeg" ! -iname "*.gz" ! -iname "*.taz" \\
+ ! -iname "*.tgz" ! -iname "*.z" ! -iname "*.bz2" \\
+ ! -iname "*-gz" ! -iname "*-z" ! -iname "*_z" \\
+ ! -iname "*.epub" ! -iname "*.jar" ! -iname "*.zip" \\
+ ! -iname "*.odg" ! -iname "*.odp" ! -iname "*.odt" \\
+ ! -iname ".htaccess" ! -iname "*.css" \\
+ ! -iname "*.xz" ! -iname "*.lz" ! -iname "*.lzma" \\
+ ! -iname "*.haddock" ! -iname "*.hs" \\
+ ! -iname "*.woff" ! -iname "*.woff2" \\
+ ! -iname "*.svg" ! -iname "*.svgz" ! -iname "*.js" \\
+ ! -name "index.sgml" ! -name "objects.inv" ! -name "*.map" \\
+ ! -name "*.devhelp2" ! -name "search_index.json" \\
+ ! -name "copyright" 2>/dev/null || true;
+ find usr/share/fonts/X11 -type f -name "*.pcf" 2>/dev/null || true;
+ `);
+ }
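For context, the deprecated compress file handled above is a shell script that prints one path per line, relative to the package build directory, typically via find. A hypothetical example (the package name and path are made up):

	#!/bin/sh
	find usr/share/doc/mypkg/examples -type f -size +4k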
- # Exclude files from compression.
- if (@files && defined($dh{EXCLUDE}) && $dh{EXCLUDE}) {
- my @new=();
- foreach (@files) {
- my $ok=1;
- foreach my $x (@{$dh{EXCLUDE}}) {
- if (/\Q$x\E/) {
- $ok='';
- last;
- }
- }
- push @new,$_ if $ok;
+ # Exclude files from compression.
+ if (@files && defined($dh{EXCLUDE}) && $dh{EXCLUDE}) {
+ my @new = grep { not excludefile($_) } @files;
+ @files=@new;
}
- @files=@new;
- }
-
- # Look for files with hard links. If we are going to compress both,
- # we can preserve the hard link across the compression and save
- # space in the end.
- my @f=();
- my %hardlinks;
- my %seen;
- foreach (@files) {
- my ($dev, $inode, undef, $nlink)=stat($_);
- if ($nlink > 1) {
- if (! $seen{"$inode.$dev"}) {
- $seen{"$inode.$dev"}=$_;
- push @f, $_;
- }
- else {
- # This is a hardlink.
- $hardlinks{$_}=$seen{"$inode.$dev"};
- }
+
+ # Look for files with hard links. If we are going to compress both,
+ # we can preserve the hard link across the compression and save
+ # space in the end.
+ my ($unique_files, $hardlinks) = find_hardlinks(@files);
+ my @f = @{$unique_files};
+
+ # normalize file names and remove duplicates
+ my $norm_from_dir = $tmp;
+ if ($norm_from_dir !~ m{^/}) {
+ $norm_from_dir = "${olddir}/${tmp}";
}
- else {
- push @f, $_;
+ my $resolved = abs_path($norm_from_dir)
+ or error("Cannot resolve $norm_from_dir: $!");
+ my @normalized = normalize_paths($norm_from_dir, $resolved, $tmp, @f);
+ my %uniq_f; @uniq_f{@normalized} = ();
+ @f = sort keys %uniq_f;
+
+	# Do it, in batches via xargs.
+ if (@f) {
+ # Make executables not be anymore.
+ xargs(\@f,"chmod","a-x");
+ xargs(\@f,"gzip","-9nf");
}
- }
- if (@f) {
- # Make executables not be anymore.
- xargs(\@f,"chmod","a-x");
-
- xargs(\@f,"gzip","-9nf");
- }
-
- # Now change over any files we can that used to be hard links so
- # they are again.
- foreach (keys %hardlinks) {
- # Remove old file.
- doit("rm","-f","$_");
- # Make new hardlink.
- doit("ln","$hardlinks{$_}.gz","$_.gz");
- }
+	# Now re-create hard links between the files that were hard links
+	# before compression, pointing them at the compressed names.
+ foreach (keys %{$hardlinks}) {
+ # Remove old file.
+ rm_files($_);
+ # Make new hardlink.
+ doit("ln", "-f", "$hardlinks->{$_}.gz", "$_.gz");
+ }
- verbose_print("cd '$olddir'");
- chdir($olddir);
-
- # Fix up symlinks that were pointing to the uncompressed files.
- my %links = map { chomp; $_ => 1 } `find $tmp -type l`;
- my $changed;
- # Keep looping through looking for broken links until no more
- # changes are made. This is done in case there are links pointing
- # to links, pointing to compressed files.
- do {
- $changed = 0;
- foreach my $link (keys %links) {
- my ($directory) = $link =~ m:(.*)/:;
- my $linkval = readlink($link);
- if (! -e "$directory/$linkval" && -e "$directory/$linkval.gz") {
- doit("rm","-f",$link);
- doit("ln","-sf","$linkval.gz","$link.gz");
- delete $links{$link};
- $changed++;
+ verbose_print("cd '$olddir'");
+ chdir($olddir);
+
+ # Fix up symlinks that were pointing to the uncompressed files.
+ my %links = map { chomp; $_ => 1 } qx_cmd('find', $tmp, '-type', 'l');
+ my $changed;
+ # Keep looping through looking for broken links until no more
+ # changes are made. This is done in case there are links pointing
+ # to links, pointing to compressed files.
+ do {
+ $changed = 0;
+ foreach my $link (keys %links) {
+ my ($directory) = $link =~ m:(.*)/:;
+ my $linkval = readlink($link);
+ if (! -e "$directory/$linkval" && -e "$directory/$linkval.gz") {
+ # Avoid duplicate ".gz.gz" if the link already has
+ # the .gz extension. This can happen via
+ # dh_installman when the .so is already compressed
+ # and then dh_installman reencodes the target
+ # manpage.
+ my $link_name = $link;
+ $link_name .= '.gz' if $link_name !~ m/[.]gz$/;
+ rm_files($link, "$link.gz");
+ make_symlink_raw_target("$linkval.gz", $link_name);
+ delete $links{$link};
+ $changed++;
+ }
}
+ } while $changed;
+ }
+};
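The find_hardlinks() helper called above comes from Dh_Lib; a minimal sketch of the contract it would have to satisfy, reconstructed from the inline logic this commit removes (the real Dh_Lib implementation may differ):

	# Sketch only: group paths by device/inode so that just the first
	# name of each hardlink set gets compressed; the remaining names
	# are re-linked to it afterwards.
	sub find_hardlinks_sketch {
		my @files = @_;
		my (@unique, %hardlinks, %seen);
		foreach my $file (@files) {
			my ($dev, $inode, undef, $nlink) = stat($file);
			if ($nlink > 1) {
				if (! $seen{"$inode.$dev"}) {
					# First name seen for this inode; compress this one.
					$seen{"$inode.$dev"} = $file;
					push(@unique, $file);
				}
				else {
					# A hardlink to a file already scheduled.
					$hardlinks{$file} = $seen{"$inode.$dev"};
				}
			}
			else {
				push(@unique, $file);
			}
		}
		return (\@unique, \%hardlinks);
	}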
+
+sub normalize_paths {
+ my ($cwd, $cwd_resolved, $tmp, @paths) = @_;
+ my @normalized;
+ my $prefix = qr{\Q${tmp}/};
+
+ for my $path (@paths) {
+ my $abs = abs_path($path);
+ if (not defined($abs)) {
+ my $err = $!;
+ my $alt = $path;
+ if ($alt =~ s/^$prefix//) {
+ $abs = abs_path($alt);
+ }
+ error(qq{Cannot resolve "$path": $err (relative to "${cwd}")})
+ if (not defined($abs));
+ warning(qq{Interpreted "$path" as "$alt"});
}
- } while $changed;
+ error("${abs} does not exist") if not -e $abs;
+ push(@normalized, abs2rel($abs, $cwd_resolved));
+ }
+ return @normalized;
}
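To illustrate normalize_paths() with hypothetical values: with $cwd_resolved set to "/build/pkg/debian/foo" and $tmp to "debian/foo", both "usr/share/man/man1/foo.1" and "debian/foo/usr/share/man/man1/foo.1" resolve to the same absolute path (the second via the prefix-stripping fallback), so both are returned as "usr/share/man/man1/foo.1"; the caller's %uniq_f hash slice then collapses such duplicates before compression.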
=head1 SEE ALSO