summaryrefslogtreecommitdiff
path: root/tools
diff options
context:
space:
mode:
Diffstat (limited to 'tools')
-rwxr-xr-xtools/backup/hot-backup.py.in359
-rwxr-xr-xtools/bdb/erase-all-text-data.py94
-rw-r--r--tools/bdb/skel.py226
-rwxr-xr-xtools/bdb/svn-bdb-view.py295
-rw-r--r--tools/bdb/svnfs.py97
-rwxr-xr-xtools/bdb/whatis-rep.py76
-rw-r--r--tools/buildbot/master/README7
-rw-r--r--tools/buildbot/slaves/README92
-rwxr-xr-xtools/buildbot/slaves/bb-openbsd/svnbuild.sh28
-rwxr-xr-xtools/buildbot/slaves/bb-openbsd/svncheck-bindings.sh32
-rwxr-xr-xtools/buildbot/slaves/bb-openbsd/svncheck.sh36
-rwxr-xr-xtools/buildbot/slaves/bb-openbsd/svnclean.sh33
-rw-r--r--tools/buildbot/slaves/i686-debian-sarge1/mount-ramdrive.c35
-rwxr-xr-xtools/buildbot/slaves/i686-debian-sarge1/svnbuild.sh49
-rwxr-xr-xtools/buildbot/slaves/i686-debian-sarge1/svncheck.sh45
-rwxr-xr-xtools/buildbot/slaves/i686-debian-sarge1/svnclean.sh32
-rwxr-xr-xtools/buildbot/slaves/i686-debian-sarge1/svnlog.sh33
-rw-r--r--tools/buildbot/slaves/i686-debian-sarge1/unmount-ramdrive.c36
-rw-r--r--tools/buildbot/slaves/svn-sparc-solaris/mount-tmpfs.c38
-rw-r--r--tools/buildbot/slaves/svn-sparc-solaris/rebuild-svn-bits.sh219
-rw-r--r--tools/buildbot/slaves/svn-sparc-solaris/serf.patch19
-rwxr-xr-xtools/buildbot/slaves/svn-sparc-solaris/svnbuild.sh51
-rwxr-xr-xtools/buildbot/slaves/svn-sparc-solaris/svncheck.sh42
-rwxr-xr-xtools/buildbot/slaves/svn-sparc-solaris/svncleanup.sh30
-rw-r--r--tools/buildbot/slaves/svn-sparc-solaris/svnenv.sh23
-rw-r--r--tools/buildbot/slaves/svn-sparc-solaris/twisted.patch11
-rwxr-xr-xtools/buildbot/slaves/svn-x64-centos/list-svn-deps.sh34
-rwxr-xr-xtools/buildbot/slaves/svn-x64-centos/svnbuild.sh80
-rwxr-xr-xtools/buildbot/slaves/svn-x64-centos/svncheck-bindings.sh40
-rwxr-xr-xtools/buildbot/slaves/svn-x64-centos/svncheck.sh54
-rwxr-xr-xtools/buildbot/slaves/svn-x64-centos/svnclean.sh32
-rwxr-xr-xtools/buildbot/slaves/svn-x64-centos/svnlog.sh33
-rwxr-xr-xtools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svnbuild.sh44
-rwxr-xr-xtools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svncheck.sh40
-rwxr-xr-xtools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svnclean.sh31
-rwxr-xr-xtools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svnlog.sh31
-rwxr-xr-xtools/buildbot/slaves/svn-x64-macosx-gnu-shared/svnbuild.sh44
-rwxr-xr-xtools/buildbot/slaves/svn-x64-macosx-gnu-shared/svncheck.sh40
-rwxr-xr-xtools/buildbot/slaves/svn-x64-macosx-gnu-shared/svnclean.sh31
-rwxr-xr-xtools/buildbot/slaves/svn-x64-macosx-gnu-shared/svnlog.sh31
-rwxr-xr-xtools/buildbot/slaves/svn-x64-macosx/mkramdisk.sh63
-rwxr-xr-xtools/buildbot/slaves/svn-x64-macosx/rmramdisk.sh62
-rw-r--r--tools/buildbot/slaves/svn-x64-macosx/setenv.sh69
-rwxr-xr-xtools/buildbot/slaves/svn-x64-macosx/svnbuild-bindings.sh45
-rwxr-xr-xtools/buildbot/slaves/svn-x64-macosx/svnbuild.sh108
-rwxr-xr-xtools/buildbot/slaves/svn-x64-macosx/svncheck-bindings.sh59
-rwxr-xr-xtools/buildbot/slaves/svn-x64-macosx/svncheck.sh98
-rwxr-xr-xtools/buildbot/slaves/svn-x64-macosx/svnclean.sh27
-rwxr-xr-xtools/buildbot/slaves/ubuntu-x64/svnbuild.sh51
-rwxr-xr-xtools/buildbot/slaves/ubuntu-x64/svncheck-bindings.sh39
-rwxr-xr-xtools/buildbot/slaves/ubuntu-x64/svncheck.sh40
-rwxr-xr-xtools/buildbot/slaves/ubuntu-x64/svnclean.sh29
-rwxr-xr-xtools/buildbot/slaves/ubuntu-x64/svnlog.sh33
-rw-r--r--tools/buildbot/slaves/win32-SharpSvn/svn-config.cmd.template50
-rw-r--r--tools/buildbot/slaves/win32-SharpSvn/svntest-bindings.cmd98
-rw-r--r--tools/buildbot/slaves/win32-SharpSvn/svntest-build-bindings.cmd48
-rw-r--r--tools/buildbot/slaves/win32-SharpSvn/svntest-build.cmd36
-rw-r--r--tools/buildbot/slaves/win32-SharpSvn/svntest-cleanup.cmd80
-rw-r--r--tools/buildbot/slaves/win32-SharpSvn/svntest-javahl.cmd46
-rw-r--r--tools/buildbot/slaves/win32-SharpSvn/svntest-template.cmd24
-rw-r--r--tools/buildbot/slaves/win32-SharpSvn/svntest-test.cmd98
-rw-r--r--tools/buildbot/slaves/win32-xp-VS2005/config.bat40
-rw-r--r--tools/buildbot/slaves/win32-xp-VS2005/do_all.bat24
-rw-r--r--tools/buildbot/slaves/win32-xp-VS2005/svnbuild.bat56
-rw-r--r--tools/buildbot/slaves/win32-xp-VS2005/svncheck.bat76
-rw-r--r--tools/buildbot/slaves/win32-xp-VS2005/svnclean.bat30
-rw-r--r--tools/buildbot/slaves/win32-xp-VS2005/svnlog.bat30
-rw-r--r--tools/buildbot/slaves/xp-vc60-ia32/config.bat.tmpl44
-rw-r--r--tools/buildbot/slaves/xp-vc60-ia32/svnbuild.bat77
-rw-r--r--tools/buildbot/slaves/xp-vc60-ia32/svncheck.bat51
-rw-r--r--tools/buildbot/slaves/xp-vc60-ia32/svnclean.bat28
-rw-r--r--tools/buildbot/slaves/xp-vc60-ia32/svnlog.bat25
-rw-r--r--tools/client-side/bash_completion1625
-rwxr-xr-xtools/client-side/bash_completion_test200
-rwxr-xr-xtools/client-side/change-svn-wc-format.py420
-rwxr-xr-xtools/client-side/mergeinfo-sanitizer.py319
-rwxr-xr-xtools/client-side/server-version.py92
-rwxr-xr-xtools/client-side/svn-graph.pl254
-rw-r--r--tools/client-side/svn-mergeinfo-normalizer/analyze-cmd.c61
-rw-r--r--tools/client-side/svn-mergeinfo-normalizer/help-cmd.c191
-rw-r--r--tools/client-side/svn-mergeinfo-normalizer/log.c1032
-rw-r--r--tools/client-side/svn-mergeinfo-normalizer/logic.c1915
-rw-r--r--tools/client-side/svn-mergeinfo-normalizer/mergeinfo-normalizer.h398
-rw-r--r--tools/client-side/svn-mergeinfo-normalizer/missing-branches.c365
-rw-r--r--tools/client-side/svn-mergeinfo-normalizer/normalize-cmd.c53
-rw-r--r--tools/client-side/svn-mergeinfo-normalizer/remove-branches-cmd.c59
-rw-r--r--tools/client-side/svn-mergeinfo-normalizer/svn-mergeinfo-normalizer.c974
-rw-r--r--tools/client-side/svn-mergeinfo-normalizer/wc_mergeinfo.c491
-rwxr-xr-xtools/client-side/svn-ssl-fingerprints.sh33
-rwxr-xr-xtools/client-side/svn-vendor.py1188
-rwxr-xr-xtools/client-side/svn-viewspec.py348
-rw-r--r--tools/client-side/svnconflict/README27
-rw-r--r--tools/client-side/svnconflict/svnconflict.c981
-rwxr-xr-xtools/client-side/wcfind81
-rwxr-xr-xtools/dev/analyze-svnlogs.py193
-rw-r--r--tools/dev/aprerr.txt139
-rw-r--r--tools/dev/benchmarks/RepoPerf/ClearMemory.cpp55
-rw-r--r--tools/dev/benchmarks/RepoPerf/TimeWin.cpp118
-rw-r--r--tools/dev/benchmarks/RepoPerf/copy_repo.py313
-rw-r--r--tools/dev/benchmarks/RepoPerf/win_repo_bench.py268
-rwxr-xr-xtools/dev/benchmarks/large_dirs/create_bigdir.sh232
-rwxr-xr-xtools/dev/benchmarks/suite1/benchmark.py1309
-rwxr-xr-xtools/dev/benchmarks/suite1/cronjob102
-rw-r--r--tools/dev/benchmarks/suite1/crontab.entry5
-rwxr-xr-xtools/dev/benchmarks/suite1/generate_charts60
-rwxr-xr-xtools/dev/benchmarks/suite1/run145
-rw-r--r--tools/dev/benchmarks/suite1/run.bat105
-rwxr-xr-xtools/dev/build-svn-deps-win.pl919
-rwxr-xr-xtools/dev/check-license.py142
-rwxr-xr-xtools/dev/contribulyze.py767
-rwxr-xr-xtools/dev/datecheck.py102
-rwxr-xr-xtools/dev/find-bad-style.py57
-rwxr-xr-xtools/dev/find-control-statements.py178
-rwxr-xr-xtools/dev/find-unmoved-deprecated.sh36
-rw-r--r--tools/dev/fsfs-access-map.c794
-rw-r--r--tools/dev/gdb-py/README29
-rw-r--r--tools/dev/gdb-py/svndbg/__init__.py0
-rw-r--r--tools/dev/gdb-py/svndbg/printers.py417
-rwxr-xr-xtools/dev/gen-javahl-errors.py86
-rwxr-xr-xtools/dev/gen-py-errors.py109
-rwxr-xr-xtools/dev/gen_junit_report.py301
-rwxr-xr-xtools/dev/gnuify-changelog.pl164
-rwxr-xr-xtools/dev/graph-dav-servers.py194
-rwxr-xr-xtools/dev/histogram.py54
-rw-r--r--tools/dev/iz/defect.dem6
-rwxr-xr-xtools/dev/iz/ff2csv.command27
-rwxr-xr-xtools/dev/iz/ff2csv.py189
-rwxr-xr-xtools/dev/iz/find-fix.py454
-rwxr-xr-xtools/dev/iz/run-queries.sh62
-rwxr-xr-xtools/dev/lock-check.py114
-rwxr-xr-xtools/dev/log_revnum_change_asf.py97
-rwxr-xr-xtools/dev/merge-graph.py58
-rw-r--r--tools/dev/mergegraph/__init__.py20
-rw-r--r--tools/dev/mergegraph/mergegraph.py313
-rw-r--r--tools/dev/mergegraph/save_as_sh.py137
-rwxr-xr-xtools/dev/min-includes.sh80
-rwxr-xr-xtools/dev/mklog.py49
-rwxr-xr-xtools/dev/mlpatch.py167
-rwxr-xr-xtools/dev/normalize-dump.py137
-rwxr-xr-xtools/dev/po-merge.py197
-rwxr-xr-xtools/dev/prebuild-cleanup.sh45
-rwxr-xr-xtools/dev/random-commits.py50
-rwxr-xr-xtools/dev/remove-trailing-whitespace.sh24
-rwxr-xr-xtools/dev/sbox-ospath.py64
-rwxr-xr-xtools/dev/scramble-tree.py304
-rwxr-xr-xtools/dev/stress.pl498
-rw-r--r--tools/dev/svn-dev.el566
-rw-r--r--tools/dev/svn-dev.vim76
-rw-r--r--tools/dev/svn-entries.el156
-rwxr-xr-xtools/dev/svn-merge-revs.py122
-rw-r--r--tools/dev/svnmover/linenoise/LICENSE25
-rw-r--r--tools/dev/svnmover/linenoise/README.markdown52
-rw-r--r--tools/dev/svnmover/linenoise/linenoise.c1112
-rw-r--r--tools/dev/svnmover/linenoise/linenoise.h66
-rw-r--r--tools/dev/svnmover/merge3.c1399
-rw-r--r--tools/dev/svnmover/ra.c586
-rw-r--r--tools/dev/svnmover/scanlog.c517
-rw-r--r--tools/dev/svnmover/svnmover.c4759
-rw-r--r--tools/dev/svnmover/svnmover.h295
-rw-r--r--tools/dev/svnmover/util.c59
-rwxr-xr-xtools/dev/svnqlite3-dump50
-rw-r--r--tools/dev/svnraisetreeconflict/svnraisetreeconflict.c415
-rwxr-xr-xtools/dev/trails.py229
-rw-r--r--tools/dev/unix-build/Makefile.svn2112
-rw-r--r--tools/dev/unix-build/README96
-rwxr-xr-xtools/dev/verify-history.py97
-rwxr-xr-xtools/dev/warn-ignored-err.sh83
-rwxr-xr-xtools/dev/wc-format.py64
-rwxr-xr-xtools/dev/wc-ng/bump-to-19.py357
-rwxr-xr-xtools/dev/wc-ng/count-progress.py117
-rwxr-xr-xtools/dev/wc-ng/gather-data.sh78
-rwxr-xr-xtools/dev/wc-ng/graph-data.py70
-rwxr-xr-xtools/dev/wc-ng/populate-pristine.py108
-rw-r--r--tools/dev/wc-ng/svn-wc-db-tester.c269
-rwxr-xr-xtools/dev/which-error.py142
-rw-r--r--tools/dev/windows-build/Makefile155
-rw-r--r--tools/dev/windows-build/README22
-rw-r--r--tools/dev/windows-build/document-version.pl48
-rw-r--r--tools/dev/x509-parser.c179
-rw-r--r--tools/diff/diff.c164
-rw-r--r--tools/diff/diff3.c228
-rw-r--r--tools/diff/diff4.c94
-rw-r--r--tools/dist/README.advisory78
-rw-r--r--tools/dist/README.backport65
-rwxr-xr-xtools/dist/advisory.py182
-rwxr-xr-xtools/dist/backport.pl1325
-rw-r--r--tools/dist/backport/__init__.py0
-rw-r--r--tools/dist/backport/merger.py280
-rw-r--r--tools/dist/backport/status.py705
-rw-r--r--tools/dist/backport_tests.py694
-rw-r--r--tools/dist/backport_tests_data/backport_accept.dump550
-rw-r--r--tools/dist/backport_tests_data/backport_branch_with_original_revision.dump672
-rw-r--r--tools/dist/backport_tests_data/backport_branches.dump642
-rw-r--r--tools/dist/backport_tests_data/backport_indented_entry.dump522
-rw-r--r--tools/dist/backport_tests_data/backport_multirevisions.dump534
-rw-r--r--tools/dist/backport_tests_data/backport_two_approveds.dump961
-rw-r--r--tools/dist/backport_tests_data/backport_unicode_entry.dump524
-rwxr-xr-xtools/dist/backport_tests_pl.py53
-rwxr-xr-xtools/dist/backport_tests_py.py54
-rwxr-xr-xtools/dist/checksums.py113
-rwxr-xr-xtools/dist/detect-conflicting-backports.py123
-rwxr-xr-xtools/dist/dist.sh416
-rwxr-xr-xtools/dist/extract-for-examination.sh37
-rwxr-xr-xtools/dist/merge-approved-backports.py53
-rwxr-xr-xtools/dist/nightly.sh98
l---------tools/dist/nominate.pl1
-rw-r--r--tools/dist/rat-excludes49
-rwxr-xr-xtools/dist/release.py1367
-rw-r--r--tools/dist/security/__init__.py18
-rw-r--r--tools/dist/security/_gnupg.py1486
-rw-r--r--tools/dist/security/adviser.py62
-rw-r--r--tools/dist/security/mailer.py322
-rw-r--r--tools/dist/security/mailinglist.py56
-rw-r--r--tools/dist/security/parser.py280
-rw-r--r--tools/dist/templates/download.ezt17
-rw-r--r--tools/dist/templates/rc-news.ezt22
-rw-r--r--tools/dist/templates/rc-release-ann.ezt69
-rw-r--r--tools/dist/templates/stable-news.ezt22
-rw-r--r--tools/dist/templates/stable-release-ann.ezt51
-rwxr-xr-xtools/dist/test.sh62
-rw-r--r--tools/examples/ExampleAuthn.java108
-rw-r--r--tools/examples/ExampleAuthnOld.java119
-rw-r--r--tools/examples/ExampleAuthnVeryOld.java118
-rwxr-xr-xtools/examples/SvnCLBrowse489
-rwxr-xr-xtools/examples/blame.py113
-rwxr-xr-xtools/examples/check-modified.py65
-rwxr-xr-xtools/examples/dumpprops.py88
-rwxr-xr-xtools/examples/get-location-segments.py159
-rwxr-xr-xtools/examples/getfile.py72
-rw-r--r--tools/examples/getlocks_test.c271
-rwxr-xr-xtools/examples/geturl.py47
-rw-r--r--tools/examples/headrev.c226
-rw-r--r--tools/examples/info.rb91
-rw-r--r--tools/examples/minimal_client.c285
-rwxr-xr-xtools/examples/putfile.py90
-rwxr-xr-xtools/examples/revplist.py78
-rwxr-xr-xtools/examples/svnlog2html.rb139
-rwxr-xr-xtools/examples/svnlook.py560
-rwxr-xr-xtools/examples/svnlook.rb516
-rw-r--r--tools/examples/svnput.c352
-rw-r--r--tools/examples/svnserve-sgid.c60
-rwxr-xr-xtools/examples/svnshell.py367
-rwxr-xr-xtools/examples/svnshell.rb456
-rw-r--r--tools/examples/testwrite.c276
-rwxr-xr-xtools/examples/walk-config-auth.py76
-rwxr-xr-xtools/hook-scripts/CVE-2017-9800-pre-commit.py74
-rw-r--r--tools/hook-scripts/commit-access-control.cfg.example74
-rwxr-xr-xtools/hook-scripts/commit-access-control.pl.in411
-rwxr-xr-xtools/hook-scripts/commit-email.rb122
-rwxr-xr-xtools/hook-scripts/control-chars.py130
-rwxr-xr-xtools/hook-scripts/log-police.py148
-rw-r--r--tools/hook-scripts/mailer/mailer.conf.example374
-rwxr-xr-xtools/hook-scripts/mailer/mailer.py1483
-rwxr-xr-xtools/hook-scripts/mailer/tests/mailer-init.sh116
-rw-r--r--tools/hook-scripts/mailer/tests/mailer-t1.output751
-rwxr-xr-xtools/hook-scripts/mailer/tests/mailer-t1.sh60
-rwxr-xr-xtools/hook-scripts/mailer/tests/mailer-tweak.py66
-rw-r--r--tools/hook-scripts/mailer/tests/mailer.conf365
-rwxr-xr-xtools/hook-scripts/persist-ephemeral-txnprops.py70
-rwxr-xr-xtools/hook-scripts/reject-detected-sha1-collisions.sh50
-rwxr-xr-xtools/hook-scripts/reject-known-sha1-collisions.sh50
-rwxr-xr-xtools/hook-scripts/svn2feed.py466
-rw-r--r--tools/hook-scripts/svnperms.conf.example98
-rwxr-xr-xtools/hook-scripts/svnperms.py363
-rwxr-xr-xtools/hook-scripts/validate-extensions.py110
-rw-r--r--tools/hook-scripts/validate-files.conf.example69
-rwxr-xr-xtools/hook-scripts/validate-files.py159
-rwxr-xr-xtools/hook-scripts/verify-po.py128
-rwxr-xr-xtools/po/l10n-report.py246
-rwxr-xr-xtools/po/po-update.sh123
-rwxr-xr-xtools/server-side/fsfs-reshard.py399
-rw-r--r--tools/server-side/mod_dontdothat/README53
-rw-r--r--tools/server-side/mod_dontdothat/mod_dontdothat.c711
-rwxr-xr-xtools/server-side/svn-backup-dumps.py692
-rw-r--r--tools/server-side/svn-populate-node-origins-index.c187
-rwxr-xr-xtools/server-side/svn_server_log_parse.py460
-rw-r--r--tools/server-side/svnauthz.c745
-rwxr-xr-xtools/server-side/svnpredumpfilter.py338
-rw-r--r--tools/server-side/svnpubsub/README.txt24
-rwxr-xr-xtools/server-side/svnpubsub/commit-hook.py92
-rw-r--r--tools/server-side/svnpubsub/daemonize.py339
-rwxr-xr-xtools/server-side/svnpubsub/irkerbridge.py329
l---------tools/server-side/svnpubsub/rc.d/svnpubsub1
-rwxr-xr-xtools/server-side/svnpubsub/rc.d/svnpubsub.debian62
-rwxr-xr-xtools/server-side/svnpubsub/rc.d/svnpubsub.freebsd37
-rwxr-xr-xtools/server-side/svnpubsub/rc.d/svnpubsub.solaris53
l---------tools/server-side/svnpubsub/rc.d/svnwcsub1
-rwxr-xr-xtools/server-side/svnpubsub/rc.d/svnwcsub.debian65
-rwxr-xr-xtools/server-side/svnpubsub/rc.d/svnwcsub.freebsd39
-rwxr-xr-xtools/server-side/svnpubsub/rc.d/svnwcsub.solaris56
-rwxr-xr-xtools/server-side/svnpubsub/revprop-change-hook.py90
-rw-r--r--tools/server-side/svnpubsub/svnpubsub.tac33
-rw-r--r--tools/server-side/svnpubsub/svnpubsub/__init__.py1
-rw-r--r--tools/server-side/svnpubsub/svnpubsub/client.py252
-rw-r--r--tools/server-side/svnpubsub/svnpubsub/server.py289
-rw-r--r--tools/server-side/svnpubsub/svnpubsub/util.py36
-rwxr-xr-xtools/server-side/svnpubsub/svntweet.py243
-rw-r--r--tools/server-side/svnpubsub/svnwcsub.conf.example16
-rwxr-xr-xtools/server-side/svnpubsub/svnwcsub.py559
-rwxr-xr-xtools/server-side/svnpubsub/testserver.py50
-rwxr-xr-xtools/server-side/svnpubsub/watcher.py58
-rwxr-xr-xtools/server-side/test_svn_server_log_parse.py611
-rw-r--r--tools/xslt/svnindex.css108
-rw-r--r--tools/xslt/svnindex.xsl123
304 files changed, 71890 insertions, 0 deletions
diff --git a/tools/backup/hot-backup.py.in b/tools/backup/hot-backup.py.in
new file mode 100755
index 0000000..76bc91d
--- /dev/null
+++ b/tools/backup/hot-backup.py.in
@@ -0,0 +1,359 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# hot-backup.py: perform a "hot" backup of a Subversion repository
+# and clean any old Berkeley DB logfiles after the
+# backup completes, if the repository backend is
+# Berkeley DB.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+
+# $HeadURL: https://svn.apache.org/repos/asf/subversion/branches/1.10.x/tools/backup/hot-backup.py.in $
+# $LastChangedDate: 2015-12-21 15:02:31 +0000 (Mon, 21 Dec 2015) $
+# $LastChangedBy: danielsh $
+# $LastChangedRevision: 1721179 $
+
+######################################################################
+
+import sys, os, getopt, stat, re, time, shutil, subprocess
+
+######################################################################
+# Global Settings
+
# Path to svnlook utility.  '@SVN_BINDIR@' is a placeholder that is
# substituted with the installed bin directory when this .py.in template
# is instantiated at build time.
svnlook = r"@SVN_BINDIR@/svnlook"

# Path to svnadmin utility (same build-time substitution as above).
svnadmin = r"@SVN_BINDIR@/svnadmin"

# Default number of backups to keep around (0 for "keep them all").
# Overridable via the SVN_HOTBACKUP_BACKUPS_NUMBER environment variable
# and, later, the --num-backups command-line option.
num_backups = int(os.environ.get("SVN_HOTBACKUP_BACKUPS_NUMBER", 64))

# Archive types/extensions: maps each --archive-type value to the file
# extension appended to the backup directory name when archiving.
archive_map = {
  'gz'  : ".tar.gz",
  'bz2' : ".tar.bz2",
  'zip' : ".zip",
  'zip64' : ".zip"
  }
+
def chmod_tree(path, mode, mask):
  """Recursively chmod every entry under PATH.

  For each non-symlink file and directory in the tree, clear the
  permission bits named in MASK and then set the bits named in MODE.
  """
  for parent, subdirs, filenames in os.walk(path):
    for entry in subdirs + filenames:
      target = os.path.join(parent, entry)
      if os.path.islink(target):
        continue
      current = os.stat(target)[stat.ST_MODE]
      os.chmod(target, (current & ~mask) | mode)
+
# For clearing away read-only directories
def safe_rmtree(dirname, retry=0):
  """Remove the tree at DIRNAME, making it writable first.

  A missing DIRNAME is silently ignored.  If RETRY is true, a failed
  removal is retried after increasing delays (0.5, 1, 2, 4 seconds),
  with one final unguarded attempt that propagates any error.
  """
  def rmtree(dirname):
    chmod_tree(dirname, 0o666, 0o666)
    shutil.rmtree(dirname)

  if not os.path.exists(dirname):
    return

  if retry:
    for delay in (0.5, 1, 2, 4):
      try:
        rmtree(dirname)
        break
      # Catch Exception rather than the original bare 'except:' so that
      # KeyboardInterrupt and SystemExit still propagate; any other
      # failure is retried after a short sleep.
      except Exception:
        time.sleep(delay)
    else:
      rmtree(dirname)
  else:
    rmtree(dirname)
+
+######################################################################
+# Command line arguments
+
def usage(out = sys.stdout):
  """Write this script's usage message to OUT (default: sys.stdout)."""
  scriptname = os.path.basename(sys.argv[0])
  out.write(
"""USAGE: %s [OPTIONS] REPOS_PATH BACKUP_PATH

Create a backup of the repository at REPOS_PATH in a subdirectory of
the BACKUP_PATH location, named after the youngest revision.

Options:
  --archive-type=FMT Create an archive of the backup. FMT can be one of:
                       bz2 : Creates a bzip2 compressed tar file.
                       gz : Creates a gzip compressed tar file.
                       zip : Creates a compressed zip file.
                       zip64: Creates a zip64 file (can be > 2GB).
  --num-backups=N Number of prior backups to keep around (0 to keep all).
  --verify Verify the backup.
  --help -h Print this help message and exit.

""" % (scriptname,))
+
+
# Parse the command line.  gnu_getopt (unlike plain getopt) allows
# options to appear after the positional REPOS_PATH/BACKUP_PATH args.
try:
  opts, args = getopt.gnu_getopt(sys.argv[1:], "h?", ["archive-type=",
                                                      "num-backups=",
                                                      "verify",
                                                      "help"])
except getopt.GetoptError as e:
  sys.stderr.write("ERROR: %s\n\n" % e)
  sys.stderr.flush()
  usage(sys.stderr)
  sys.exit(2)

# Archive format requested with --archive-type (None means: no archive).
archive_type = None
# Whether to run 'svnadmin verify' on the finished copy (--verify).
verify_copy = False

for o, a in opts:
  if o == "--archive-type":
    archive_type = a
  elif o == "--num-backups":
    # Overrides the num_backups default defined near the top of the file.
    num_backups = int(a)
  elif o == "--verify":
    verify_copy = True
  elif o in ("-h", "--help", "-?"):
    usage()
    sys.exit()
+
# Reject unknown archive formats before doing any filesystem work.
if archive_type not in (None, 'bz2', 'gz', 'zip', 'zip64'):
  sys.stderr.write("ERROR: Bad --archive-type\n")
  usage(sys.stderr)
  sys.exit(2)

# Exactly two positional arguments are required: REPOS_PATH, BACKUP_PATH.
if len(args) != 2:
  sys.stderr.write("ERROR: only two arguments allowed.\n\n")
  sys.stderr.flush()
  usage(sys.stderr)
  sys.exit(2)

# Path to repository
repo_dir = args[0]
# Repository name: the last component of the absolute repo path; used as
# the prefix of every backup directory/archive name.
repo = os.path.basename(os.path.abspath(repo_dir))

# Where to store the repository backup.  The backup will be placed in
# a *subdirectory* of this location, named after the youngest
# revision.
backup_dir = args[1]

# Added to the filename regexp, set when using --archive-type.
ext_re = ""

# Do we want to create an archive of the backup
if archive_type:
  if archive_type in archive_map:
    # Additionally find files with the archive extension.
    ext_re = "(" + re.escape(archive_map[archive_type]) + ")?"
  else:
    # NOTE(review): unreachable in practice -- archive_type was already
    # validated against the same set of formats above.
    sys.stderr.write("Unknown archive type '%s'.\n\n\n" % archive_type)
    sys.stderr.flush()
    usage(sys.stderr)
    sys.exit(2)
+
+######################################################################
+# Helper functions
+
def comparator(a, b):
  """cmp-style ordering for backup directory/archive names.

  The primary key is the revision number embedded in the name; ties are
  broken by the optional "-N" increment suffix, with a missing increment
  ordering first.  Callers pass distinct filenames, so equality never
  needs to be reported.
  """
  pattern = re.compile("-(?P<revision>[0-9]+)(-(?P<increment>[0-9]+))?" +
                       ext_re + "$")
  parts_a = pattern.search(a).groupdict()
  parts_b = pattern.search(b).groupdict()
  rev_a = int(parts_a['revision'])
  rev_b = int(parts_b['revision'])
  if rev_a != rev_b:
    return -1 if rev_a < rev_b else 1
  inc_a = parts_a['increment']
  inc_b = parts_b['increment']
  if not inc_a:
    return -1
  if not inc_b:
    return 1
  return -1 if int(inc_a) < int(inc_b) else 1
+
def get_youngest_revision():
  """Examine the repository REPO_DIR using the svnlook binary
  specified by SVNLOOK, and return the youngest revision as a string.

  Raises Exception if svnlook writes anything to stderr.
  """
  p = subprocess.Popen([svnlook, 'youngest', '--', repo_dir],
                       stdin=subprocess.PIPE,
                       stdout=subprocess.PIPE,
                       stderr=subprocess.PIPE)
  # communicate() drains both pipes concurrently and waits for the child
  # to exit.  The original read stdout to EOF before touching stderr,
  # which can deadlock if the child fills the stderr pipe buffer first.
  stdout_data, stderr_data = p.communicate()

  # Decode so the result concatenates cleanly with str on Python 3
  # (on Python 2 this yields unicode, which also concatenates cleanly).
  stderr_text = stderr_data.decode('utf-8', 'replace')
  if stderr_text:
    raise Exception("Unable to find the youngest revision for repository '%s'"
                    ": %s" % (repo_dir, stderr_text.splitlines()[0].rstrip()))

  return stdout_data.decode('utf-8', 'replace').splitlines()[0].strip()
+
######################################################################
# Main

print("Beginning hot backup of '"+ repo_dir + "'.")


### Step 1: get the youngest revision.

try:
  youngest = get_youngest_revision()
except Exception as e:
  # get_youngest_revision raises a plain Exception when svnlook fails;
  # report the message and bail out.
  sys.stderr.write("%s\n" % e)
  sys.stderr.flush()
  sys.exit(1)

print("Youngest revision is %s" % youngest)
+
+
### Step 2: Find next available backup path

import functools

backup_subdir = os.path.join(backup_dir, repo + "-" + youngest)

# If there is already a backup of this revision, then append the
# next highest increment to the path.  We still need to do a backup
# because the repository might have changed despite no new revision
# having been created.  We find the highest increment and add one
# rather than start from 1 and increment because the starting
# increments may have already been removed due to num_backups.

regexp = re.compile("^" + re.escape(repo) + "-" + re.escape(youngest) +
                    "(-(?P<increment>[0-9]+))?" + ext_re + "$")
directory_list = os.listdir(backup_dir)
young_list = [x for x in directory_list if regexp.search(x)]
if young_list:
  # list.sort() only accepts a 'key' function on Python 3; wrap the
  # cmp-style comparator with functools.cmp_to_key so this works on
  # both 2 and 3 (the original 'young_list.sort(comparator)' is
  # Python-2-only and raises TypeError on Python 3).
  young_list.sort(key=functools.cmp_to_key(comparator))
  increment = regexp.search(young_list.pop()).groupdict()['increment']
  if increment:
    backup_subdir = os.path.join(backup_dir, repo + "-" + youngest + "-"
                                 + str(int(increment) + 1))
  else:
    backup_subdir = os.path.join(backup_dir, repo + "-" + youngest + "-1")
+
### Step 3: Ask subversion to make a hot copy of a repository.
### copied last.

print("Backing up repository to '" + backup_subdir + "'...")
# 'svnadmin hotcopy' copies the live repository; --clean-logs also
# removes unused Berkeley DB log files from a BDB-backed repository
# (a no-op for FSFS).
err_code = subprocess.call([svnadmin, "hotcopy", "--clean-logs",
                            '--', repo_dir, backup_subdir])
if err_code != 0:
  sys.stderr.write("Unable to backup the repository.\n")
  sys.stderr.flush()
  sys.exit(err_code)
else:
  print("Done.")

### Step 4: Verify the hotcopy
if verify_copy:
  print("Verifying backup...")
  err_code = subprocess.call([svnadmin, "verify", "--quiet", '--', backup_subdir])
  if err_code != 0:
    sys.stderr.write("Backup verification failed.\n")
    sys.stderr.flush()
    sys.exit(err_code)
  else:
    print("Done.")
+
### Step 5: Make an archive of the backup if required.
if archive_type:
  archive_path = backup_subdir + archive_map[archive_type]
  err_msg = ""
  # NOTE(review): err_code is reused from steps 3/4 above; it is 0 here
  # whenever those steps succeeded, so the check after archiving only
  # fires if one of the except blocks below assigned a new value.

  print("Archiving backup to '" + archive_path + "'...")
  if archive_type == 'gz' or archive_type == 'bz2':
    try:
      import tarfile
      tar = tarfile.open(archive_path, 'w:' + archive_type)
      # Store the tree under its basename rather than its full path.
      tar.add(backup_subdir, os.path.basename(backup_subdir))
      tar.close()
    except ImportError as e:
      err_msg = "Import failed: " + str(e)
      err_code = -2
    except tarfile.TarError as e:
      err_msg = "Tar failed: " + str(e)
      err_code = -3

  elif archive_type == 'zip' or archive_type == 'zip64':
    try:
      import zipfile

      # Add NAMES (entries of directory DIRNAME) to the open ZipFile ZP,
      # with archive member names taken relative to ROOT.
      def add_to_zip(zp, root, dirname, names):
        root = os.path.join(root, '')

        for file in names:
          path = os.path.join(dirname, file)
          if os.path.isfile(path):
            zp.write(path, path[len(root):])
          elif os.path.isdir(path) and os.path.islink(path):
            # NOTE(review): this branch recurses only into directories
            # that are symlinks (which the outer os.walk does not
            # descend into); plain subdirectories are already covered
            # by the outer os.walk -- confirm this is the intent.
            for dirpath, dirs, files in os.walk(path):
              add_to_zip(zp, path, dirpath, dirs + files)

      # The final argument enables zip64 extensions (archives > 2GB).
      zp = zipfile.ZipFile(archive_path, 'w', zipfile.ZIP_DEFLATED, archive_type == 'zip64')
      for dirpath, dirs, files in os.walk(backup_subdir):
        add_to_zip(zp, backup_dir, dirpath, dirs + files)
      zp.close()
    except ImportError as e:
      err_msg = "Import failed: " + str(e)
      err_code = -4
    except zipfile.error as e:
      err_msg = "Zip failed: " + str(e)
      err_code = -5


  if err_code != 0:
    sys.stderr.write("Unable to create an archive for the backup.\n%s\n" % err_msg)
    sys.stderr.flush()
    sys.exit(err_code)
  else:
    # Archive created successfully; the unarchived tree is now redundant.
    print("Archive created, removing backup '" + backup_subdir + "'...")
    safe_rmtree(backup_subdir, 1)
+
### Step 6: finally, remove all repository backups other than the last
### NUM_BACKUPS.

import functools

if num_backups > 0:
  regexp = re.compile("^" + re.escape(repo) + "-[0-9]+(-[0-9]+)?" + ext_re + "$")
  directory_list = os.listdir(backup_dir)
  old_list = [x for x in directory_list if regexp.search(x)]
  # list.sort() only accepts a 'key' function on Python 3; wrap the
  # cmp-style comparator with functools.cmp_to_key (the original
  # 'old_list.sort(comparator)' is Python-2-only).
  old_list.sort(key=functools.cmp_to_key(comparator))
  # Drop the newest num_backups entries from the removal list; whatever
  # remains (the oldest backups) is deleted below.
  del old_list[max(0, len(old_list) - num_backups):]
  for item in old_list:
    old_backup_item = os.path.join(backup_dir, item)
    print("Removing old backup: " + old_backup_item)
    if os.path.isdir(old_backup_item):
      safe_rmtree(old_backup_item, 1)
    else:
      # Archived backups are plain files, not directories.
      os.remove(old_backup_item)
diff --git a/tools/bdb/erase-all-text-data.py b/tools/bdb/erase-all-text-data.py
new file mode 100755
index 0000000..288c4ab
--- /dev/null
+++ b/tools/bdb/erase-all-text-data.py
@@ -0,0 +1,94 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# Erases the text of every file in a BDB repository
+#
+
+import sys, os
+import skel, svnfs
+
def main():
  """Erase the text of every file in the BDB repository named on the
  command line, pointing each file's data representation at a shared
  empty fulltext.  Destructive: run only on a *copy* of a repository.
  """
  if len(sys.argv) == 2:
    dbhome = os.path.join(sys.argv[1], 'db')
    # Sanity-check that the path looks like a BDB repository by probing
    # for the db/ directory itself ('') and each expected table file.
    for i in ('', 'uuids', 'revisions', 'transactions', 'representations',
              'strings', 'changes', 'copies', 'nodes'):
      if not os.path.exists(os.path.join(dbhome, i)):
        sys.stderr.write("%s: '%s' is not a valid bdb svn repository\n" %
                         (sys.argv[0], sys.argv[1]))
        sys.exit(1)
  else:
    sys.stderr.write("Usage: %s <bdb-svn-repository>\n" % sys.argv[0])
    sys.exit(1)

  print("WARNING!: This program will destroy all text data in the subversion")
  print("repository '%s'" % sys.argv[1])
  print("Do not proceed unless this is a *COPY* of your real repository")
  print("If this is really what you want to do, " \
        "type 'YESERASE' and press Return")
  # Python 3's input() is the equivalent of Python 2's raw_input().
  if sys.version_info[0] >= 3:
    # Python >=3.0
    confirmation = input("Confirmation string> ")
  else:
    # Python <3.0
    confirmation = raw_input("Confirmation string> ")
  if confirmation != "YESERASE":
    print("Cancelled - confirmation string not matched")
    sys.exit(0)
  print("Opening database environment...")
  cur = None
  ctx = svnfs.Ctx(dbhome)
  try:
    cur = ctx.nodes_db.cursor()
    nodecount = 0
    # Build the skel for a fulltext representation whose string is the
    # shared 'empty' strings-table entry created just below.
    newrep = skel.Rep()
    newrep.str = "empty"
    empty_fulltext_rep_skel = newrep.unparse()
    del newrep
    ctx.strings_db['empty'] = ""
    rec = cur.first()
    while rec:
      # Skip the nodes table's 'next-key' bookkeeping record.
      if rec[0] != "next-key":
        if (nodecount % 10000 == 0) and nodecount != 0:
          print("Processed %d nodes..." % nodecount)
        nodecount += 1
        node = skel.Node(rec[1])
        if node.kind == "file":
          rep = skel.Rep(ctx.reps_db[node.datarep])
          if rep.kind == "fulltext":
            # Delete the fulltext string, then point the node's data
            # rep at the shared empty fulltext.
            if rep.str in ctx.strings_db:
              del ctx.strings_db[rep.str]
            ctx.reps_db[node.datarep] = empty_fulltext_rep_skel
          else:
            # Non-fulltext (delta) rep: delete every window's string.
            for w in rep.windows:
              if w.str in ctx.strings_db:
                del ctx.strings_db[w.str]
            ctx.reps_db[node.datarep] = empty_fulltext_rep_skel
      rec = cur.next()
    print("Processed %d nodes" % nodecount)
  finally:
    # Close the cursor before the environment, even on error paths.
    if cur:
      cur.close()
    ctx.close()
  print("Done")

if __name__ == '__main__':
  main()
diff --git a/tools/bdb/skel.py b/tools/bdb/skel.py
new file mode 100644
index 0000000..3e0a255
--- /dev/null
+++ b/tools/bdb/skel.py
@@ -0,0 +1,226 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+# Python parser for Subversion skels
+
+import string, re
+
+def parse(s):
+ if s[0] != '(' and s[-1] != ')':
+ raise ValueError("Improperly bounded skel: '%s'" % s)
+ wholeskel = s
+ s = s[1:-1].lstrip()
+ prev_accums = []
+ accum = []
+ while True:
+ if len(s) == 0:
+ return accum
+ if s[0] in string.digits:
+ split_tuple = s.split(' ',1)
+ count = int(split_tuple[0])
+ if len(split_tuple) > 1:
+ s = split_tuple[1]
+ else:
+ s = ""
+ accum.append(s[:count])
+ s = s[count:].lstrip()
+ continue
+ if s[0] in string.ascii_letters:
+ i = 0
+ while (s[i] not in ' ()'):
+ i += 1
+ if i == len(s):
+ break
+ accum.append(s[:i])
+ s = s[i:].lstrip()
+ continue
+ if s[0] == '(':
+ new_accum = []
+ accum.append(new_accum)
+ prev_accums.append(accum)
+ accum = new_accum
+ s = s[1:].lstrip()
+ continue
+ if s[0] == ')':
+ accum = prev_accums.pop()
+ s = s[1:].lstrip()
+ continue
+ if s[0] == ' ':
+ s = s.lstrip(' ')
+ continue
+ raise ValueError("Unexpected contents in skel: '%s'\n'%s'" % (s, wholeskel))
+
+
+_ok_implicit = re.compile(r'^[A-Za-z]([^\(\) \r\n\t\f]*)$')
+def unparse(struc):
+ accum = []
+ for ent in struc:
+ if isinstance(ent, str):
+ if len(ent) > 0 and _ok_implicit.match(ent):
+ accum.append(ent)
+ else:
+ accum.append(str(len(ent)))
+ accum.append(ent)
+ else:
+ accum.append(unparse(ent))
+ return "("+" ".join(accum)+")"
+
+
+class Rev:
+ def __init__(self, skelstring="(revision null)"):
+ sk = parse(skelstring)
+ if len(sk) == 2 and sk[0] == "revision" and isinstance(sk[1], str):
+ self.txn = sk[1]
+ else:
+ raise ValueError("Invalid revision skel: %s" % skelstring)
+
+ def unparse(self):
+ return unparse( ("revision", self.txn) )
+
+
+class Change:
+ def __init__(self, skelstring="(change null null null 0 0 )"):
+ sk = parse(skelstring)
+ if len(sk) == 6 and sk[0] == "change" and type(sk[1]) == type(sk[2]) \
+ == type(sk[3]) == type(sk[4]) == type(sk[5]) == str:
+ self.path = sk[1]
+ self.node = sk[2]
+ self.kind = sk[3]
+ self.textmod = sk[4]
+ self.propmod = sk[5]
+ else:
+ raise ValueError("Invalid change skel: %s" % skelstring)
+
+ def unparse(self):
+ return unparse( ("change", self.path, self.node, self.kind,
+ self.textmod and "1" or "", self.propmod and "1" or "") )
+
+
+class Copy:
+ def __init__(self, skelstring="(copy null null null)"):
+ sk = parse(skelstring)
+ if len(sk) == 4 and sk[0] in ("copy", "soft-copy") and type(sk[1]) \
+ == type(sk[2]) == type(sk[3]) == str:
+ self.kind = sk[0]
+ self.srcpath = sk[1]
+ self.srctxn = sk[2]
+ self.destnode = sk[3]
+ else:
+ raise ValueError("Invalid copy skel: %s" % skelstring)
+
+ def unparse(self):
+ return unparse( (self.kind, self.srcpath, self.srctxn, self.destnode) )
+
+
+class Node:
+ def __init__(self,skelstring="((file null null 1 0) null null)"):
+ sk = parse(skelstring)
+ if (len(sk) == 3 or (len(sk) == 4 and isinstance(sk[3], str))) \
+ and isinstance(sk[0], list) and isinstance(sk[1], str) \
+ and isinstance(sk[2], str) and sk[0][0] in ("file", "dir") \
+ and type(sk[0][1]) == type(sk[0][2]) == type(sk[0][3]) == str:
+ self.kind = sk[0][0]
+ self.createpath = sk[0][1]
+ self.prednode = sk[0][2]
+ self.predcount = int(sk[0][3])
+ self.proprep = sk[1]
+ self.datarep = sk[2]
+ if len(sk) > 3:
+ self.editrep = sk[3]
+ else:
+ self.editrep = None
+ else:
+ raise ValueError("Invalid node skel: %s" % skelstring)
+
+ def unparse(self):
+ structure = [ (self.kind, self.createpath, self.prednode,
+ str(self.predcount)), self.proprep, self.datarep ]
+ if self.editrep:
+ structure.append(self.editrep)
+ return unparse( structure )
+
+
+class Txn:
+ def __init__(self,skelstring="(transaction null null () ())"):
+ sk = parse(skelstring)
+ if len(sk) == 5 and sk[0] in ("transaction", "committed", "dead") \
+ and type(sk[1]) == type(sk[2]) == str \
+ and type(sk[3]) == type(sk[4]) == list and len(sk[3]) % 2 == 0:
+ self.kind = sk[0]
+ self.rootnode = sk[1]
+ if self.kind == "committed":
+ self.rev = sk[2]
+ else:
+ self.basenode = sk[2]
+ self.proplist = sk[3]
+ self.copies = sk[4]
+ else:
+ raise ValueError("Invalid transaction skel: %s" % skelstring)
+
+ def unparse(self):
+ if self.kind == "committed":
+ base_item = self.rev
+ else:
+ base_item = self.basenode
+ return unparse( (self.kind, self.rootnode, base_item, self.proplist,
+ self.copies) )
+
+
+class SvnDiffWindow:
+ def __init__(self, skelstructure):
+ self.offset = skelstructure[0]
+ self.svndiffver = skelstructure[1][0][1]
+ self.str = skelstructure[1][0][2]
+ self.size = skelstructure[1][1]
+ self.vs_rep = skelstructure[1][2]
+
+ def _unparse_structure(self):
+ return ([ self.offset, [ [ 'svndiff', self.svndiffver, self.str ],
+ self.size, self.vs_rep ] ])
+
+
+class Rep:
+ def __init__(self, skelstring="((fulltext 0 (md5 16 \0\0\0\0\0\0\0\0" \
+ "\0\0\0\0\0\0\0\0)) null)"):
+ sk = parse(skelstring)
+ if isinstance(sk[0], list) and len(sk[0]) == 3 \
+ and isinstance(sk[0][1], str) \
+ and isinstance(sk[0][2], list) and len(sk[0][2]) == 2 \
+ and type(sk[0][2][0]) == type(sk[0][2][1]) == str:
+ self.kind = sk[0][0]
+ self.txn = sk[0][1]
+ self.cksumtype = sk[0][2][0]
+ self.cksum = sk[0][2][1]
+ if len(sk) == 2 and sk[0][0] == "fulltext":
+ self.str = sk[1]
+ elif len(sk) >= 2 and sk[0][0] == "delta":
+ self.windows = list(map(SvnDiffWindow, sk[1:]))
+ else:
+ raise ValueError("Invalid representation skel: %s" % repr(skelstring))
+
+ def unparse(self):
+ structure = [ [self.kind, self.txn, [self.cksumtype, self.cksum] ] ]
+ if self.kind == "fulltext":
+ structure.append(self.str)
+ elif self.kind == "delta":
+ for w in self.windows:
+ structure.append(w._unparse_structure())
+ return unparse( structure )
+
diff --git a/tools/bdb/svn-bdb-view.py b/tools/bdb/svn-bdb-view.py
new file mode 100755
index 0000000..43dcc21
--- /dev/null
+++ b/tools/bdb/svn-bdb-view.py
@@ -0,0 +1,295 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# This is a pretty-printer for subversion BDB repository databases.
+#
+
+import sys, os, re, codecs, textwrap
+import skel, svnfs
+
+# Parse arguments
+if len(sys.argv) == 2:
+ dbhome = os.path.join(sys.argv[1], 'db')
+ if not os.path.exists(dbhome):
+ sys.stderr.write("%s: '%s' is not a valid svn repository\n" %
+ (sys.argv[0], dbhome))
+ sys.exit(1)
+else:
+ sys.stderr.write("Usage: %s <svn-repository>\n" % sys.argv[0])
+ sys.exit(1)
+
+# Helper Classes
+class RepositoryProblem(Exception):
+ pass
+
+# Helper Functions
+def ok(bool, comment):
+ if not bool:
+ raise RepositoryProblem(comment)
+
+# Helper Data
+opmap = {
+ 'add': 'A',
+ 'modify': 'M',
+ 'delete': 'D',
+ 'replace': 'R',
+ 'reset': 'X',
+}
+
+# Analysis Modules
+def am_uuid(ctx):
+ "uuids"
+ db = ctx.uuids_db
+ ok(list(db.keys()) == [1], 'uuid Table Structure')
+ ok(re.match(r'^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$',
+ db[1]), 'UUID format')
+ print("Repos UUID: %s" % db[1])
+
+def am_revisions(ctx):
+ "revisions"
+ cur = ctx.revs_db.cursor()
+ try:
+ rec = cur.first()
+ ctx.txn2rev = txn2rev = {}
+ prevrevnum = -1
+ while rec:
+ rev = skel.Rev(rec[1])
+ revnum = rec[0] - 1
+ print("r%d: txn %s%s" % (revnum, rev.txn,
+ (rev.txn not in ctx.txns_db) and "*** MISSING TXN ***" or ""))
+ ok(rev.txn not in txn2rev, 'Multiple revs bound to same txn')
+ txn2rev[rev.txn] = revnum
+ rec = cur.next()
+ finally:
+ cur.close()
+
+def am_changes(ctx):
+ "changes"
+ cur = ctx.changes_db.cursor()
+ try:
+ current_txnid_len = 0
+ maximum_txnid_len = 0
+ while current_txnid_len <= maximum_txnid_len:
+ current_txnid_len += 1
+ rec = cur.first()
+ prevtxn = None
+ while rec:
+ if len(rec[0]) != current_txnid_len:
+ rec = cur.next()
+ continue
+ ch = skel.Change(rec[1])
+ lead = "txn %s:" % rec[0]
+ if prevtxn == rec[0]:
+ lead = " " * len(lead)
+ print("%s %s %s %s %s %s%s" % (lead, opmap[ch.kind], ch.path, ch.node,
+ ch.textmod and "T" or "-", ch.propmod and "P" or "-",
+ (ch.node not in ctx.nodes_db) \
+ and "*** MISSING NODE ***" or ""))
+ prevtxn = rec[0]
+ if len(rec[0]) > maximum_txnid_len:
+ maximum_txnid_len = len(rec[0])
+ rec = cur.next()
+ finally:
+ cur.close()
+
+def am_copies(ctx):
+ "copies"
+ cur = ctx.copies_db.cursor()
+ try:
+ print("next-key: %s" % ctx.copies_db['next-key'])
+ rec = cur.first()
+ while rec:
+ if rec[0] != 'next-key':
+ cp = skel.Copy(rec[1])
+ destnode = ctx.nodes_db.get(cp.destnode)
+ if not destnode:
+ destpath = "*** MISSING NODE ***"
+ else:
+ destpath = skel.Node(destnode).createpath
+ print("cpy %s: %s %s @txn %s to %s (%s)" % (rec[0],
+ {'copy':'C','soft-copy':'S'}[cp.kind], cp.srcpath or "-",
+ cp.srctxn or "-", cp.destnode, destpath))
+ rec = cur.next()
+ finally:
+ cur.close()
+
+def am_txns(ctx):
+ "transactions"
+ cur = ctx.txns_db.cursor()
+ try:
+ print("next-key: %s" % ctx.txns_db['next-key'])
+ length = 1
+ found_some = True
+ while found_some:
+ found_some = False
+ rec = cur.first()
+ while rec:
+ if rec[0] != 'next-key' and len(rec[0]) == length:
+ found_some = True
+ txn = skel.Txn(rec[1])
+ if txn.kind == "committed":
+ label = "r%s" % txn.rev
+ ok(ctx.txn2rev[rec[0]] == int(txn.rev), 'Txn->rev not <-txn')
+ else:
+ label = "%s based-on %s" % (txn.kind, txn.basenode)
+ print("txn %s: %s root-node %s props %d copies %s" % (rec[0],
+ label, txn.rootnode, len(txn.proplist) / 2, ",".join(txn.copies)))
+ rec = cur.next()
+ length += 1
+ finally:
+ cur.close()
+
+def am_nodes(ctx):
+ "nodes"
+ cur = ctx.nodes_db.cursor()
+ try:
+ print("next-key: %s" % ctx.txns_db['next-key'])
+ rec = cur.first()
+ data = {}
+ while rec:
+ if rec[0] == 'next-key':
+ rec = cur.next()
+ continue
+ nd = skel.Node(rec[1])
+ nid,cid,tid = rec[0].split(".")
+ data[tid.rjust(20)+nd.createpath] = (rec[0], nd)
+ rec = cur.next()
+ k = sorted(data.keys())
+ reptype = {"fulltext":"F", "delta":"D"}
+ for i in k:
+ nd = data[i][1]
+ prkind = drkind = " "
+ if nd.proprep:
+ try:
+ rep = skel.Rep(ctx.reps_db[nd.proprep])
+ prkind = reptype[rep.kind]
+ if nd.proprep in ctx.bad_reps:
+ prkind += " *** BAD ***"
+ except KeyError:
+ prkind = "*** MISSING ***"
+ if nd.datarep:
+ try:
+ rep = skel.Rep(ctx.reps_db[nd.datarep])
+ drkind = reptype[rep.kind]
+ if nd.datarep in ctx.bad_reps:
+ drkind += " *** BAD ***"
+ except KeyError:
+ drkind = "*** MISSING ***"
+ stringdata = "%s: %s %s pred %s count %s prop %s %s data %s %s edit %s" \
+ % ( data[i][0], {"file":"F", "dir":"D"}[nd.kind], nd.createpath,
+ nd.prednode or "-", nd.predcount, prkind, nd.proprep or "-",
+ drkind, nd.datarep or "-", nd.editrep or "-")
+ if nd.createpath == "/":
+ print("")
+ print(stringdata)
+ finally:
+ cur.close()
+
+def get_string(ctx, id):
+ try:
+ return ctx.get_whole_string(id)
+ except DBNotFoundError:
+ return "*** MISSING STRING ***"
+
+def am_reps(ctx):
+ "representations"
+ ctx.bad_reps = {}
+ cur = ctx.reps_db.cursor()
+ try:
+ print("next-key: %s" % ctx.txns_db['next-key'])
+ rec = cur.first()
+ while rec:
+ if rec[0] != 'next-key':
+ rep = skel.Rep(rec[1])
+ lead = "rep %s: txn %s: %s %s " % (rec[0], rep.txn, rep.cksumtype,
+ codecs.getencoder('hex_codec')(rep.cksum)[0])
+ if rep.kind == "fulltext":
+ note = ""
+ if rep.str not in ctx.strings_db:
+ note = " *MISS*"
+ ctx.bad_reps[rec[0]] = None
+ print(lead+("fulltext str %s%s" % (rep.str, note)))
+ if ctx.verbose:
+ print(textwrap.fill(get_string(ctx, rep.str), initial_indent=" ",
+ subsequent_indent=" ", width=78))
+ elif rep.kind == "delta":
+ print(lead+("delta of %s window%s" % (len(rep.windows),
+ len(rep.windows) != 1 and "s" or "")))
+ for window in rep.windows:
+ noterep = notestr = ""
+ if window.vs_rep not in ctx.reps_db:
+ noterep = " *MISS*"
+ ctx.bad_reps[rec[0]] = None
+ if window.str not in ctx.strings_db:
+ notestr = " *MISS*"
+ ctx.bad_reps[rec[0]] = None
+ print("\toff %s len %s vs-rep %s%s str %s%s" % (window.offset,
+ window.size, window.vs_rep, noterep, window.str, notestr))
+ else:
+ print(lead+"*** UNKNOWN REPRESENTATION TYPE ***")
+ rec = cur.next()
+ finally:
+ cur.close()
+
+
+def am_stringsize(ctx):
+ "string size"
+ if not ctx.verbose:
+ return
+ cur = ctx.strings_db.cursor()
+ try:
+ rec = cur.first()
+ size = 0
+ while rec:
+ size = size + len(rec[1] or "")
+ rec = cur.next()
+ print("%s %s %s" % (size, size/1024.0, size/1024.0/1024.0))
+ finally:
+ cur.close()
+
+modules = (
+ am_uuid,
+ am_revisions,
+ am_changes,
+ am_copies,
+ am_txns,
+ am_reps,
+ am_nodes,
+ # Takes too long: am_stringsize,
+ )
+
+def main():
+ print("Repository View for '%s'" % dbhome)
+ print("")
+ ctx = svnfs.Ctx(dbhome, readonly=1)
+ # Stash process state in a library data structure. Yuck!
+ ctx.verbose = 0
+ try:
+ for am in modules:
+ print("MODULE: %s" % am.__doc__)
+ am(ctx)
+ print("")
+ finally:
+ ctx.close()
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/bdb/svnfs.py b/tools/bdb/svnfs.py
new file mode 100644
index 0000000..c67fa61
--- /dev/null
+++ b/tools/bdb/svnfs.py
@@ -0,0 +1,97 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+# A handle object for convenience in opening a svn repository
+
+import sys
+
+# We need a bsddb linked to the same version of Berkeley DB as Subversion is
+try:
+ import bsddb3 as bsddb
+except ImportError:
+ import bsddb
+
+# Publish the result
+sys.modules['svnfs_bsddb'] = bsddb
+
+from svnfs_bsddb.db import *
+
+class Ctx:
+ def __init__(self, dbhome, readonly=None):
+ self.env = self.uuids_db = self.revs_db = self.txns_db = self.changes_db \
+ = self.copies_db = self.nodes_db = self.reps_db = self.strings_db = \
+ None
+ try:
+ self.env = DBEnv()
+ self.env.set_lk_detect(DB_LOCK_RANDOM)
+ self.env.set_get_returns_none(1)
+ self.env.open(dbhome, DB_CREATE | DB_INIT_MPOOL | DB_INIT_TXN \
+ | DB_INIT_LOCK | DB_INIT_LOG)
+ def open_db(dbname):
+ db = DB(self.env)
+ dbflags = 0
+ if readonly:
+ dbflags = DB_RDONLY
+ db.open(dbname, flags=dbflags)
+ return db
+ self.uuids_db = open_db('uuids')
+ self.revs_db = open_db('revisions')
+ self.txns_db = open_db('transactions')
+ self.changes_db = open_db('changes')
+ self.copies_db = open_db('copies')
+ self.nodes_db = open_db('nodes')
+ self.reps_db = open_db('representations')
+ self.strings_db = open_db('strings')
+ except:
+ self.close()
+ raise
+
+ def close(self):
+ def close_if_not_None(i):
+ if i is not None:
+ i.close()
+ close_if_not_None(self.uuids_db )
+ close_if_not_None(self.revs_db )
+ close_if_not_None(self.txns_db )
+ close_if_not_None(self.changes_db )
+ close_if_not_None(self.copies_db )
+ close_if_not_None(self.nodes_db )
+ close_if_not_None(self.reps_db )
+ close_if_not_None(self.strings_db )
+ close_if_not_None(self.env )
+ self.env = self.uuids_db = self.revs_db = self.txns_db = self.changes_db \
+ = self.copies_db = self.nodes_db = self.reps_db = self.strings_db = \
+ None
+
+ # And now, some utility functions
+ def get_whole_string(self, key):
+ cur = self.strings_db.cursor()
+ try:
+ rec = cur.set(key)
+ if rec is None:
+ raise DBNotFoundError
+ str = ""
+ while rec:
+ str = str + (rec[1] or "")
+ rec = cur.next_dup()
+ finally:
+ cur.close()
+ return str
+
diff --git a/tools/bdb/whatis-rep.py b/tools/bdb/whatis-rep.py
new file mode 100755
index 0000000..55ed4b7
--- /dev/null
+++ b/tools/bdb/whatis-rep.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# Print a description (including data, path, and revision) of the
+# specified node reps in a Subversion filesystem. Walks as much of
+# the reps table as necessary to locate the data (e.g. does a table
+# scan).
+
+# Standard modules
+import sys, os, re, codecs
+
+# Local support modules
+import skel, svnfs
+
+def main():
+ progname = os.path.basename(sys.argv[0])
+ if len(sys.argv) >= 3:
+ dbhome = os.path.join(sys.argv[1], 'db')
+ if not os.path.exists(dbhome):
+ sys.stderr.write("%s: '%s' is not a valid svn repository\n" %
+ (sys.argv[0], dbhome))
+ sys.stderr.flush()
+ sys.exit(1)
+ rep_ids = sys.argv[2:]
+ else:
+ sys.stderr.write("Usage: %s <svn-repository> <rep-id>...\n" % progname)
+ sys.stderr.flush()
+ sys.exit(1)
+
+ print("%s running on repository '%s'" % (progname, dbhome))
+ print("")
+ rep_ids = dict.fromkeys(rep_ids)
+ ctx = svnfs.Ctx(dbhome)
+ try:
+ cur = ctx.nodes_db.cursor()
+ try:
+ rec = cur.first()
+ while rec:
+ if rec[0] != 'next-key':
+ nid, cid, tid = rec[0].split(".")
+ nd = skel.Node(rec[1])
+ if nd.datarep in rep_ids:
+ rev = skel.Txn(ctx.txns_db[tid]).rev
+ print("%s: data of '%s%s' in r%s" % (nd.datarep,
+ nd.createpath, {"dir":'/', "file":''}[nd.kind], rev))
+ if nd.proprep in rep_ids:
+ rev = skel.Txn(ctx.txns_db[tid]).rev
+ print("%s: properties of '%s%s' in r%s" % (nd.datarep,
+ nd.createpath, {"dir":'/', "file":''}[nd.kind], rev))
+ rec = cur.next()
+ finally:
+ cur.close()
+ finally:
+ ctx.close()
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/buildbot/master/README b/tools/buildbot/master/README
new file mode 100644
index 0000000..35fbff0
--- /dev/null
+++ b/tools/buildbot/master/README
@@ -0,0 +1,7 @@
+The BuildBot Master is managed by the ASF Infrastructure team.
+
+This was announced per this email:
+https://mail-archives.apache.org/mod_mbox/subversion-dev/201005.mbox/%3CAANLkTilvSpSwJHLlJVpKpGVAI2-JQyGqLqCn1Sjgo-Qf@mail.gmail.com%3E
+
+The new BuildBot Master configuration is maintained here:
+https://svn.apache.org/repos/infra/infrastructure/buildbot/aegis/buildmaster/master1/
diff --git a/tools/buildbot/slaves/README b/tools/buildbot/slaves/README
new file mode 100644
index 0000000..a23fad5
--- /dev/null
+++ b/tools/buildbot/slaves/README
@@ -0,0 +1,92 @@
+How to setup a buildslave?
+--------------------------
+
+1. Install Buildbot
+-------------------
+Buildbot is a Python application, you need to install Python 2.2+.
+
+Download & install:
+- Twisted 2.2: http://twistedmatrix.com/trac/
+- ZopeInterface 3.0.1: http://www.zope.org/Products/ZopeInterface
+- Buildbot 0.7.2: http://sourceforge.net/project/showfiles.php?group_id=73177
+
+The Buildbot installer creates a buildbot script in /usr/bin. On Windows,
+buildbot.bat is installed in the %PYTHON\Scripts folder.
+
+Test your buildbot install by running the buildbot script.
+
+2. Creating a buildslave
+------------------------
+Creating a buildslave is explained in the Buildbot manual:
+http://buildbot.sourceforge.net/manual-0.7.2.html#Creating-a-buildslave
+
+Before you can install the buildslave, you'll need some information from
+the buildmaster administrator:
+
+- your botname and password. The botname is a unique id for your buildslave.
+Current botnames include the OS+version, compiler and processor architecture.
+(eg. osx10.4-gcc4.0.1-ia32, xp-vs2003-ia32...)
+- hostname and port on which the buildmaster is listening.
+- hostname and port on which the TryScheduler is listening.
+- username and password for the FTP server where you can send the test log-
+ files.
+
+Make sure you fill in the info/host file with some information concerning your
+buildslave.
+
+3. Setting up the buildslave
+----------------------------
+A build on a slave is always triggered and managed by the Buildmaster. In fact,
+by connecting to the buildmaster, the slave gives full shell access on his
+machine to the buildmaster! Make sure you run the slave in an account with
+limited access.
+
+The build process is split in 4 steps.
+- svn checkout or update
+- build
+- check
+- clean
+
+The first step is handled by the buildbot code (SVNStep) and can not be
+further configured on the buildslave.
+For all the other steps you have to provide scripts and put them in the
+slavedir/[botname] folder.
+
+You can copy and reuse the scripts the other buildbot slave owners use for
+Windows, Linux or Mac buildslaves. You can find them here:
+http://svn.apache.org/repos/asf/subversion/trunk/tools/buildbot/slaves
+
+4. Builds
+---------
+A build will be triggered each time someone commits changes on trunk or
+branches/1.4.x. If the buildslave is not online, then the build will be
+stored in the queue and started when the buildslave connects.
+
+Normally each build only includes the changes of one commit. However, when
+multiple changes arrive during a previous build, those will be combined in
+a next build.
+
+The buildmaster looks at each commit and decides if they are important enough
+to trigger a build. Documentation, contrib, www... changes are typically
+ignored.
+
+5. Try
+------
+Buildbot has a feature that allows a Subversion developer to test a patch she's
+working on on one of the builders. Example, someone working on Linux might want
+to test a major change first on a Windows builder before committing the patch.
+
+It's up to you to choose whether your buildslave can accept patches or not,
+there's no obligation. Basically by allowing patches to be tried on your
+buildslave you give people the ability to compile and execute arbitrary code
+on your machine.
+Try access is password protected and will be reserved to Subversion full
+committers.
+
+How to use the try feature:
+TryScheduler is installed on its own dedicated port. It can be used by
+a client with 'buildbot try':
+http://buildbot.sourceforge.net/manual-0.7.5.html#try
+
+Contact the buildmaster administrator for the username & password which you
+need to use 'buildbot try'.
diff --git a/tools/buildbot/slaves/bb-openbsd/svnbuild.sh b/tools/buildbot/slaves/bb-openbsd/svnbuild.sh
new file mode 100755
index 0000000..42c3260
--- /dev/null
+++ b/tools/buildbot/slaves/bb-openbsd/svnbuild.sh
@@ -0,0 +1,28 @@
+#!/bin/sh
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+set -e
+set -x
+
+export JAVA_HOME=/usr/local/jdk-1.7.0
+
+branch="$(basename $(svn info . | grep ^URL | cut -d' ' -f2))"
+(cd .. && gmake BRANCH="$branch" THREADING="no")
diff --git a/tools/buildbot/slaves/bb-openbsd/svncheck-bindings.sh b/tools/buildbot/slaves/bb-openbsd/svncheck-bindings.sh
new file mode 100755
index 0000000..9d47b1f
--- /dev/null
+++ b/tools/buildbot/slaves/bb-openbsd/svncheck-bindings.sh
@@ -0,0 +1,32 @@
+#!/bin/sh
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+set -e
+set -x
+
+branch="$(basename $(svn info . | grep ^URL | cut -d' ' -f2))"
+export MALLOC_OPTIONS=S
+(cd .. && gmake BRANCH="$branch" THREADING="no" svn-check-bindings)
+grep -q "^Result: PASS$" tests.log.bindings.pl || exit 1
+grep -q "^OK$" tests.log.bindings.py || exit 1
+grep -q ", 0 failures, 0 errors" tests.log.bindings.rb || exit 1
+#TODO javahl
+exit 0
diff --git a/tools/buildbot/slaves/bb-openbsd/svncheck.sh b/tools/buildbot/slaves/bb-openbsd/svncheck.sh
new file mode 100755
index 0000000..7d50d98
--- /dev/null
+++ b/tools/buildbot/slaves/bb-openbsd/svncheck.sh
@@ -0,0 +1,36 @@
+#!/bin/sh
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+set -e
+set -x
+
+branch="$(basename $(svn info . | grep ^URL | cut -d' ' -f2))"
+export MALLOC_OPTIONS=S
+(cd .. && gmake BRANCH="$branch" PARALLEL="" THREADING="no" \
+ MEMCACHED_SERVER="127.0.0.1:11211" \
+ EXCLUSIVE_WC_LOCKS=1 \
+ svn-check-local \
+ svn-check-svn \
+ svn-check-neon \
+ svn-check-serf)
+grep -q "^FAIL:" tests.log.svn-check* && exit 1
+grep -q "^XPASS:" tests.log.svn-check* && exit 1
+exit 0
diff --git a/tools/buildbot/slaves/bb-openbsd/svnclean.sh b/tools/buildbot/slaves/bb-openbsd/svnclean.sh
new file mode 100755
index 0000000..063f4dd
--- /dev/null
+++ b/tools/buildbot/slaves/bb-openbsd/svnclean.sh
@@ -0,0 +1,33 @@
+#!/bin/sh
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+set -e
+set -x
+
+branch="$(basename $(svn info . | grep ^URL | cut -d' ' -f2))"
+(test -h ../svn-trunk || ln -s build ../svn-trunk)
+for i in 6 7 8 9 10; do
+ (test -h ../svn-1.${i}.x || ln -s build ../svn-1.${i}.x)
+done
+svn update ../../unix-build
+(test -h ../GNUmakefile || ln -s ../unix-build/Makefile.svn ../GNUmakefile)
+(cd .. && gmake BRANCH="$branch" reset clean)
+rm -f tests.log* fails.log*
diff --git a/tools/buildbot/slaves/i686-debian-sarge1/mount-ramdrive.c b/tools/buildbot/slaves/i686-debian-sarge1/mount-ramdrive.c
new file mode 100644
index 0000000..1c54ea5
--- /dev/null
+++ b/tools/buildbot/slaves/i686-debian-sarge1/mount-ramdrive.c
@@ -0,0 +1,35 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+
+#include <stdio.h>
+#include <unistd.h>
+
+
+int main()
+{
+ const char *cmd = "/bin/mount -t tmpfs -o size=50M tmpfs subversion/tests/cmdline/svn-test-work";
+
+ setuid(0);
+
+ system(cmd);
+
+}
diff --git a/tools/buildbot/slaves/i686-debian-sarge1/svnbuild.sh b/tools/buildbot/slaves/i686-debian-sarge1/svnbuild.sh
new file mode 100755
index 0000000..c1c20ae
--- /dev/null
+++ b/tools/buildbot/slaves/i686-debian-sarge1/svnbuild.sh
@@ -0,0 +1,49 @@
+#!/bin/bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+set -x
+
+echo "========= autogen.sh"
+./autogen.sh || exit $?
+
+echo "========= configure"
+./configure --disable-static --enable-shared \
+ --enable-maintainer-mode \
+ --with-neon=/usr/local/neon-0.25.5 \
+ --with-apxs=/usr/sbin/apxs \
+ --without-berkeley-db \
+ --with-apr=/usr/local/apr \
+ --with-apr-util=/usr/local/apr || exit $?
+
+echo "========= make"
+make || exit $?
+
+# echo "========= make swig-py"
+# make swig-py || exit $?
+
+# echo "========= make swig-pl"
+# make swig-pl || exit $?
+
+#echo "========= make swig-rb"
+#make swig-rb || exit $?
+
+exit 0
diff --git a/tools/buildbot/slaves/i686-debian-sarge1/svncheck.sh b/tools/buildbot/slaves/i686-debian-sarge1/svncheck.sh
new file mode 100755
index 0000000..dc06a82
--- /dev/null
+++ b/tools/buildbot/slaves/i686-debian-sarge1/svncheck.sh
@@ -0,0 +1,45 @@
+#!/bin/bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+
+set -x
+
+if test -z "$1" ; then
+ echo "Missing FS_TYPE specifier (arg #1)."
+ exit 1
+fi
+
+echo "========= mount RAM disc"
+# ignore the result: if it fails, the test will just take longer...
+mkdir -p subversion/tests/cmdline/svn-test-work
+test -e ../mount-ramdrive && ../mount-ramdrive
+
+echo "========= make check"
+make check FS_TYPE=$1 CLEANUP=1 || exit $?
+
+# echo "========= make check-swig-pl"
+# make check-swig-pl || exit $?
+
+#echo "========= make check-swig-rb"
+#make check-swig-rb || exit $?
+
+exit 0
diff --git a/tools/buildbot/slaves/i686-debian-sarge1/svnclean.sh b/tools/buildbot/slaves/i686-debian-sarge1/svnclean.sh
new file mode 100755
index 0000000..95d4290
--- /dev/null
+++ b/tools/buildbot/slaves/i686-debian-sarge1/svnclean.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+set -x
+
+echo "========= unmount RAM disc"
+# ignore the result: if there was no ramdisc, that's fine
+test -e ../unmount-ramdrive && ../unmount-ramdrive
+
+echo "========= make extraclean"
+test -e Makefile && { make extraclean || exit $?; }
+
+exit 0
diff --git a/tools/buildbot/slaves/i686-debian-sarge1/svnlog.sh b/tools/buildbot/slaves/i686-debian-sarge1/svnlog.sh
new file mode 100755
index 0000000..c2302e1
--- /dev/null
+++ b/tools/buildbot/slaves/i686-debian-sarge1/svnlog.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+set -x
+
+# upload file to server
+FILENAME=tests-`date +%Y%m%d%H%M`.log.tgz
+tar -czf $FILENAME tests.log
+ftp -n www.mobsol.be < ../ftpscript
+rm $FILENAME
+
+echo "Logs of the testrun can be found here: http://www.mobsol.be/logs/eh-debsarge1/$FILENAME"
+
+exit 0
diff --git a/tools/buildbot/slaves/i686-debian-sarge1/unmount-ramdrive.c b/tools/buildbot/slaves/i686-debian-sarge1/unmount-ramdrive.c
new file mode 100644
index 0000000..5c3dbe7
--- /dev/null
+++ b/tools/buildbot/slaves/i686-debian-sarge1/unmount-ramdrive.c
@@ -0,0 +1,36 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+
+#include <stdio.h>
+#include <unistd.h>
+
+
+int main()
+{
+ const char *cmd = "/bin/umount `/usr/bin/dirname $0`/build/subversion/tests/cmdline/svn-test-work";
+
+ setuid(0);
+
+ return system(cmd);
+
+}
+
diff --git a/tools/buildbot/slaves/svn-sparc-solaris/mount-tmpfs.c b/tools/buildbot/slaves/svn-sparc-solaris/mount-tmpfs.c
new file mode 100644
index 0000000..74b3e6a
--- /dev/null
+++ b/tools/buildbot/slaves/svn-sparc-solaris/mount-tmpfs.c
@@ -0,0 +1,38 @@
+#include <stdio.h>
+#include <stdlib.h>
+#include <sys/types.h>
+#include <unistd.h>
+#include <strings.h>
+
+int main(int argc, char **argv)
+{
+ const char *cmd;
+ const char *name = strrchr(argv[0], '/');
+
+ if (name)
+ ++name;
+ else
+ name = argv[0];
+
+ if (!strcmp(name, "mount-tmpfs"))
+ {
+ cmd = "/usr/sbin/mount -F tmpfs -o size=768m tmpfs /export/home/wandisco/buildbot/slave/svn-sparc-solaris/obj/subversion/tests/";
+ }
+ else if (!strcmp(name, "umount-tmpfs"))
+ {
+ cmd = "/usr/sbin/umount /export/home/wandisco/buildbot/slave/svn-sparc-solaris/obj/subversion/tests/";
+ }
+ else
+ {
+ fprintf(stderr, "command not recognised\n");
+ return -1;
+ }
+
+ if (setuid(0))
+ {
+ fprintf(stderr, "setuid failed\n");
+ return -1;
+ }
+
+ return system(cmd);
+}
diff --git a/tools/buildbot/slaves/svn-sparc-solaris/rebuild-svn-bits.sh b/tools/buildbot/slaves/svn-sparc-solaris/rebuild-svn-bits.sh
new file mode 100644
index 0000000..4f4c09b
--- /dev/null
+++ b/tools/buildbot/slaves/svn-sparc-solaris/rebuild-svn-bits.sh
@@ -0,0 +1,219 @@
+#!/bin/sh
+
+GREP=/usr/bin/grep
+export GREP
+PATH=/usr/bin:/usr/ccs/bin:/opt/csw/bin:/opt/csw/gnu:/export/home/wandisco/buildbot/install/bin
+export PATH
+
+prefix=/export/home/wandisco/buildbot/install
+
+if [ "$1" = "m4" ] ; then
+ shift
+ gunzip -c m4-1.4.14.tar.gz | tar xf -
+ cd m4-1.4.14
+ ./configure --prefix=$prefix
+ make
+ make install
+ cd ..
+fi
+
+if [ "$1" = "autoconf" ] ; then
+ shift
+ gunzip -c autoconf-2.68.tar.gz | tar xf -
+ cd autoconf-2.68
+ ./configure --prefix=$prefix
+ make
+ make install
+ cd ..
+fi
+
+if [ "$1" = "openssl" ] ; then
+ shift
+ gunzip -c openssl-1.0.1e.tar.gz | tar xf -
+ cd openssl-1.0.1e
+ ./Configure --prefix=$prefix solaris64-sparcv9-cc -xcode=pic32
+ make
+ make install
+ cd ..
+fi
+
+if [ "$1" = "apr" ] ; then
+ shift
+ gunzip -c apr-1.5.0.tar.gz | tar xf -
+ cd apr-1.5.0
+ CFLAGS='-m64' LDFLAGS='-m64' ./configure --prefix=$prefix
+ make
+ make install
+ cd ..
+fi
+
+if [ "$1" = "apr-util" ] ; then
+ shift
+ gunzip -c apr-util-1.5.3.tar.gz | tar xf -
+ cd apr-util-1.5.3
+ CFLAGS='-m64' LDFLAGS='-m64' ./configure \
+ --prefix=$prefix \
+ --with-apr=$prefix/bin/apr-1-config
+ make
+ make install
+ cd ..
+fi
+
+if [ "$1" = "pcre" ] ; then
+ shift
+  gunzip -c pcre-8.34.tar.gz | tar xf -
+ cd pcre-8.34
+ CC='cc -m64' CXX='CC -m64' ./configure --prefix=$prefix
+ make
+ make install
+ cd ..
+fi
+
+if [ "$1" = "httpd" ] ; then
+ shift
+ gunzip -c httpd-2.4.16.tar.gz | tar xf -
+ cd httpd-2.4.16
+ CFLAGS='-m64' LDFLAGS='-m64' ./configure \
+ --prefix=$prefix \
+ --with-apr=$prefix/bin/apr-1-config \
+ --with-apr-util=$prefix/bin/apu-1-config \
+ --with-ssl=$prefix \
+ --with-pcre=$prefix \
+ --enable-so \
+ --enable-mpms-shared=all \
+ --enable-mods-static='core log_config logio version unixd authn_core authz_core http' \
+ --enable-mods-shared='alias authz_user authn_file authn_basic dav ssl env mime'
+ make
+ make install
+ cd ..
+fi
+
+if [ "$1" = "python" ] ; then
+ shift
+ gunzip -c Python-2.7.5.tgz | tar xf -
+ cd Python-2.7.5
+ CC='cc -mt -m64' CXX='CC -mt -m64' ./configure --prefix=$prefix
+ make
+ make install
+ cd ..
+fi
+
+if [ "$1" = "hashlib" ] ; then
+ shift
+ gunzip -c hashlib-20081119.tar.gz | tar xf -
+ cd hashlib-20081119
+ python setup.py build --openssl-prefix=$prefix
+ python setup.py install
+ cd ..
+fi
+
+if [ "$1" = "scons" ] ; then
+ shift
+ gunzip -c scons-2.3.0.tar.gz | tar xf -
+ cd scons-2.3.0
+ python setup.py install --prefix=$prefix
+ cd ..
+fi
+
+if [ "$1" = "serf" ] ; then
+ shift
+ gunzip -c serf-1.3.4.tar.gz | tar xf -
+ cd serf-1.3.4
+  patch -p0 < ../serf.patch
+ scons install CC='cc -m64' \
+ PREFIX=$prefix APR=$prefix APU=$prefix OPENSSL=$prefix
+ cd ..
+fi
+
+if [ "$1" = "sqlite" ] ; then
+ shift
+ unzip sqlite-amalgamation-3071501.zip
+fi
+
+if [ "$1" = "pysqlite" ] ; then
+ shift
+ gunzip -c pysqlite-2.6.3.tar.gz | tar xf -
+ cd pysqlite-2.6.3
+ unzip ../sqlite-amalgamation-3071501.zip
+ mv sqlite-amalgamation-3071501/sqlite3.h src
+ mv sqlite-amalgamation-3071501/sqlite3.c .
+ python setup.py static_build
+ python setup.py install
+ cd ..
+fi
+
+if [ "$1" = "subversion" ] ; then
+ shift
+ gunzip -c subversion-1.8.8.tar.gz | tar xf -
+ cd subversion-1.8.8
+ unzip ../sqlite-amalgamation-3071501.zip
+ mv sqlite-amalgamation-3071501/ sqlite-amalgamation
+ LD_LIBRARY_PATH=/export/home/wandisco/buildbot/install/lib \
+ CC='cc -m64' ./configure \
+ --prefix=$prefix \
+ --with-apr=$prefix \
+ --with-apr-util=$prefix \
+ --with-serf=$prefix \
+ --with-apxs=$prefix/bin/apxs
+ make
+ cd ..
+fi
+
+if [ "$1" = "iconv" ] ; then
+ shift
+ gunzip -c libiconv-1.14.tar.gz | tar xf -
+ cd libiconv-1.14
+ CC='cc -m64' ./configure
+ make
+ make install
+ cd ..
+fi
+
+if [ "$1" = "automake" ] ; then
+ shift
+ gunzip -c automake-1.11.6.tar.gz | tar xf -
+ cd automake-1.11.6
+  ./configure --prefix=$prefix
+ make
+ make install
+ cd ..
+fi
+
+if [ "$1" = "libtool" ] ; then
+ shift
+ gunzip -c libtool-2.2.10.tar.gz | tar -xf -
+ cd libtool-2.2.10
+  ./configure --prefix=$prefix
+ make
+ make install
+ cd ..
+fi
+
+if [ "$1" = "zope.interface" ] ; then
+ shift
+ gunzip -c zope.interface-4.1.0.tar.gz | tar xf -
+ cd zope.interface-4.1.0
+ python setup.py install --prefix=$prefix
+ cd ..
+fi
+
+if [ "$1" = "twisted" ] ; then
+ shift
+ gunzip -c Twisted-13.2.0.tar.gz | tar xf -
+ cd Twisted-13.2.0
+ patch -p0 < ../twisted.patch
+ python setup.py install --prefix=$prefix
+ cd ..
+fi
+
+if [ "$1" = "buildbot" ] ; then
+ shift
+ gunzip -c buildbot-slave-0.8.8.tar.gz | tar xf -
+ cd buildbot-slave-0.8.8
+ python setup.py install --prefix=$prefix
+ cd ..
+fi
+
+if [ -n "$1" ] ; then
+ echo "Don't know what to do with" $1
+fi
diff --git a/tools/buildbot/slaves/svn-sparc-solaris/serf.patch b/tools/buildbot/slaves/svn-sparc-solaris/serf.patch
new file mode 100644
index 0000000..3d70ecc
--- /dev/null
+++ b/tools/buildbot/slaves/svn-sparc-solaris/serf.patch
@@ -0,0 +1,19 @@
+--- SConstruct.old Mon Mar 3 14:34:37 2014
++++ SConstruct Mon Mar 3 14:34:21 2014
+@@ -210,7 +210,7 @@
+ # Unfortunately we can't set the .dylib compatibility_version option separately
+ # from current_version, so don't use the PATCH level to avoid that build and
+ # runtime patch levels have to be identical.
+-env['SHLIBVERSION'] = '%d.%d.%d' % (MAJOR, MINOR, 0)
++#env['SHLIBVERSION'] = '%d.%d.%d' % (MAJOR, MINOR, 0)
+
+ LIBNAME = 'libserf-%d' % (MAJOR,)
+ if sys.platform != 'win32':
+@@ -248,6 +248,7 @@
+
+ if sys.platform == 'sunos5':
+ env.Append(LIBS='m')
++ env['PLATFORM'] = 'posix'
+ else:
+ # Warning level 4, no unused argument warnings
+ env.Append(CCFLAGS=['/W4', '/wd4100'])
diff --git a/tools/buildbot/slaves/svn-sparc-solaris/svnbuild.sh b/tools/buildbot/slaves/svn-sparc-solaris/svnbuild.sh
new file mode 100755
index 0000000..495cb21
--- /dev/null
+++ b/tools/buildbot/slaves/svn-sparc-solaris/svnbuild.sh
@@ -0,0 +1,51 @@
+#!/bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -x
+. ../svnenv.sh
+
+echo "============ autogen.sh"
+./autogen.sh || exit $?
+
+SVN_VER_MINOR=`awk '/define SVN_VER_MINOR/ { print $3 }' subversion/include/svn_version.h`
+
+cd ../obj
+grep obj/subversion/tests /etc/mnttab > /dev/null || mount-tmpfs
+
+# --enable-optimize adds -flto which breaks the 1.8 C tests because
+# they link main() from a library.
+if [ $SVN_VER_MINOR -gt 8 ]; then
+ OPTIMIZE_OPTION='--enable-optimize'
+fi
+
+echo "============ configure"
+../build/configure CC='cc -m64 -v' \
+ --with-apr=/export/home/wandisco/buildbot/install \
+ --with-apr-util=/export/home/wandisco/buildbot/install \
+ --with-serf=/export/home/wandisco/buildbot/install \
+ --with-apxs=/export/home/wandisco/buildbot/install/bin/apxs \
+ --with-sqlite=/export/home/wandisco/buildbot/sqlite-amalgamation-3071501/sqlite3.c \
+ --disable-shared \
+ $OPTIMIZE_OPTION \
+ || exit $?
+
+echo "============ make"
+make -j30 || exit $?
+
+exit 0
diff --git a/tools/buildbot/slaves/svn-sparc-solaris/svncheck.sh b/tools/buildbot/slaves/svn-sparc-solaris/svncheck.sh
new file mode 100755
index 0000000..ae15ca9
--- /dev/null
+++ b/tools/buildbot/slaves/svn-sparc-solaris/svncheck.sh
@@ -0,0 +1,42 @@
+#!/bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -x
+. ../svnenv.sh
+
+SVN_VER_MINOR=`awk '/define SVN_VER_MINOR/ { print $3 }' subversion/include/svn_version.h`
+
+cd ../obj
+
+# Use GNU iconv since the system one does not work well enough
+LD_PRELOAD_64=/export/home/wandisco/buildbot/install/lib/preloadable_libiconv.so
+export LD_PRELOAD_64
+
+if [ $SVN_VER_MINOR -ge 10 ]; then
+ echo "============ make svnserveautocheck"
+ make svnserveautocheck CLEANUP=1 PARALLEL=30 THREADED=1 GLOBAL_SCHEDULER=1 || exit $?
+elif [ $SVN_VER_MINOR -ge 9 ]; then
+ echo "============ make svnserveautocheck"
+ make svnserveautocheck CLEANUP=1 PARALLEL=30 THREADED=1 || exit $?
+else
+ echo "============ make check"
+ make check CLEANUP=1 PARALLEL=30 THREADED=1 || exit $?
+fi
+
+exit 0
diff --git a/tools/buildbot/slaves/svn-sparc-solaris/svncleanup.sh b/tools/buildbot/slaves/svn-sparc-solaris/svncleanup.sh
new file mode 100755
index 0000000..b828e5e
--- /dev/null
+++ b/tools/buildbot/slaves/svn-sparc-solaris/svncleanup.sh
@@ -0,0 +1,30 @@
+#!/bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -x
+. ../svnenv.sh
+
+cd ../obj
+
+echo "============ make extraclean"
+test -f Makefile && { make extraclean || exit $?; }
+
+grep obj/subversion/tests /etc/mnttab > /dev/null && umount-tmpfs
+
+exit 0
diff --git a/tools/buildbot/slaves/svn-sparc-solaris/svnenv.sh b/tools/buildbot/slaves/svn-sparc-solaris/svnenv.sh
new file mode 100644
index 0000000..48d6b42
--- /dev/null
+++ b/tools/buildbot/slaves/svn-sparc-solaris/svnenv.sh
@@ -0,0 +1,23 @@
+#!/bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+GREP=/usr/bin/grep
+export GREP
+PATH=/opt/csw/gnu:/usr/bin:/usr/ccs/bin:/opt/csw/bin:/export/home/wandisco/buildbot/install/bin
+export PATH
diff --git a/tools/buildbot/slaves/svn-sparc-solaris/twisted.patch b/tools/buildbot/slaves/svn-sparc-solaris/twisted.patch
new file mode 100644
index 0000000..5dfcb23
--- /dev/null
+++ b/tools/buildbot/slaves/svn-sparc-solaris/twisted.patch
@@ -0,0 +1,11 @@
+--- twisted/python/sendmsg.c.old Mon Mar 3 14:32:08 2014
++++ twisted/python/sendmsg.c Mon Mar 3 14:30:22 2014
+@@ -6,6 +6,8 @@
+ #define PY_SSIZE_T_CLEAN 1
+ #include <Python.h>
+
++#define _XPG4_2 1
++
+ #if PY_VERSION_HEX < 0x02050000 && !defined(PY_SSIZE_T_MIN)
+ /* This may cause some warnings, but if you want to get rid of them, upgrade
+ * your Python version. */
diff --git a/tools/buildbot/slaves/svn-x64-centos/list-svn-deps.sh b/tools/buildbot/slaves/svn-x64-centos/list-svn-deps.sh
new file mode 100755
index 0000000..793874c
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-centos/list-svn-deps.sh
@@ -0,0 +1,34 @@
+#!/bin/sh
+# List the versions of all of SVN's dependencies.
+# The output is suitable for putting in the buildbot slave's 'info/host'
+# file, after a general description of the slave machine.
+
+echo "=== SVN dependencies ==="
+DEPS="gcc apr apr-util apr-devel apr-util-devel httpd httpd-devel \
+ neon neon-devel python python-devel ruby ruby-devel"
+#yum -C list $DEPS
+rpm -q ${DEPS} | sort | uniq
+# The SQLite version is found by the name of the amalgamation directory,
+# which is found in the home dir. It is also explicitly referenced in the
+# './configure' line in 'svnbuild.sh'.
+(cd && echo sqlite-3.*[0-9].*[0-9])
+echo
+
+echo "=== SVN test dependencies ==="
+#rpm -q pysqlite | sort | uniq
+echo
+
+JAVA_VER=`java -fullversion 2>&1`
+PY_VER=`python -V 2>&1`
+RUBY_VER=`ruby --version`
+PERL_VER=`perl -v | grep This`
+echo "=== interpreters / bindings ==="
+echo "Java: $JAVA_VER"
+echo "Python: $PY_VER"
+echo "Ruby: $RUBY_VER"
+echo "Perl: $PERL_VER"
+echo
+
+echo "=== BuildBot version ==="
+buildbot --version
+echo
diff --git a/tools/buildbot/slaves/svn-x64-centos/svnbuild.sh b/tools/buildbot/slaves/svn-x64-centos/svnbuild.sh
new file mode 100755
index 0000000..eea0863
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-centos/svnbuild.sh
@@ -0,0 +1,80 @@
+#!/bin/bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+set -x
+
+export MAKEFLAGS=-j4
+export PYTHON=/usr/local/python25/bin/python
+
+SVN_VER_MINOR=`awk '/define SVN_VER_MINOR/ { print $3 }' subversion/include/svn_version.h`
+
+if [ $SVN_VER_MINOR -ge 9 ]; then
+ # 1.9 or newer requires APR 1.3.x and Serf 1.3.4
+ # this bubbles out to httpd as well. So use the right dependencies
+ APR=/home/bt/packages/apr-1.3.9-prefix/bin/apr-1-config
+ APU=/home/bt/packages/apr-1.3.9-prefix/bin/apu-1-config
+ APXS=/home/bt/packages/apr-1.3.9-prefix/bin/apxs
+ SERF=/home/bt/packages/apr-1.3.9-prefix
+else
+ APR=/usr
+ APU=/usr
+ APXS=/usr/sbin/apxs
+ SERF=/usr/local
+fi
+
+echo "========= autogen.sh"
+./autogen.sh || exit $?
+
+echo "========= configure"
+# --with-junit=/usr/share/java/junit.jar
+# --with-jdk=/usr/lib/jvm/java-1.6.0-openjdk-1.6.0.0.x86_64 \
+# --without-berkeley-db \
+# --disable-shared \
+#CFLAGS='-fprofile-arcs -ftest-coverage' \
+./configure --enable-javahl --enable-maintainer-mode \
+ --with-neon=/usr \
+ --with-serf=$SERF \
+ --with-apxs=$APXS \
+ --with-berkeley-db \
+ --with-apr=$APR \
+ --with-apr-util=$APU \
+ --with-jdk=/opt/java/jdk1.6.0_15 \
+ --with-junit=/home/bt/junit-4.4.jar \
+ --with-sqlite=/home/bt/packages/sqlite-amalgamation-dir/sqlite3.c \
+ || exit $?
+
+echo "========= make"
+make || exit $?
+
+echo "========= make javahl"
+make javahl -j1 || exit $?
+
+echo "========= make swig-py"
+make swig-py || exit $?
+
+echo "========= make swig-pl"
+make swig-pl -j1 || exit $?
+
+echo "========= make swig-rb"
+make swig-rb -j1 || exit $?
+
+exit 0
diff --git a/tools/buildbot/slaves/svn-x64-centos/svncheck-bindings.sh b/tools/buildbot/slaves/svn-x64-centos/svncheck-bindings.sh
new file mode 100755
index 0000000..4e8c1aa
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-centos/svncheck-bindings.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+set -x
+
+RC=0
+
+echo "========= make check-javahl"
+make check-javahl || RC=$?
+
+echo "========= make check-swig-pl"
+make check-swig-pl || RC=$?
+
+echo "========= make check-swig-py"
+make check-swig-py || RC=$?
+
+# ruby test currently failing, generating SEGV on centos
+#echo "========= make check-swig-rb"
+#make check-swig-rb # || RC=$?
+
+exit ${RC}
diff --git a/tools/buildbot/slaves/svn-x64-centos/svncheck.sh b/tools/buildbot/slaves/svn-x64-centos/svncheck.sh
new file mode 100755
index 0000000..77ed04d
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-centos/svncheck.sh
@@ -0,0 +1,54 @@
+#!/bin/bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+
+set -x
+
+if test -z "$1" ; then
+ echo "Missing FS_TYPE specifier (arg #1)."
+ exit 1
+fi
+
+echo "========= mount RAM disc"
+# ignore the result: if it fails, the test will just take longer...
+mkdir -p subversion/tests/cmdline/svn-test-work
+test -e ../mount-ramdrive && ../mount-ramdrive
+
+echo "========= make"
+case "$2" in
+ ""|ra_serf)
+ make davautocheck FS_TYPE=$1 HTTP_LIBRARY=serf CLEANUP=1 || exit $?
+ ;;
+ ra_svn)
+ make svnserveautocheck FS_TYPE="$1" CLEANUP=1 || exit $?
+ ;;
+ ra_local)
+ make check FS_TYPE="$1" CLEANUP=1 || exit $?
+ ;;
+ *)
+ echo "Bad RA specifier (arg #2): '$2'."
+ exit 1
+ ;;
+esac
+
+# the bindings are checked with svncheck-bindings.sh
+exit 0
diff --git a/tools/buildbot/slaves/svn-x64-centos/svnclean.sh b/tools/buildbot/slaves/svn-x64-centos/svnclean.sh
new file mode 100755
index 0000000..9a5e715
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-centos/svnclean.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+set -x
+
+echo "========= unmount RAM disc"
+# ignore the result: if there was no ramdisc, that's fine
+test -e ../unmount-ramdrive && ../unmount-ramdrive
+
+echo "========= make extraclean"
+test -e Makefile && { make extraclean || exit $?; }
+
+exit 0
diff --git a/tools/buildbot/slaves/svn-x64-centos/svnlog.sh b/tools/buildbot/slaves/svn-x64-centos/svnlog.sh
new file mode 100755
index 0000000..c2302e1
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-centos/svnlog.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+set -x
+
+# upload file to server
+FILENAME=tests-`date +%Y%m%d%H%M`.log.tgz
+tar -czf $FILENAME tests.log
+ftp -n www.mobsol.be < ../ftpscript
+rm $FILENAME
+
+echo "Logs of the testrun can be found here: http://www.mobsol.be/logs/eh-debsarge1/$FILENAME"
+
+exit 0
diff --git a/tools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svnbuild.sh b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svnbuild.sh
new file mode 100755
index 0000000..74bc436
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svnbuild.sh
@@ -0,0 +1,44 @@
+#!/bin/bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+echo "========= autogen.sh"
+./autogen.sh || exit $?
+
+echo "========= configure"
+./configure --with-serf=./serf --with-apxs=/usr/sbin/apxs --without-berkeley-db --prefix=/Users/buildslave17/slave17/osx10.6-gcc4.2.1-x64-serf/build/svninstall --with-apache-libexecdir=/Users/buildslave17/slave17/osx10.6-gcc4.2.1-x64-serf/build/libexec || exit $?
+
+echo "========= make"
+make || exit $?
+
+echo "========= make swig-py"
+make swig-py || exit $?
+
+#echo "========= make swig-pl"
+#make swig-pl || exit $?
+
+#echo "========= make swig-rb"
+#make swig-rb || exit $?
+
+echo "========= make install"
+make install || exit $?
+
+exit 0
diff --git a/tools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svncheck.sh b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svncheck.sh
new file mode 100755
index 0000000..7233d3c
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svncheck.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# Usage: svncheck.sh FS_TYPE [ra_serf]
+# Runs "make davautocheck"; s records a failure, hence exit ${s:-0} below.
+
+set -x
+
+if test -z "$1" ; then
+ echo "Missing FS_TYPE specifier (arg #1)."
+ exit 1
+fi
+
+echo "========= make check"
+if [ "$2" = "ra_serf" ]; then
+ make davautocheck FS_TYPE="$1" HTTP_LIBRARY=serf CLEANUP=1 || s=$?;
+else
+ make davautocheck FS_TYPE="$1" CLEANUP=1 || s=$?;
+fi
+
+echo "========= cat tests.log"
+cat tests.log
+
+exit ${s:-0}
diff --git a/tools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svnclean.sh b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svnclean.sh
new file mode 100755
index 0000000..586e203
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svnclean.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# Clean step: "make extraclean" when a Makefile exists, then wipe the
+# install tree, the .svn metadata and buildbot's source stamp.
+
+# ../unmount_ramd.sh
+
+echo "========= make extraclean"
+test -e Makefile && { make extraclean || exit $?; }
+rm -rf ../build/*
+rm -rf .svn
+rm -rf .buildbot-sourcedata
+
+exit 0
diff --git a/tools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svnlog.sh b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svnlog.sh
new file mode 100755
index 0000000..ce845f8
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svnlog.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# NOTE(review): the URL below names osx10.4-gcc4.0.1-ia32, but this slave
+# is an x64 10.6 builder -- confirm the remote log directory is correct.
+
+# Compress tests.log and upload it via the canned commands in ../ftpscript.
+FILENAME=tests-`date +%Y%m%d%H%M`.log.tgz
+tar -czf $FILENAME tests.log
+ftp -n www.mobsol.be < ../ftpscript
+rm $FILENAME
+
+echo "Logs of the testrun can be found here: http://www.mobsol.be/logs/osx10.4-gcc4.0.1-ia32/$FILENAME"
+
+exit 0
diff --git a/tools/buildbot/slaves/svn-x64-macosx-gnu-shared/svnbuild.sh b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared/svnbuild.sh
new file mode 100755
index 0000000..a8750eb
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared/svnbuild.sh
@@ -0,0 +1,44 @@
+#!/bin/bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# Build step for the svn-x64-macosx-gnu-shared buildslave: autogen,
+# configure (no serf), build, build swig-py, install.
+
+echo "========= autogen.sh"
+./autogen.sh || exit $?
+
+echo "========= configure"
+./configure --without-serf --with-apxs=/usr/sbin/apxs --without-berkeley-db --prefix=/Users/buildslave17/slave17/osx10.6-gcc4.2.1-x64/build/svninstall --with-apache-libexecdir=/Users/buildslave17/slave17/osx10.6-gcc4.2.1-x64/build/libexec || exit $?
+
+echo "========= make"
+make || exit $?
+
+echo "========= make swig-py"
+make swig-py || exit $?
+
+#echo "========= make swig-pl"
+#make swig-pl || exit $?
+
+#echo "========= make swig-rb"
+#make swig-rb || exit $?
+
+echo "========= make install"
+make install || exit $?
+
+exit 0
diff --git a/tools/buildbot/slaves/svn-x64-macosx-gnu-shared/svncheck.sh b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared/svncheck.sh
new file mode 100755
index 0000000..7233d3c
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared/svncheck.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# Usage: svncheck.sh FS_TYPE [ra_serf]
+# Runs "make davautocheck"; s records a failure, hence exit ${s:-0} below.
+
+set -x
+
+if test -z "$1" ; then
+ echo "Missing FS_TYPE specifier (arg #1)."
+ exit 1
+fi
+
+echo "========= make check"
+if [ "$2" = "ra_serf" ]; then
+ make davautocheck FS_TYPE="$1" HTTP_LIBRARY=serf CLEANUP=1 || s=$?;
+else
+ make davautocheck FS_TYPE="$1" CLEANUP=1 || s=$?;
+fi
+
+echo "========= cat tests.log"
+cat tests.log
+
+exit ${s:-0}
diff --git a/tools/buildbot/slaves/svn-x64-macosx-gnu-shared/svnclean.sh b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared/svnclean.sh
new file mode 100755
index 0000000..586e203
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared/svnclean.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# Clean step: "make extraclean" when a Makefile exists, then wipe the
+# install tree, the .svn metadata and buildbot's source stamp.
+
+# ../unmount_ramd.sh
+
+echo "========= make extraclean"
+test -e Makefile && { make extraclean || exit $?; }
+rm -rf ../build/*
+rm -rf .svn
+rm -rf .buildbot-sourcedata
+
+exit 0
diff --git a/tools/buildbot/slaves/svn-x64-macosx-gnu-shared/svnlog.sh b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared/svnlog.sh
new file mode 100755
index 0000000..ce845f8
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared/svnlog.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# NOTE(review): the URL below names osx10.4-gcc4.0.1-ia32, but this slave
+# is an x64 10.6 builder -- confirm the remote log directory is correct.
+
+# Compress tests.log and upload it via the canned commands in ../ftpscript.
+FILENAME=tests-`date +%Y%m%d%H%M`.log.tgz
+tar -czf $FILENAME tests.log
+ftp -n www.mobsol.be < ../ftpscript
+rm $FILENAME
+
+echo "Logs of the testrun can be found here: http://www.mobsol.be/logs/osx10.4-gcc4.0.1-ia32/$FILENAME"
+
+exit 0
diff --git a/tools/buildbot/slaves/svn-x64-macosx/mkramdisk.sh b/tools/buildbot/slaves/svn-x64-macosx/mkramdisk.sh
new file mode 100755
index 0000000..45708eb
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-macosx/mkramdisk.sh
@@ -0,0 +1,63 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -x
+
+if [ -z "$1" ]; then
+ echo "Missing parameter: volume name"
+ exit 1
+fi
+
+if [ -z "$2" ]; then
+ echo "Missing parameter: RAMdisk config file"
+ exit 1
+fi
+
+volume="/Volumes/$1"
+ramconf="$2"
+
+ramconfpath=$(dirname "${ramconf}")
+if [ ! -d "${ramconfpath}" ]; then
+ echo "Missing RAMdisk config file path: ${ramconfpath}"
+ exit 1
+fi
+if [ -f "${ramconf}" ]; then
+ echo "RAMdisk config file exists: ${ramconf}"
+ exit 1
+fi
+
+if [ -d "${volume}" ]; then
+ echo "Mount point exists: ${volume}"
+ exit 1
+fi
+
+mount | grep "^/dev/disk[0-9][0-9]* on ${volume} (hfs" >/dev/null || {
+ set -e
+ echo -n "" > "${ramconf}"
+
+ # The bare $(echo ...) strips trailing whitespace that older
+ # versions of hdiutil append to the device name.
+ device=$(echo $(hdiutil attach -nomount ram://2000000))
+ newfs_hfs -M 0700 -v "$1" "${device}"
+ hdiutil mountvol "${device}"
+
+ echo -n "${device}" > "${ramconf}"
+}
+
+exit 0
diff --git a/tools/buildbot/slaves/svn-x64-macosx/rmramdisk.sh b/tools/buildbot/slaves/svn-x64-macosx/rmramdisk.sh
new file mode 100755
index 0000000..c5db9bb
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-macosx/rmramdisk.sh
@@ -0,0 +1,62 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -x
+
+if [ -z "$1" ]; then
+ echo "Missing parameter: volume name"
+ exit 1
+fi
+
+if [ -z "$2" ]; then
+ echo "Missing parameter: RAMdisk config file"
+ exit 1
+fi
+
+volume="/Volumes/$1"
+ramconf="$2"
+
+if [ ! -f "${ramconf}" ]; then
+ mount | grep "^/dev/disk[0-9][0-9]* on ${volume} (hfs" >/dev/null || {
+ echo "Not mounted: ${volume}"
+ exit 0
+ }
+ echo "Missing RAMdisk config file: ${ramconf}"
+ exit 1
+fi
+
+if [ ! -d "${volume}" ]; then
+ echo "Mount point missing: ${volume}"
+ exit 1
+fi
+
+device=$(cat "${ramconf}")
+devfmt=$(echo "${device}" | grep "^/dev/disk[0-9][0-9]*$")
+if [ "${device}" != "${devfmt}" ]; then
+ echo "Invalid device name: ${device}"
+ exit 1
+fi
+
+mount | grep "^${device} on ${volume} (hfs" >/dev/null && {
+ set -e
+ rm "${ramconf}"
+ hdiutil detach "${device}" -force
+}
+
+exit 0
diff --git a/tools/buildbot/slaves/svn-x64-macosx/setenv.sh b/tools/buildbot/slaves/svn-x64-macosx/setenv.sh
new file mode 100644
index 0000000..d4130e1
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-macosx/setenv.sh
@@ -0,0 +1,69 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+## This script calls a helper that provides the following environment
+## variables:
+##
+## PATH The search path
+## SVNBB_BDB Berkeley DB installation prefix
+## SVNBB_SWIG Swig installation prefix
+## SVNBB_SERF Serf installation prefix
+## Note: Serf should be built only
+## with the system APR/-Util.
+## SVNBB_APR_13_NOTHREAD Path of APR-1.3 with threading disabled
+## SVNBB_APR_15 Path of APR-1.5
+## SVNBB_APR_20_DEV Path of APR-2.0
+## SVNBB_JUNIT The path of the junit.jar
+## SVNBB_PARALLEL Optional: parallelization; defaults to 2
+## SVNBB_PYTHON3ENV Optional: Python 3 virtual environment
+##
+## The invoking script will set a local variable named ${scripts} that
+## is the absolute path of the parent directory of this file.
+
+# Modify this to suit your deployment
+environment=$(cd "${scripts}/../.." && pwd)/environment.sh
+
+eval $(${environment})
+SVNBB_PARALLEL="${SVNBB_PARALLEL-2}"
+
+export PATH
+export SVNBB_BDB
+export SVNBB_SWIG
+export SVNBB_SERF
+export SVNBB_APR_13_NOTHREAD
+export SVNBB_APR_15
+export SVNBB_APR_20_DEV
+export SVNBB_JUNIT
+export SVNBB_PARALLEL
+export SVNBB_PYTHON3ENV
+
+
+# Set the absolute source path
+abssrc=$(pwd)
+
+# Set the path to the RAMdisk device name file
+ramconf=$(dirname "${abssrc}")/ramdisk.conf
+
+# The RAMdisk volume name is the same as the name of the builder
+volume_name=$(basename $(dirname "${abssrc}"))
+if [ -z "${volume_name}" ]; then
+ echo "Missing config parameter: RAMdisk volume name"
+ exit 1
+fi
+
+# Set the absolute build path
+absbld="/Volumes/${volume_name}"
diff --git a/tools/buildbot/slaves/svn-x64-macosx/svnbuild-bindings.sh b/tools/buildbot/slaves/svn-x64-macosx/svnbuild-bindings.sh
new file mode 100755
index 0000000..fa085dd
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-macosx/svnbuild-bindings.sh
@@ -0,0 +1,45 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -e
+set -x
+
+scripts=$(cd $(dirname "$0") && pwd)
+
+. ${scripts}/setenv.sh
+
+#
+# Build all the language bindings (swig-py, swig-pl, swig-rb, javahl)
+#
+
+echo "============ make swig-py"
+cd ${absbld}
+make swig-py
+
+echo "============ make swig-pl"
+cd ${absbld}
+make swig-pl
+
+echo "============ make swig-rb"
+cd ${absbld}
+make swig-rb
+
+echo "============ make javahl"
+cd ${absbld}
+make javahl
diff --git a/tools/buildbot/slaves/svn-x64-macosx/svnbuild.sh b/tools/buildbot/slaves/svn-x64-macosx/svnbuild.sh
new file mode 100755
index 0000000..eff8986
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-macosx/svnbuild.sh
@@ -0,0 +1,108 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -e
+set -x
+
+scripts=$(cd $(dirname "$0") && pwd)
+
+. ${scripts}/setenv.sh
+
+${scripts}/mkramdisk.sh "${volume_name}" "${ramconf}"
+
+# These are the default APR and Serf config options
+serfconfig="--with-serf=${SVNBB_SERF} --with-apxs=/usr/sbin/apxs"
+
+# An optional parameter tells build scripts which version of APR to use
+if [ ! -z "$1" ]; then
+ aprdir=$(eval 'echo $SVNBB_'"$1")
+fi
+if [ ! -z "${aprdir}" -a -d "${aprdir}" ]; then
+ aprconfig="--with-apr=${aprdir} --with-apr-util=${aprdir}"
+ serfconfig=" --without-serf --without-apxs"
+fi
+
+#
+# Step 0: Create a directory for the test log files
+#
+if [ -d "${abssrc}/.test-logs" ]; then
+ rm -fr "${abssrc}/.test-logs"
+fi
+mkdir "${abssrc}/.test-logs" || exit 1
+
+#
+# Step 1: get the latest and greatest amalgamated SQLite
+#
+
+echo "============ get-deps.sh sqlite"
+cd ${abssrc}
+rm -fr sqlite-amalgamation
+./get-deps.sh sqlite
+
+#
+# Step 2: Regenerate build scripts
+#
+
+echo "============ autogen.sh"
+cd ${abssrc}
+./autogen.sh
+
+svnminor=$(awk '/define *SVN_VER_MINOR/ { print $3 }' subversion/include/svn_version.h)
+
+# --enable-optimize adds -flto which breaks the 1.8 C tests because
+# they link main() from a library.
+if [ ${svnminor} -gt 8 ]; then
+ optimizeconfig=' --enable-optimize'
+fi
+
+if [ ${svnminor} -ge 10 ]; then
+ lz4config='--with-lz4=internal'
+ utf8proconfig='--with-utf8proc=internal'
+fi
+
+#
+# Step 3: Configure
+#
+
+echo "============ configure"
+cd ${absbld}
+env CC=clang CXX=clang++ \
+${abssrc}/configure \
+ --prefix="${absbld}/.install-prefix" \
+ --enable-debug${optimizeconfig} \
+ --disable-nls \
+ --disable-mod-activation \
+ ${aprconfig}${serfconfig} \
+ --with-swig="${SVNBB_SWIG}" \
+ --with-berkeley-db=db.h:"${SVNBB_BDB}/include":${SVNBB_BDB}/lib:db \
+ --enable-javahl \
+ --without-jikes \
+ ${lz4config} \
+ ${utf8proconfig} \
+ --with-junit="${SVNBB_JUNIT}"
+
+test -f config.log && mv config.log "${abssrc}/.test-logs/config.log"
+
+#
+# Step 4: build
+#
+
+echo "============ make"
+cd ${absbld}
+make -j${SVNBB_PARALLEL}
diff --git a/tools/buildbot/slaves/svn-x64-macosx/svncheck-bindings.sh b/tools/buildbot/slaves/svn-x64-macosx/svncheck-bindings.sh
new file mode 100755
index 0000000..1fb538b
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-macosx/svncheck-bindings.sh
@@ -0,0 +1,59 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+
+run_tests() {  # usage: run_tests CHECK-TARGET [EXTRA-MAKE-VARS]; exit 1 on failure
+ check="$1"
+ cleanup="$2"
+
+ echo "============ make check-${check}"
+ cd ${absbld}
+ make check-${check} ${cleanup} || exit 1
+}
+
+
+set -x
+
+scripts=$(cd $(dirname "$0") && pwd)
+
+. ${scripts}/setenv.sh
+
+# Parse arguments to find out which tests we should run
+check_swig_py=false
+check_swig_pl=false
+check_swig_rb=false
+check_javahl=false
+
+while [ ! -z "$1" ]; do
+ case "$1" in
+ swig-py) check_swig_py=true;;
+ swig-pl) check_swig_pl=true;;
+ swig-rb) check_swig_rb=true;;
+ javahl) check_javahl=true;;
+ *) exit 1;;
+ esac
+ shift
+done
+
+${check_swig_py} && run_tests swig-py
+${check_swig_pl} && run_tests swig-pl
+${check_swig_rb} && run_tests swig-rb
+${check_javahl} && run_tests javahl JAVAHL_CLEANUP=1
+
+exit 0
diff --git a/tools/buildbot/slaves/svn-x64-macosx/svncheck.sh b/tools/buildbot/slaves/svn-x64-macosx/svncheck.sh
new file mode 100755
index 0000000..0d24286
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-macosx/svncheck.sh
@@ -0,0 +1,98 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+
+run_tests() {  # usage: run_tests RA-LAYER FS-TYPE; exit 1 if the suite failed
+ ra="$1"
+ fs="$2"
+ ok=true
+
+ case "${ra}" in
+ local) check=check; skipC=;;
+ svn) check=svnserveautocheck; skipC="SKIP_C_TESTS=1";;
+ dav) check=davautocheck; skipC="SKIP_C_TESTS=1";;
+ *) exit 1;;
+ esac
+
+ echo "============ make check ${ra}+${fs}"
+ cd ${absbld}
+ make ${check} FS_TYPE=${fs} PARALLEL=${SVNBB_PARALLEL} CLEANUP=1 ${skipC} || ok=false
+
+ # Move any log files to the buildbot work directory
+ test -f tests.log && mv tests.log "${abssrc}/.test-logs/tests-${ra}-${fs}.log"
+ test -f fails.log && mv fails.log "${abssrc}/.test-logs/fails-${ra}-${fs}.log"
+
+ # Remove the test working directory to make space on the RAM disk
+ # for more tests.
+ rm -fr subversion/tests/cmdline/svn-test-work
+
+ ${ok} || exit 1
+}
+
+check_tests() {  # run every selected FS backend over the RA layer in $1
+ ra="$1"
+
+ ${check_fsfs} && run_tests ${ra} fsfs
+ ${check_fsfs_v6} && run_tests ${ra} fsfs-v6
+ ${check_fsfs_v4} && run_tests ${ra} fsfs-v4
+ ${check_bdb} && run_tests ${ra} bdb
+ ${check_fsx} && run_tests ${ra} fsx
+}
+
+
+set -x
+
+scripts=$(cd $(dirname "$0") && pwd)
+
+. ${scripts}/setenv.sh
+
+# Parse arguments to find out which tests we should run
+use_python3=false
+check_local=false
+check_svn=false
+check_dav=false
+check_fsfs=false
+check_fsfs_v6=false
+check_fsfs_v4=false
+check_fsx=false
+check_bdb=false
+
+while [ ! -z "$1" ]; do
+ case "$1" in
+ python3) use_python3=true;;
+ local) check_local=true;;
+ svn) check_svn=true;;
+ dav) check_dav=true;;
+ fsfs) check_fsfs=true;;
+ fsfs-v6) check_fsfs_v6=true;;
+ fsfs-v4) check_fsfs_v4=true;;
+ fsx) check_fsx=true;;
+ bdb) check_bdb=true;;
+ *) exit 1;;
+ esac
+ shift
+done
+
+${use_python3} && test -n "${SVNBB_PYTHON3ENV}" && . ${SVNBB_PYTHON3ENV}/bin/activate
+
+${check_local} && check_tests local
+${check_svn} && check_tests svn
+${check_dav} && check_tests dav
+
+exit 0
diff --git a/tools/buildbot/slaves/svn-x64-macosx/svnclean.sh b/tools/buildbot/slaves/svn-x64-macosx/svnclean.sh
new file mode 100755
index 0000000..b2b0bb3
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-macosx/svnclean.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -e
+set -x
+
+scripts=$(cd $(dirname "$0") && pwd)
+
+. ${scripts}/setenv.sh
+
+${scripts}/rmramdisk.sh "${volume_name}" "${ramconf}"
diff --git a/tools/buildbot/slaves/ubuntu-x64/svnbuild.sh b/tools/buildbot/slaves/ubuntu-x64/svnbuild.sh
new file mode 100755
index 0000000..e8005b9
--- /dev/null
+++ b/tools/buildbot/slaves/ubuntu-x64/svnbuild.sh
@@ -0,0 +1,51 @@
+#!/bin/bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# Build step for the ubuntu-x64 buildslave: autogen, configure with javahl,
+# then build core and bindings (javahl/pl/rb at -j1; presumably not parallel-safe).
+
+set -x
+
+export MAKEFLAGS=-j4
+
+echo "========= autogen.sh"
+./autogen.sh || exit $?
+
+echo "========= configure"
+./configure --enable-javahl --enable-maintainer-mode \
+ --without-berkeley-db \
+ --with-jdk=/usr/lib/jvm/java-7-openjdk-amd64/ \
+ --with-junit=/usr/share/java/junit.jar || exit $?
+
+echo "========= make"
+make || exit $?
+
+echo "========= make javahl"
+make javahl -j1 || exit $?
+
+echo "========= make swig-py"
+make swig-py || exit $?
+
+echo "========= make swig-pl"
+make swig-pl -j1 || exit $?
+
+echo "========= make swig-rb"
+make swig-rb -j1 || exit $?
+
+exit 0
diff --git a/tools/buildbot/slaves/ubuntu-x64/svncheck-bindings.sh b/tools/buildbot/slaves/ubuntu-x64/svncheck-bindings.sh
new file mode 100755
index 0000000..f42c0ec
--- /dev/null
+++ b/tools/buildbot/slaves/ubuntu-x64/svncheck-bindings.sh
@@ -0,0 +1,39 @@
+#!/bin/bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# Binding check step: run every suite even if an earlier one fails;
+# RC ends up holding the status of the last suite that failed.
+
+set -x
+
+RC=0
+
+echo "========= make check-javahl"
+make check-javahl || RC=$?
+
+echo "========= make check-swig-pl"
+make check-swig-pl || RC=$?
+
+echo "========= make check-swig-py"
+make check-swig-py || RC=$?
+
+echo "========= make check-swig-rb"
+make check-swig-rb || RC=$?
+
+exit ${RC}
diff --git a/tools/buildbot/slaves/ubuntu-x64/svncheck.sh b/tools/buildbot/slaves/ubuntu-x64/svncheck.sh
new file mode 100755
index 0000000..227e968
--- /dev/null
+++ b/tools/buildbot/slaves/ubuntu-x64/svncheck.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# Usage: svncheck.sh FS_TYPE
+# The test work area is symlinked into /dev/shm so the tests run on tmpfs.
+
+
+set -x
+
+if test -z "$1" ; then
+ echo "Missing FS_TYPE specifier (arg #1)."
+ exit 1
+fi
+
+echo "========= mount RAM disc"
+test ! -e /dev/shm/svn-test-work && mkdir /dev/shm/svn-test-work
+test -e subversion/tests/cmdline/svn-test-work && rm -rf subversion/tests/cmdline/svn-test-work
+ln -s /dev/shm/svn-test-work subversion/tests/cmdline/
+
+echo "========= make check"
+make check FS_TYPE="$1" CLEANUP=1 || exit $?
+
+# the bindings are checked with svncheck-bindings.sh
+exit 0
diff --git a/tools/buildbot/slaves/ubuntu-x64/svnclean.sh b/tools/buildbot/slaves/ubuntu-x64/svnclean.sh
new file mode 100755
index 0000000..4f886ac
--- /dev/null
+++ b/tools/buildbot/slaves/ubuntu-x64/svnclean.sh
@@ -0,0 +1,29 @@
+#!/bin/bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+set -x
+
+echo "========= make extraclean"
+cd build
+test -e Makefile && { make extraclean || exit $?; }
+
+exit 0
diff --git a/tools/buildbot/slaves/ubuntu-x64/svnlog.sh b/tools/buildbot/slaves/ubuntu-x64/svnlog.sh
new file mode 100755
index 0000000..c2302e1
--- /dev/null
+++ b/tools/buildbot/slaves/ubuntu-x64/svnlog.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+set -x
+
+# upload file to server
+FILENAME=tests-`date +%Y%m%d%H%M`.log.tgz
+tar -czf $FILENAME tests.log
+ftp -n www.mobsol.be < ../ftpscript
+rm $FILENAME
+
+echo "Logs of the testrun can be found here: http://www.mobsol.be/logs/eh-debsarge1/$FILENAME"
+
+exit 0
diff --git a/tools/buildbot/slaves/win32-SharpSvn/svn-config.cmd.template b/tools/buildbot/slaves/win32-SharpSvn/svn-config.cmd.template
new file mode 100644
index 0000000..d283de6
--- /dev/null
+++ b/tools/buildbot/slaves/win32-SharpSvn/svn-config.cmd.template
@@ -0,0 +1,50 @@
+@echo off
+REM ================================================================
+REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+REM ================================================================
+
+CALL "c:\Program Files\Microsoft Visual Studio 9.0\VC\vcvarsall.bat" x86
+
+SET TESTDIR=E:\Full
+SET TESTPORT=1234
+SET "NANTARGS= "
+
+SET TEMP=%TESTDIR%\temp
+SET TMP=%TEMP%
+
+IF NOT EXIST "%TESTDIR%\" MKDIR "%TESTDIR%"
+IF NOT EXIST "%TEMP%\" MKDIR "%TEMP%"
+
+
+
+
+
+SET SVN_URL=
+SET SVN_RELURL=
+for /F "usebackq tokens=1,* delims=:" %%i IN (`svn info .`) do (
+
+ IF "%%i" == "URL" (
+ SET SVN_URL=%%j
+ ) ELSE IF "%%i" == "Relative URL" (
+ SET SVN_RELURL=%%j
+ )
+)
+SET SVN_URL=%SVN_URL:~1%
+SET SVN_RELURL=%SVN_RELURL:~3%
+SET SVN_SUBBRANCH=%SVN_RELURL:~11%
+SET SVN_BRANCH=%SVN_SUBBRANCH:branches/=%
diff --git a/tools/buildbot/slaves/win32-SharpSvn/svntest-bindings.cmd b/tools/buildbot/slaves/win32-SharpSvn/svntest-bindings.cmd
new file mode 100644
index 0000000..67b9b8e
--- /dev/null
+++ b/tools/buildbot/slaves/win32-SharpSvn/svntest-bindings.cmd
@@ -0,0 +1,98 @@
+@echo off
+REM ================================================================
+REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+REM ================================================================
+
+SETLOCAL ENABLEEXTENSIONS ENABLEDELAYEDEXPANSION
+
+CALL ..\svn-config.cmd
+IF ERRORLEVEL 1 EXIT /B 1
+
+IF "%SVN_BRANCH%" LEQ "1.6.x" (
+ ECHO --- Building 1.6.x or older: Skipping bindings ---
+ EXIT /B 0
+)
+
+IF "%SVN_BRANCH%" LSS "1.9." (
+ IF NOT EXIST "%TESTDIR%\bin" MKDIR "%TESTDIR%\bin"
+ xcopy /y /i ..\deps\release\bin\*.dll "%TESTDIR%\bin"
+
+ PATH %TESTDIR%\bin;!PATH!
+)
+
+SET result=0
+
+if "%SVN_BRANCH%" GTR "1.9." (
+
+ python win-tests.py -r -f fsfs --swig=python "%TESTDIR%\tests"
+
+ IF ERRORLEVEL 1 (
+ echo [Python tests reported error !ERRORLEVEL!] 1>&2
+ SET result=1
+ ) ELSE (
+ echo Done.
+ )
+
+) ELSE (
+ IF EXIST "%TESTDIR%\swig" rmdir /s /q "%TESTDIR%\swig"
+ mkdir "%TESTDIR%\swig\py-release\libsvn"
+ mkdir "%TESTDIR%\swig\py-release\svn"
+
+ xcopy "release\subversion\bindings\swig\python\*.pyd" "%TESTDIR%\swig\py-release\libsvn\*.pyd" > nul:
+ xcopy "release\subversion\bindings\swig\python\libsvn_swig_py\*.dll" "%TESTDIR%\swig\py-release\libsvn\*.dll" > nul:
+ xcopy "subversion\bindings\swig\python\*.py" "%TESTDIR%\swig\py-release\libsvn\*.py" > nul:
+ xcopy "subversion\bindings\swig\python\svn\*.py" "%TESTDIR%\swig\py-release\svn\*.py" > nul:
+
+ SET PYTHONPATH=%TESTDIR%\swig\py-release
+
+ python subversion\bindings\swig\python\tests\run_all.py
+ IF ERRORLEVEL 1 (
+ echo [Python tests reported error !ERRORLEVEL!] 1>&2
+ REM SET result=1
+ ) ELSE (
+ echo Done.
+ )
+)
+
+if "%SVN_BRANCH%" GTR "1.9." (
+
+ python win-tests.py -d -f fsfs --swig=perl "%TESTDIR%\tests"
+
+ IF ERRORLEVEL 1 (
+ echo [Perl tests reported error !ERRORLEVEL!] 1>&2
+ SET result=1
+ ) ELSE (
+ echo Done.
+ )
+
+)
+
+if "%SVN_BRANCH%" GTR "1.9." (
+ python win-tests.py -d -f fsfs --swig=ruby "%TESTDIR%\tests"
+
+ IF ERRORLEVEL 1 (
+ echo [Ruby tests reported error !ERRORLEVEL!] 1>&2
+ REM SET result=1
+ ) ELSE (
+ echo Done.
+ )
+
+ taskkill /im svnserve.exe /f
+)
+
+exit /b %result%
diff --git a/tools/buildbot/slaves/win32-SharpSvn/svntest-build-bindings.cmd b/tools/buildbot/slaves/win32-SharpSvn/svntest-build-bindings.cmd
new file mode 100644
index 0000000..a3eb1ed
--- /dev/null
+++ b/tools/buildbot/slaves/win32-SharpSvn/svntest-build-bindings.cmd
@@ -0,0 +1,48 @@
+@echo off
+REM ================================================================
+REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+REM ================================================================
+
+SETLOCAL ENABLEEXTENSIONS ENABLEDELAYEDEXPANSION
+
+CALL ..\svn-config.cmd
+IF ERRORLEVEL 1 EXIT /B 1
+
+IF "%SVN_BRANCH%" LEQ "1.6.x" (
+ ECHO --- Building 1.6.x: Skipping bindings ---
+ EXIT /B 0
+)
+
+SET DEBUG_TARGETS=/t:__ALL_TESTS__
+SET RELEASE_TARGETS=/t:__SWIG_PYTHON__
+
+if "%SVN_BRANCH%" GTR "1.9." (
+ SET DEBUG_TARGETS=%DEBUG_TARGETS% /t:__SWIG_PERL__
+)
+
+if "%SVN_BRANCH%" GTR "1.9." (
+ SET DEBUG_TARGETS=%DEBUG_TARGETS% /t:__SWIG_RUBY__
+)
+
+msbuild subversion_vcnet.sln /m /v:m /p:Configuration=Debug /p:Platform=Win32 %DEBUG_TARGETS%
+IF ERRORLEVEL 1 EXIT /B 1
+
+msbuild subversion_vcnet.sln /m /v:m /p:Configuration=Release /p:Platform=Win32 %RELEASE_TARGETS%
+IF ERRORLEVEL 1 EXIT /B 1
+
+EXIT /B 0
diff --git a/tools/buildbot/slaves/win32-SharpSvn/svntest-build.cmd b/tools/buildbot/slaves/win32-SharpSvn/svntest-build.cmd
new file mode 100644
index 0000000..41a8438
--- /dev/null
+++ b/tools/buildbot/slaves/win32-SharpSvn/svntest-build.cmd
@@ -0,0 +1,36 @@
+@echo off
+REM ================================================================
+REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+REM ================================================================
+
+SETLOCAL ENABLEEXTENSIONS ENABLEDELAYEDEXPANSION
+
+CALL ..\svn-config.cmd
+IF ERRORLEVEL 1 EXIT /B 1
+
+PUSHD ..\deps
+
+nant gen-dev -D:wc=..\build -D:impBase=../deps/build/win32 -D:botBuild=true %SVN_NANT_ARGS%
+IF ERRORLEVEL 1 EXIT /B 1
+
+POPD
+
+msbuild subversion_vcnet.sln /m /v:m /p:Configuration=Debug /p:Platform=Win32 /t:__ALL_TESTS__ %SVN_MSBUILD_ARGS%
+IF ERRORLEVEL 1 EXIT /B 1
+
+EXIT /B 0
diff --git a/tools/buildbot/slaves/win32-SharpSvn/svntest-cleanup.cmd b/tools/buildbot/slaves/win32-SharpSvn/svntest-cleanup.cmd
new file mode 100644
index 0000000..571e2c0
--- /dev/null
+++ b/tools/buildbot/slaves/win32-SharpSvn/svntest-cleanup.cmd
@@ -0,0 +1,80 @@
+@echo off
+REM ================================================================
+REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+REM ================================================================
+
+SETLOCAL ENABLEEXTENSIONS ENABLEDELAYEDEXPANSION
+
+CALL ..\svn-config.cmd
+
+IF NOT EXIST "..\deps\" MKDIR "..\deps"
+
+PUSHD ..\deps
+ECHO Checking dependencies in %CD%
+
+IF NOT EXIST "imports\" (
+ svn co https://ctf.open.collab.net/svn/repos/sharpsvn/trunk/imports imports --username guest --password ""
+)
+IF NOT EXIST build\imports.done (
+ svn up imports --username guest --password ""
+ copy /y imports\dev-default.build default.build
+ nant prep-dev %NANTARGS%
+ IF ERRORLEVEL 1 (
+ EXIT /B 1
+ )
+ del release\bin\*svn* release\bin\_*.* 2>nul:
+ ECHO. > build\imports.done
+)
+
+POPD
+
+PUSHD "%TEMP%"
+IF NOT ERRORLEVEL 1 (
+ rmdir /s /q "%TEMP%" 2> nul:
+)
+POPD
+
+
+taskkill /im msbuild.exe /f 2> nul:
+taskkill /im svn.exe /f 2> nul:
+taskkill /im svnlook.exe /f 2> nul:
+taskkill /im svnadmin.exe /f 2> nul:
+taskkill /im svnserve.exe /f 2> nul:
+taskkill /im svnrdump.exe /f 2> nul:
+taskkill /im svnsync.exe /f 2> nul:
+taskkill /im httpd.exe /f 2> nul:
+taskkill /im client-test.exe /f 2> nul:
+taskkill /im fs-test.exe /f 2> nul:
+taskkill /im op-depth-test.exe /f 2> nul:
+taskkill /im atomic-ra-revprop-change.exe /f 2> nul:
+taskkill /im java.exe /f 2> nul:
+taskkill /im perl.exe /f 2> nul:
+taskkill /im ruby.exe /f 2> nul:
+taskkill /im mspdbsrv.exe /f 2> nul:
+
+IF EXIST "%TESTDIR%\swig\" (
+ rmdir /s /q "%TESTDIR%\swig"
+)
+
+IF EXIST "%TESTDIR%\tests\" (
+ PUSHD "%TESTDIR%\tests\"
+ rmdir /s /q "%TESTDIR%\tests\" 2> nul:
+ POPD
+)
+
+exit /B 0
diff --git a/tools/buildbot/slaves/win32-SharpSvn/svntest-javahl.cmd b/tools/buildbot/slaves/win32-SharpSvn/svntest-javahl.cmd
new file mode 100644
index 0000000..0b0a507
--- /dev/null
+++ b/tools/buildbot/slaves/win32-SharpSvn/svntest-javahl.cmd
@@ -0,0 +1,46 @@
+@echo off
+REM ================================================================
+REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+REM ================================================================
+
+SETLOCAL ENABLEEXTENSIONS ENABLEDELAYEDEXPANSION
+
+CALL ..\svn-config.cmd
+IF ERRORLEVEL 1 EXIT /B 1
+
+IF "%SVN_BRANCH%" LEQ "1.6.x" (
+ ECHO --- Building 1.6.x or older: Skipping JavaHL ---
+ EXIT /B 0
+)
+
+IF "%SVN_BRANCH%" LSS "1.9." (
+ IF NOT EXIST "%TESTDIR%\bin" MKDIR "%TESTDIR%\bin"
+ xcopy /y /i ..\deps\release\bin\*.dll "%TESTDIR%\bin"
+
+ PATH %TESTDIR%\bin;!PATH!;!JAVADIR!
+)
+
+SET result=0
+
+python win-tests.py -d -f fsfs --javahl "%TESTDIR%\tests"
+IF ERRORLEVEL 1 (
+ ECHO [JavaHL test runner reported error !ERRORLEVEL!] 1>&2
+ SET result=1
+)
+
+EXIT /b %result%
diff --git a/tools/buildbot/slaves/win32-SharpSvn/svntest-template.cmd b/tools/buildbot/slaves/win32-SharpSvn/svntest-template.cmd
new file mode 100644
index 0000000..1034173
--- /dev/null
+++ b/tools/buildbot/slaves/win32-SharpSvn/svntest-template.cmd
@@ -0,0 +1,24 @@
+@echo off
+REM ================================================================
+REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+REM ================================================================
+
+SETLOCAL ENABLEEXTENSIONS ENABLEDELAYEDEXPANSION
+
+CALL ..\svn-config.cmd
+IF ERRORLEVEL 1 EXIT /B 1
diff --git a/tools/buildbot/slaves/win32-SharpSvn/svntest-test.cmd b/tools/buildbot/slaves/win32-SharpSvn/svntest-test.cmd
new file mode 100644
index 0000000..18a130b
--- /dev/null
+++ b/tools/buildbot/slaves/win32-SharpSvn/svntest-test.cmd
@@ -0,0 +1,98 @@
+@ECHO off
+REM ================================================================
+REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+REM ================================================================
+
+SETLOCAL ENABLEEXTENSIONS ENABLEDELAYEDEXPANSION
+
+CALL ..\svn-config.cmd
+IF ERRORLEVEL 1 EXIT /B 1
+
+
+SET MODE=-d
+SET PARALLEL=
+SET ARGS=
+
+SET FSFS=
+SET LOCAL=
+SET RC=0
+:next
+
+IF "%1" == "-r" (
+ SET MODE=-r
+ SHIFT
+) ELSE IF "%1" == "-d" (
+ SET MODE=-d
+ SHIFT
+) ELSE IF "%1" == "-p" (
+ SET PARALLEL=-p
+ SHIFT
+) ELSE IF "%1" == "fsfs" (
+ SET FSFS=1
+ SHIFT
+) ELSE IF "%1" == "local" (
+ SET LOCAL=1
+ SHIFT
+) ELSE IF "%1" == "svn" (
+ SET SVN=1
+ SHIFT
+) ELSE IF "%1" == "serf" (
+ SET DAV=1
+ SHIFT
+) ELSE IF "%1" == "dav" (
+ SET DAV=1
+ SHIFT
+) ELSE (
+ SET ARGS=!ARGS! -t %1
+ SHIFT
+)
+
+IF NOT "%1" == "" GOTO next
+
+taskkill /im svnserve.exe httpd.exe /f 2> nul:
+
+IF "%SVN_BRANCH%" LSS "1.9." (
+ IF NOT EXIST "%TESTDIR%\bin" MKDIR "%TESTDIR%\bin"
+ xcopy /y /i ..\deps\release\bin\*.dll "%TESTDIR%\bin"
+
+ PATH %TESTDIR%\bin;!PATH!
+)
+
+IF "%LOCAL%+%FSFS%" == "1+1" (
+ echo win-tests.py -c %PARALLEL% %MODE% -f fsfs %ARGS% "%TESTDIR%\tests"
+ win-tests.py -c %PARALLEL% %MODE% -f fsfs %ARGS% "%TESTDIR%\tests"
+ IF ERRORLEVEL 1 SET RC=1
+)
+
+IF "%SVN%+%FSFS%" == "1+1" (
+ echo win-tests.py -c %PARALLEL% %MODE% -f fsfs -u svn://127.0.0.1 %ARGS% "%TESTDIR%\tests"
+ win-tests.py -c %PARALLEL% %MODE% -f fsfs -u svn://127.0.0.1 %ARGS% "%TESTDIR%\tests"
+ IF ERRORLEVEL 1 SET RC=1
+)
+
+IF "%DAV%+%FSFS%" == "1+1" (
+ echo win-tests.py -c %PARALLEL% %MODE% -f fsfs --httpd-no-log --httpd-dir "%CD%\..\deps\release\httpd" --httpd-port %TESTPORT% -u http://127.0.0.1:%TESTPORT% %ARGS% "%TESTDIR%\tests"
+ win-tests.py -c %PARALLEL% %MODE% -f fsfs --httpd-no-log --httpd-dir "%CD%\..\deps\release\httpd" --httpd-port %TESTPORT% -u http://127.0.0.1:%TESTPORT% %ARGS% "%TESTDIR%\tests"
+ IF ERRORLEVEL 1 SET RC=1
+)
+
+IF EXIST "%TEMP%\svn-*" (
+ echo "SVN Files left:"
+ dir "%TEMP%"
+)
+EXIT /B %RC%
diff --git a/tools/buildbot/slaves/win32-xp-VS2005/config.bat b/tools/buildbot/slaves/win32-xp-VS2005/config.bat
new file mode 100644
index 0000000..2178f93
--- /dev/null
+++ b/tools/buildbot/slaves/win32-xp-VS2005/config.bat
@@ -0,0 +1,40 @@
+REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+
+@echo off
+set HTTPD_BIN_DIR=C:\Apache2
+set GETTEXT_DIR=C:\svn-builder\djh-xp-vse2005\gettext
+set TEST_DIR=M:\svn-auto-test
+
+set HTTPD_SRC_DIR=..\httpd
+set BDB_DIR=..\db4-win32
+set NEON_DIR=..\neon
+set ZLIB_DIR=..\zlib
+set OPENSSL_DIR=..\openssl
+set INTL_DIR=..\svn-libintl
+
+REM Uncomment this if you want clean subversion build, after testing
+REM set CLEAN_SVN=1
+
+REM Uncomment this if you want disable ra_svn tests
+REM set NO_RA_SVN=1
+
+REM Uncomment this if you want disable ra_dav tests
+REM set NO_RA_HTTP=1
+
+set PATH=%GETTEXT_DIR%\bin;%PATH%
+call C:\VCX2005\VC\vcvarsall.bat x86
diff --git a/tools/buildbot/slaves/win32-xp-VS2005/do_all.bat b/tools/buildbot/slaves/win32-xp-VS2005/do_all.bat
new file mode 100644
index 0000000..6848f9d
--- /dev/null
+++ b/tools/buildbot/slaves/win32-xp-VS2005/do_all.bat
@@ -0,0 +1,24 @@
+REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+
+cmd.exe /c ..\svnbuild.bat > build.log
+cmd.exe /c ..\svncheck.bat fsfs ra_local > fsfs_local.log
+cmd.exe /c ..\svncheck.bat fsfs ra_svn > fsfs_svn.log
+cmd.exe /c ..\svncheck.bat fsfs ra_dav > fsfs_dav.log
+cmd.exe /c ..\svncheck.bat bdb ra_local > bdb_local.log
+cmd.exe /c ..\svncheck.bat bdb ra_svn > bdb_svn.log
+cmd.exe /c ..\svncheck.bat bdb ra_dav > bdb_dav.log
diff --git a/tools/buildbot/slaves/win32-xp-VS2005/svnbuild.bat b/tools/buildbot/slaves/win32-xp-VS2005/svnbuild.bat
new file mode 100644
index 0000000..3724c85
--- /dev/null
+++ b/tools/buildbot/slaves/win32-xp-VS2005/svnbuild.bat
@@ -0,0 +1,56 @@
+REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+
+@echo off
+IF NOT EXIST ..\config.bat GOTO noconfig
+call ..\config.bat
+
+cmd.exe /c call ..\svnclean.bat
+
+set PARAMS=-t vcproj --vsnet-version=2005 --with-berkeley-db=%BDB_DIR% --with-zlib=%ZLIB_DIR% --with-httpd=%HTTPD_SRC_DIR% --with-neon=%NEON_DIR% --with-libintl=%INTL_DIR%
+REM set PARAMS=-t vcproj --vsnet-version=2005 --with-berkeley-db=%BDB_DIR% --with-zlib=%ZLIB_DIR% --with-httpd=%HTTPD_SRC_DIR% --with-neon=%NEON_DIR%
+IF NOT "%OPENSSL_DIR%"=="" set PARAMS=%PARAMS% --with-openssl=%OPENSSL_DIR%
+
+python gen-make.py %PARAMS%
+IF ERRORLEVEL 1 GOTO ERROR
+
+REM MSDEV.COM %HTTPD_SRC_DIR%\apache.dsw /MAKE "BuildBin - Win32 Release"
+REM IF ERRORLEVEL 1 GOTO ERROR
+
+rem MSBUILD subversion_vcnet.sln /t:__ALL_TESTS__ /p:Configuration=Debug
+MSBUILD subversion_vcnet.sln /t:__ALL_TESTS__ /p:Configuration=Release
+IF ERRORLEVEL 1 GOTO ERROR
+MSBUILD subversion_vcnet.sln /t:__SWIG_PYTHON__ /p:Configuration=Release
+IF ERRORLEVEL 1 GOTO ERROR
+MSBUILD subversion_vcnet.sln /t:__SWIG_PERL__ /p:Configuration=Release
+IF ERRORLEVEL 1 GOTO ERROR
+MSBUILD subversion_vcnet.sln /t:__JAVAHL__ /p:Configuration=Release
+IF ERRORLEVEL 1 GOTO ERROR
+
+EXIT 0
+
+REM ----------------------------------------------------
+:ERROR
+ECHO.
+ECHO *** Whoops, something choked.
+ECHO.
+CD ..
+EXIT 1
+
+:noconfig
+echo File config.bat not found. Please copy it from config.bat.tmpl and tweak for you.
+EXIT 2
diff --git a/tools/buildbot/slaves/win32-xp-VS2005/svncheck.bat b/tools/buildbot/slaves/win32-xp-VS2005/svncheck.bat
new file mode 100644
index 0000000..91a3b62
--- /dev/null
+++ b/tools/buildbot/slaves/win32-xp-VS2005/svncheck.bat
@@ -0,0 +1,76 @@
+REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+
+@echo off
+IF NOT EXIST ..\config.bat GOTO noconfig
+call ..\config.bat
+
+set FS_TYPE=%1
+set RA_TYPE=%2
+
+REM By default, return zero
+set ERR=0
+
+if "%RA_TYPE%"=="ra_local" goto ra_local
+if "%RA_TYPE%"=="ra_svn" goto ra_svn
+if "%RA_TYPE%"=="ra_dav" goto ra_dav
+
+echo Unknown ra method '%RA_TYPE%'
+EXIT 3
+
+:ra_local
+time /T
+python win-tests.py %TEST_DIR%\%FS_TYPE% -f %FS_TYPE% -c -r
+if ERRORLEVEL 1 set ERR=1
+time /T
+echo.
+echo.
+echo Detailed log for %FS_TYPE%\tests.log:
+type %TEST_DIR%\%FS_TYPE%\tests.log
+echo End of log for %FS_TYPE%\tests.log
+echo.
+EXIT %ERR%
+
+:ra_svn
+time /T
+python win-tests.py %TEST_DIR%\%FS_TYPE% -f %FS_TYPE% -c -r -u svn://localhost
+if ERRORLEVEL 1 set ERR=1
+time /T
+echo.
+echo.
+echo Detailed log for %FS_TYPE%\svn-tests.log:
+type %TEST_DIR%\%FS_TYPE%\svn-tests.log
+echo End of log for %FS_TYPE%\svn-tests.log
+echo.
+EXIT %ERR%
+
+:ra_dav
+time /T
+python win-tests.py %TEST_DIR%\%FS_TYPE% -f %FS_TYPE% -c -r --httpd-dir="%HTTPD_BIN_DIR%" --httpd-port 1234
+if ERRORLEVEL 1 set ERR=1
+time /T
+echo.
+echo.
+echo Detailed log for %FS_TYPE%\dav-tests.log:
+type %TEST_DIR%\%FS_TYPE%\dav-tests.log
+echo End of log for %FS_TYPE%\dav-tests.log
+echo.
+EXIT %ERR%
+
+:noconfig
+echo File config.bat not found. Please copy it from config.bat.tmpl and tweak for you.
+EXIT 2
diff --git a/tools/buildbot/slaves/win32-xp-VS2005/svnclean.bat b/tools/buildbot/slaves/win32-xp-VS2005/svnclean.bat
new file mode 100644
index 0000000..9c05b25
--- /dev/null
+++ b/tools/buildbot/slaves/win32-xp-VS2005/svnclean.bat
@@ -0,0 +1,30 @@
+REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+
+@echo off
+IF NOT EXIST ..\config.bat GOTO noconfig
+call ..\config.bat
+
+REM if NOT "%CLEAN_SVN%"=="" MSBUILD subversion_vcnet.sln /t:Clean /p:Configuration=Release
+rmdir /s /q Release
+rmdir /s /q %TEST_DIR%
+
+EXIT 0
+
+:noconfig
+echo File config.bat not found. Please copy it from config.bat.tmpl and tweak for you.
+EXIT 2
diff --git a/tools/buildbot/slaves/win32-xp-VS2005/svnlog.bat b/tools/buildbot/slaves/win32-xp-VS2005/svnlog.bat
new file mode 100644
index 0000000..df6760a
--- /dev/null
+++ b/tools/buildbot/slaves/win32-xp-VS2005/svnlog.bat
@@ -0,0 +1,30 @@
+REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+
+@echo off
+REM IF NOT EXIST ..\config.bat GOTO noconfig
+REM call ..\config.bat
+
+echo.
+echo Detailed test logs included in svncheck.bat log.
+echo.
+
+EXIT 0
+
+:noconfig
+echo File config.bat not found. Please copy it from config.bat.tmpl and tweak for you.
+EXIT 2
diff --git a/tools/buildbot/slaves/xp-vc60-ia32/config.bat.tmpl b/tools/buildbot/slaves/xp-vc60-ia32/config.bat.tmpl
new file mode 100644
index 0000000..1221b7a
--- /dev/null
+++ b/tools/buildbot/slaves/xp-vc60-ia32/config.bat.tmpl
@@ -0,0 +1,44 @@
+REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+
+set MSDEV=C:\Program Files\VS6.0
+set HTTPD_BIN_DIR=C:\Program Files\Apache2
+set TEST_DIR=C:\tmp\svn-auto-test
+set SDK_DIR=C:\Program Files\VS.NET\Vc7\PlatformSDK\
+
+set HTTPD_SRC_DIR=..\httpd
+set BDB_DIR=..\db4-win32
+set NEON_DIR=..\neon
+set ZLIB_DIR=..\zlib
+set OPENSSL_DIR=..\openssl
+set INTL_DIR=..\svn-win32-libintl
+
+REM Uncomment this if you want a clean Subversion build after testing
+REM set CLEAN_SVN=1
+
+REM Uncomment this if you want to disable the ra_svn tests
+REM set NO_RA_SVN=1
+
+REM Uncomment this if you want to disable the ra_dav tests
+REM set NO_RA_HTTP=1
+
+set SDKINC=%SDK_DIR%\include
+set SDKLIB=%SDK_DIR%\lib
+
+set PATH=%MSDEV%\VC98\Bin;%MSDEV%\Common\MSDev98\Bin\;%PATH%
+set INCLUDE=%SDKINC%;%MSDEV%\VC98\ATL\INCLUDE;%MSDEV%\VC98\INCLUDE;%MSDEV%\VC98\MFC\INCLUDE
+set LIB=%SDKLIB%;%MSDEV%\VC98\LIB;%MSDEV%\VC98\MFC\LIB
diff --git a/tools/buildbot/slaves/xp-vc60-ia32/svnbuild.bat b/tools/buildbot/slaves/xp-vc60-ia32/svnbuild.bat
new file mode 100644
index 0000000..2d5a671
--- /dev/null
+++ b/tools/buildbot/slaves/xp-vc60-ia32/svnbuild.bat
@@ -0,0 +1,77 @@
+REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+
+IF NOT EXIST ..\config.bat GOTO noconfig
+call ..\config.bat
+
+IF NOT "%OPENSSL_DIR%"=="" call :openssl
+IF ERRORLEVEL 1 GOTO ERROR
+
+set PARAMS=-t dsp --with-berkeley-db=%BDB_DIR% --with-libintl=%INTL_DIR% --with-zlib=%ZLIB_DIR% --with-httpd=%HTTPD_SRC_DIR% --with-neon=%NEON_DIR% --enable-bdb-in-apr-util
+IF NOT "%OPENSSL_DIR%"=="" set PARAMS=%PARAMS% --with-openssl=%OPENSSL_DIR%
+
+python gen-make.py %PARAMS%
+IF ERRORLEVEL 1 GOTO ERROR
+
+MSDEV.COM %HTTPD_SRC_DIR%\apache.dsw /MAKE "BuildBin - Win32 Release"
+IF ERRORLEVEL 1 GOTO ERROR
+
+MSDEV.COM subversion_msvc.dsw /USEENV /MAKE "__ALL_TESTS__ - Win32 Release"
+IF ERRORLEVEL 1 GOTO ERROR
+
+
+EXIT 0
+
+REM ----------------------------------------------------
+:ERROR
+ECHO.
+ECHO *** Whoops, something choked.
+ECHO.
+CD ..
+EXIT 1
+
+
+:openssl
+rem ====== Build openssl.
+pushd %OPENSSL_DIR%
+perl Configure VC-WIN32
+IF ERRORLEVEL 1 goto openssl-err1
+
+call ms\do_ms
+IF ERRORLEVEL 1 goto openssl-err1
+
+nmake -f ms\ntdll.mak /NOLOGO /S
+IF ERRORLEVEL 1 goto openssl-err1
+
+pushd out32dll
+call ..\ms\test
+IF ERRORLEVEL 1 goto openssl-err2
+
+popd
+popd
+EXIT /B 0
+
+:openssl-err2
+popd
+
+:openssl-err1
+popd
+EXIT 1
+
+:noconfig
+echo File config.bat not found. Please copy it from config.bat.tmpl and tweak for you.
+EXIT 2
diff --git a/tools/buildbot/slaves/xp-vc60-ia32/svncheck.bat b/tools/buildbot/slaves/xp-vc60-ia32/svncheck.bat
new file mode 100644
index 0000000..70e8768
--- /dev/null
+++ b/tools/buildbot/slaves/xp-vc60-ia32/svncheck.bat
@@ -0,0 +1,51 @@
+REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+
+IF NOT EXIST ..\config.bat GOTO noconfig
+call ..\config.bat
+
+set FS_TYPE=%1
+set RA_TYPE=%2
+
+REM By default, return zero
+set ERR=0
+
+if "%RA_TYPE%"=="ra_local" goto ra_local
+if "%RA_TYPE%"=="ra_svn" goto ra_svn
+if "%RA_TYPE%"=="ra_dav" goto ra_dav
+
+echo Unknown ra method '%RA_TYPE%'
+EXIT 3
+
+:ra_local
+python win-tests.py %TEST_DIR% -f %FS_TYPE% -c -r
+if ERRORLEVEL 1 set ERR=1
+EXIT %ERR%
+
+:ra_svn
+python win-tests.py %TEST_DIR% -f %FS_TYPE% -c -r -u svn://localhost
+if ERRORLEVEL 1 set ERR=1
+EXIT %ERR%
+
+:ra_dav
+python win-tests.py %TEST_DIR% -f %FS_TYPE% -c -r --httpd-dir="%HTTPD_BIN_DIR%" --httpd-port 1234
+if ERRORLEVEL 1 set ERR=1
+EXIT %ERR%
+
+:noconfig
+echo File config.bat not found. Please copy it from config.bat.tmpl and tweak for you.
+EXIT 2
diff --git a/tools/buildbot/slaves/xp-vc60-ia32/svnclean.bat b/tools/buildbot/slaves/xp-vc60-ia32/svnclean.bat
new file mode 100644
index 0000000..64df9b8
--- /dev/null
+++ b/tools/buildbot/slaves/xp-vc60-ia32/svnclean.bat
@@ -0,0 +1,28 @@
+REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+
+IF NOT EXIST ..\config.bat GOTO noconfig
+call ..\config.bat
+
+if NOT "%CLEAN_SVN%"=="" MSDEV.COM subversion_msvc.dsw /MAKE "__ALL_TESTS__ - Win32 Release" /CLEAN
+if ERRORLEVEL 1 EXIT 1
+
+EXIT 0
+
+:noconfig
+echo File config.bat not found. Please copy it from config.bat.tmpl and tweak for you.
+EXIT 2
diff --git a/tools/buildbot/slaves/xp-vc60-ia32/svnlog.bat b/tools/buildbot/slaves/xp-vc60-ia32/svnlog.bat
new file mode 100644
index 0000000..bb0d872
--- /dev/null
+++ b/tools/buildbot/slaves/xp-vc60-ia32/svnlog.bat
@@ -0,0 +1,25 @@
+REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+
+IF NOT EXIST ..\config.bat GOTO noconfig
+call ..\config.bat
+
+EXIT 0
+
+:noconfig
+echo File config.bat not found. Please copy it from config.bat.tmpl and tweak for you.
+EXIT 2
diff --git a/tools/client-side/bash_completion b/tools/client-side/bash_completion
new file mode 100644
index 0000000..53632fb
--- /dev/null
+++ b/tools/client-side/bash_completion
@@ -0,0 +1,1625 @@
+# ------------------------------------------------------------
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ------------------------------------------------------------
+
+# Programmable completion for the Subversion svn command under bash. Source
+# this file (or on some systems add it to ~/.bash_completion and start a new
+# shell) and bash's completion mechanism will know all about svn's options!
+# Provides completion for the svnadmin, svndumpfilter, svnlook and svnsync
+# commands as well. Who wants to read man pages/help text...
+
+# Known to work with bash 3.* with programmable completion and extended
+# pattern matching enabled (use 'shopt -s extglob progcomp' to enable
+# these if they are not already enabled).
+
+shopt -s extglob
+
+# Tree helper functions which only use bash, to ease readability.
+
+# look for value associated to key from stdin in K/V hash file format
+# val=$(_svn_read_hashfile svn:realmstring < some/file)
+function _svn_read_hashfile()
+{
+ local tkey=$1 key= val=
+ while true; do
+ read tag len
+ [ $tag = 'END' ] && break
+ [ $tag != 'K' ] && {
+ #echo "unexpected tag '$tag' instead of 'K'" >&2
+ return
+ }
+ read -r -n $len key ; read
+ read tag len
+ [ $tag != 'V' ] && {
+ #echo "unexpected tag '$tag' instead of 'V'" >&2
+ return
+ }
+ read -r -n $len val ; read
+ if [[ $key = $tkey ]] ; then
+ echo "$val"
+ return
+ fi
+ done
+ #echo "target key '$tkey' not found" >&2
+}
+
+# _svn_grcut shell-regular-expression
+# extract filenames from 'svn status' output
+function _svn_grcut()
+{
+ local re=$1 line= old_IFS
+ # fix IFS, so that leading spaces are not ignored by next read.
+ # (there is a leading space in svn status output if only a prop is changed)
+ old_IFS="$IFS"
+ IFS=$'\n'
+ while read -r line ; do
+ [[ ! $re || $line == $re ]] && echo "${line/????????/}"
+ done
+ IFS="$old_IFS"
+}
+
+# extract stuff from svn info output
+# _svn_info (URL|Repository Root)
+function _svn_info()
+{
+ local what=$1 line=
+ LANG=C LC_MESSAGES=C svn info --non-interactive 2> /dev/null | \
+ while read line ; do
+ [[ $line == *"$what: "* ]] && echo ${line#*: }
+ done
+}
+
+# broken since svn 1.7 | FIXME: change to svn status -v ?
+# _svn_lls (dir|file|all) files...
+# list svn-managed files from list
+# some 'svn status --all-files' would be welcome here?
+function _svn_lls()
+{
+ local opt=$1 f=
+ shift
+ for f in "$@" ; do
+ # could try to check in .svn/entries? hmmm...
+ if [[ $opt == @(dir|all) && -d "$f" ]] ; then
+ echo "$f/"
+ elif [[ $opt == @(file|all) ]] ; then
+ # split f in directory/file names
+ local dn= fn="$f"
+ [[ "$f" == */* ]] && dn=${f%\/*}/ fn=${f##*\/}
+ # ??? this does not work for just added files, because they
+ # do not have a content reference yet...
+ [ -f "${dn}.svn/text-base/${fn}.svn-base" ] && echo "$f"
+ fi
+ done
+}
+
+# try to complete TARGET
+# 1. [nothing] lists available protocols
+# 2. svn+ssh:// lists servers from .ssh/known_hosts
+# 3. http[s]:// lists already used svn servers
+# 4. file:// lists files from dir
+# 5. ^/ or protocol except file:/ triggers svn ls
+# this code expects bash 4, $cur is split by : too
+#
+# $1 'all' | 'remote_only'
+# return true if found something
+function _svn_complete_target() {
+ # echo -e "\n_svn_complete_target: [$cur] 1:[${COMP_WORDS[COMP_CWORD]}] 2:[${COMP_WORDS[COMP_CWORD-1]}] 3:[${COMP_WORDS[COMP_CWORD-2]}] | [${COMP_WORDS[@]}] [$COMP_WORDBREAKS]"
+ local prefix=${COMP_WORDS[COMP_CWORD-2]}
+ local colon=${COMP_WORDS[COMP_CWORD-1]}
+ # see about COMP_WORDBREAKS workaround in prop completion
+ if [[ $prefix == "file" && "$colon" == ":" ]]
+ then
+ # file completion for file:// urls
+ COMPREPLY=( $(compgen -d -S '/' -X '*/.*' -- $cur ) )
+ return
+ elif [[ ( $1 == "all" && $cur == ^/* ) || ( "$colon" == ":" && $cur == //*/* ) ]]
+ then # we already have a protocol and host: autocomplete for svn ls ^/bla | svn ls remote_url | svn checkout remote_url
+ local p
+ if [ "$colon" == ":" ] ; then
+ p="$prefix$colon"
+ fi
+ if [[ $cur =~ ((.*/)([^/]*)) ]] # url = everything up to the last /
+ then
+ local url="${BASH_REMATCH[2]}"
+ local path="${BASH_REMATCH[3]}"
+ local remote_files="$(svn ls --non-interactive "$p$url" 2> /dev/null )"
+ COMPREPLY=( $(compgen -P "$url" -W "$remote_files" -- "$path" ) )
+ compopt -o nospace
+ return 0
+ fi
+ elif [[ "$colon" == ":" ]]
+ then
+ # get known servers
+ # svn+ssh://
+ if [[ $prefix == "svn+ssh" && $cur =~ (^//(.*)) ]] ; then
+ local server_start=${BASH_REMATCH[2]}
+ # debian & suse: /usr/share/bash-completion/bash_completion
+ local suffix=/
+ _known_hosts_real -p // "$server_start"
+ else
+ local urls= file=
+ for file in ~/.subversion/auth/svn.simple/* ; do
+ if [ -r $file ] ; then
+ local url=$(_svn_read_hashfile svn:realmstring < $file)
+ url=${url/*</}
+ url=${url/>*/}
+ urls="$urls $url"
+ fi
+ done
+
+ # only suggest/show possible suffixes
+ local suffix=$cur c= choices=
+ for c in $urls ; do
+ [[ $c == $prefix:* ]] && choices="$choices ${c#*:}"
+ done
+
+ COMPREPLY=( $(compgen -W "$choices" -- $suffix ) )
+ fi
+ compopt -o nospace
+ return
+ else
+ # show schemas
+ if [ $1 == 'all' ] ; then
+ COMPREPLY=( $(compgen -W "^/ $urlSchemas" -- $cur) )
+ else
+ COMPREPLY=( $(compgen -W "$urlSchemas" -- $cur) )
+ fi
+ compopt -o nospace
+ return
+ fi
+ #echo "nothing found"
+ return 1
+}
+
+# This completion guides the command/option order along the one suggested
+# by "svn help", although other syntaxes are allowed.
+#
+# - there is a "real" parser to check for what is available and deduce what
+# can be suggested further.
+# - the syntax should be coherent with subversion/svn/{cl.h,main.c}
+# - although it is not a good practice, mixed options and arguments
+# is supported by the completion as it is by the svn command.
+# - the completion works in the middle of a line,
+# but not really in the middle of an argument or option.
+# - property names are completed: see comments about issues related to handling
+# ":" within property names although it is a word completion separator.
+# - unknown properties are assumed to be simple file properties.
+# - --revprop and --revision options are forced to revision properties
+# as they are mandatory in this case.
+# - argument values are suggested to some other options, eg directory names
+# for --config-dir.
+# - values for some options can be extended with environment variables:
+# SVN_BASH_FILE_PROPS: other properties on files/directories
+# SVN_BASH_REV_PROPS: other properties on revisions
+# SVN_BASH_ENCODINGS: encodings to be suggested
+# SVN_BASH_MIME_TYPE: mime types to be suggested
+# SVN_BASH_KEYWORDS: "svn:keywords" substitutions to be suggested
+# SVN_BASH_USERNAME: usernames suggested for --username
+# SVN_BASH_COMPL_EXT: completion extensions for file arguments, based on the
+# current subcommand, so that for instance only modified files are
+# suggested for 'revert', only not svn-managed files for 'add', and so on.
+# Possible values are:
+# - username: guess usernames from ~/.subversion/auth/...
+# - urls: guess urls from ~/.subversion/auth/... or others
+# - svnstatus: use 'svn status' for completion
+# - recurse: allow recursion (expensive)
+# - externals: recurse into externals (very expensive)
+# The former options are reasonable, but beware that both of the latter
+# options may be inadvisable if used on large working copies.
+# None of these costly completions are activated by default.
+# Argument completion outside a working copy results in an error message.
+# Filenames with spaces are not completed properly.
+#
+# TODO
+# - other options?
+# - obsolete options could be removed from auto-comp? (e.g. -N)
+# - obsolete commands could be removed? (e.g. resolved)
+# - completion does not work properly when editing in the middle of the line
+# status/previous are those at the end of the line, not at the entry position
+# - url completion should select more cases where it is relevant
+# - url completion of http:// schemas could suggest sub directories?
+# - add completion for experimental 'obliterate' feature?
+_svn()
+{
+ local cur cmds cmdOpts pOpts mOpts rOpts qOpts nOpts optsParam opt
+
+ COMPREPLY=()
+ cur=${COMP_WORDS[COMP_CWORD]}
+
+ # Possible expansions, without pure-prefix abbreviations such as "up".
+ cmds='add auth blame annotate praise cat changelist cl checkout co cleanup'
+ cmds="$cmds commit ci copy cp delete remove rm diff export help import"
+ cmds="$cmds info list ls lock log merge mergeinfo mkdir move mv rename"
+ cmds="$cmds patch propdel pdel propedit pedit propget pget proplist"
+ cmds="$cmds plist propset pset relocate resolve resolved revert status"
+ cmds="$cmds switch unlock update upgrade"
+ cmds="$cmds shelve shelves unshelve"
+
+ # help options have a strange command status...
+ local helpOpts='--help -h'
+ # all special options that have a command status
+ local specOpts="--version $helpOpts"
+
+ # options that require a parameter
+ # note: continued lines must end '|' continuing lines must start '|'
+ optsParam="-r|--revision|--username|--password|--targets"
+ optsParam="$optsParam|-x|--extensions|-m|--message|-F|--file"
+ optsParam="$optsParam|--encoding|--diff-cmd|--diff3-cmd|--editor-cmd"
+ optsParam="$optsParam|--old|--new|--config-dir|--config-option"
+ optsParam="$optsParam|--native-eol|-l|--limit|-c|--change"
+ optsParam="$optsParam|--depth|--set-depth|--with-revprop"
+ optsParam="$optsParam|--cl|--changelist|--accept|--show-revs"
+ optsParam="$optsParam|--show-item"
+
+ # svn:* and other (env SVN_BASH_*_PROPS) properties
+ local svnProps revProps allProps psCmds propCmds
+
+ # svn and user configured "file" (or directory) properties
+ # the "svn:mergeinfo" prop is not included by default because it is
+ # managed automatically, so there should be no need to edit it by hand.
+ svnProps="svn:keywords svn:executable svn:needs-lock svn:externals
+ svn:ignore svn:eol-style svn:mime-type $SVN_BASH_FILE_PROPS"
+
+ # svn and user configured revision properties
+ revProps="svn:author svn:log svn:date $SVN_BASH_REV_PROPS"
+
+ # all properties as an array variable
+ allProps=( $svnProps $revProps )
+
+ # subcommands that expect property names
+ psCmds='propset|pset|ps'
+ propCmds="$psCmds|propget|pget|pg|propedit|pedit|pe|propdel|pdel|pd"
+
+ # possible URL schemas to access a subversion server
+ local urlSchemas='file:/// http:// https:// svn:// svn+ssh://'
+
+ # Parse arguments and set various variables about what was found.
+ #
+ # cmd: the current command if available
+ # isPropCmd: whether it expects a property name argument
+ # isPsCmd: whether it also expects a property value argument
+ # isHelpCmd: whether it is about help
+ # nExpectArgs: how many arguments are expected by the command
+ # help: help requested about this command (if cmd=='help')
+ # prop: property name (if appropriate)
+ # isRevProp: is it a special revision property
+ # val: property value (if appropriate, under pset)
+ # options: all options encountered
+ # hasRevPropOpt: is --revprop set
+ # hasRevisionOpt: is --revision set
+ # hasRelocateOpt: is --relocate set
+ # hasReintegrateOpt: is --reintegrate set
+ # acceptOpt: the value of --accept
+ # nargs: how many arguments were found
+ # stat: status of parsing at the 'current' word
+ #
+ # prev: previous command in the loop
+ # last: status of last parameter analyzed
+ # i: index
+ local cmd= isPropCmd= isPsCmd= isHelpCmd= nExpectArgs= isCur= i=0
+ local prev= help= prop= val= isRevProp= last='none' nargs=0 stat=
+ local options= hasRevPropOpt= hasRevisionOpt= hasRelocateOpt=
+ local acceptOpt= URL= hasReintegrateOpt=
+
+ for opt in "${COMP_WORDS[@]}"
+ do
+ # get status of current word (from previous iteration)
+ [[ $isCur ]] && stat=$last
+
+ # are we processing the current word
+ isCur=
+ [[ $i -eq $COMP_CWORD ]] && isCur=1
+ let i++
+
+ # FIRST must be the "svn" command
+ [ $last = 'none' ] && { last='first'; continue ; }
+
+ # SKIP option arguments
+ if [[ $prev == @($optsParam) ]] ; then
+
+ # record accept value
+ [[ $prev = '--accept' ]] && acceptOpt=$opt
+
+ prev=''
+ last='skip'
+ continue ;
+ fi
+
+ # Argh... This looks like a bash bug...
+ # Redirections are passed to the completion function
+ # although it is managed by the shell directly...
+ # It matters because we want to tell the user when no more
+ # completion is available, so that it does not necessarily
+ # fall back to the default case.
+ if [[ $prev == @(<|>|>>|[12]>|[12]>>) ]] ; then
+ prev=''
+ last='skip'
+ continue ;
+ fi
+ prev=$opt
+
+ # get the subCoMmanD
+ if [[ ! $cmd && $opt \
+ && ( $opt != -* || $opt == @(${specOpts// /|}) ) ]]
+ then
+ cmd=$opt
+ [[ $cmd == @($propCmds) ]] && isPropCmd=1
+ [[ $cmd == @($psCmds) ]] && isPsCmd=1
+ [[ $cmd == @(${helpOpts// /|}) ]] && cmd='help'
+ [[ $cmd = 'help' ]] && isHelpCmd=1
+ # HELP about a command asked with an option
+ if [[ $isHelpCmd && $cmd && $cmd != 'help' && ! $help ]]
+ then
+ help=$cmd
+ cmd='help'
+ fi
+ last='cmd'
+ continue
+ fi
+
+ # HELP about a command
+ if [[ $isHelpCmd && ! $help && $opt && $opt != -* ]]
+ then
+ help=$opt
+ last='help'
+ continue
+ fi
+
+ # PROPerty name
+ if [[ $isPropCmd && ! $prop && $opt && $opt != -* ]]
+ then
+ prop=$opt
+ [[ $prop == @(${revProps// /|}) ]] && isRevProp=1
+ last='prop'
+ continue
+ fi
+
+ # property VALue
+ if [[ $isPsCmd && $prop && ! $val && $opt != -* ]] ;
+ then
+ val=$opt
+ last='val'
+ continue
+ fi
+
+ if [[ $last != 'onlyarg' ]]
+ then
+ # more OPTions
+ case $opt in
+ -r|--revision|--revision=*)
+ hasRevisionOpt=1
+ ;;
+ --revprop)
+ hasRevPropOpt=1
+ # restrict to revision properties!
+ allProps=( $revProps )
+ # on revprops, only one URL is expected
+ nExpectArgs=1
+ ;;
+ -h|--help)
+ isHelpCmd=1
+ ;;
+ -F|--file)
+ val='-F'
+ ;;
+ --relocate)
+ hasRelocateOpt=1
+ ;;
+ --reintegrate)
+ hasReintegrateOpt=1
+ ;;
+ esac
+
+ # no more options, only arguments, whatever they look like.
+ if [[ $opt = '--' && ! $isCur ]] ; then
+ last='onlyarg'
+ continue
+ fi
+
+ # options are recorded...
+ if [[ $opt == -* ]] ; then
+ # but not the current one!
+ [[ ! $isCur ]] && options="$options $opt "
+ last='opt'
+ continue
+ fi
+ else
+ # onlyarg
+ let nargs++
+ continue
+ fi
+
+ # then we have an argument
+ if [[ $cmd = 'merge' && ! $URL ]] ; then
+ # first argument is the source URL for the merge
+ URL=$opt
+ fi
+
+ last='arg'
+ let nargs++
+ done
+ # end opt option processing...
+ [[ $stat ]] || stat=$last
+
+ # suggest all subcommands, including special help
+ if [[ ! $cmd || $stat = 'cmd' ]]
+ then
+ COMPREPLY=( $( compgen -W "$cmds $specOpts" -- $cur ) )
+ return 0
+ fi
+
+ # suggest all subcommands
+ if [[ $stat = 'help' || ( $isHelpCmd && ! $help ) ]]
+ then
+ COMPREPLY=( $( compgen -W "$cmds" -- $cur ) )
+ return 0
+ fi
+
+ # URL completion
+ if [[ $cmd == @(co|checkout|ls|list) && $stat = 'arg' && \
+ $SVN_BASH_COMPL_EXT == *urls* ]]
+ then
+ if [[ $cmd == @(ls|list) ]] ; then
+ _svn_complete_target 'all' && return
+ else
+ _svn_complete_target 'remote_only' && return
+ fi
+ fi
+
+ if [[ $cmd = 'merge' || $cmd = 'mergeinfo' ]]
+ then
+ local here=$(_svn_info URL)
+ # suggest a possible URL for merging
+ if [[ ! $URL && $stat = 'arg' ]] ; then
+ # we assume a 'standard' repos with branches and trunk
+ if [[ "$here" == */branches/* ]] ; then
+ # we guess that it is a merge from the trunk
+ COMPREPLY=( $(compgen -W ${here/\/branches\/*/\/trunk} -- $cur ) )
+ return 0
+ elif [[ "$here" == */trunk* ]] ; then
+ # we guess that it is a merge from a branch
+ COMPREPLY=( $(compgen -W ${here/\/trunk*/\/branches\/} -- $cur ) )
+ compopt -o nospace
+ return 0
+ else
+ # don't know where we are; let us suggest the repository root...
+ COMPREPLY=( $(compgen -W $(_svn_info Root)/ -- $cur ) )
+ compopt -o nospace
+ return 0
+ fi
+ # this part is broken with bash 4 URL contains https only
+ elif [[ $URL == */branches/* && $here == */trunk* && \
+ ! $hasReintegrateOpt && $cur = '' && $stat = 'arg' ]] ; then
+ # force --reintegrate only if the current word is empty
+ COMPREPLY=( $(compgen -W '--reintegrate' -- $cur ) )
+ return 0
+ # autocomplete for svn merge ^/bla
+ else
+ _svn_complete_target 'all' && return
+ fi
+ fi
+
+ # help about option arguments
+ if [[ $stat = 'skip' ]]
+ then
+ local previous=${COMP_WORDS[COMP_CWORD-1]}
+ local values= dirs= beep= exes=
+
+ [[ $previous = '--config-dir' ]] && dirs=1
+
+ # external editor, diff, diff3...
+ [[ $previous = --*-cmd ]] && exes=1
+
+ [[ $previous = '--native-eol' ]] && values='LF CR CRLF'
+
+ # just to suggest that a number is expected. hummm.
+ [[ $previous = '--limit' ]] && values='0 1 2 3 4 5 6 7 8 9'
+
+ # some special partial help about --revision option.
+ [[ $previous = '--revision' || $previous = '-r' ]] && \
+ values='HEAD BASE PREV COMMITTED 0 {'
+
+ [[ $previous = '--encoding' ]] && \
+ values="latin1 utf8 $SVN_BASH_ENCODINGS"
+
+ [[ $previous = '--extensions' || $previous = '-x' ]] && \
+ values="--unified --ignore-space-change \
+ --ignore-all-space --ignore-eol-style --show-c-functions"
+
+ [[ $previous = '--depth' ]] && \
+ values='empty files immediates infinity'
+
+ [[ $previous = '--set-depth' ]] && \
+ values='empty exclude files immediates infinity'
+
+ [[ $previous = '--accept' ]] && \
+ {
+ # the list is different for 'resolve'
+ if [[ $cmd = 'resolve' ]] ; then
+ # from svn help resolve
+ values='base working mine-full theirs-full'
+ else # checkout merge switch update
+ values="postpone base mine-full theirs-full edit launch \
+ mine-conflict theirs-conflict"
+ fi
+ }
+
+ [[ $previous = '--show-revs' ]] && values='merged eligible'
+
+ [[ $previous = '--show-item' ]] && values="kind url relative-url \
+ repos-root-url repos-uuid revision last-changed-revision \
+ last-changed-date last-changed-author wc-root"
+
+ if [[ $previous = '--username' ]] ; then
+ values="$SVN_BASH_USERNAME"
+ if [[ $SVN_BASH_COMPL_EXT == *username* ]] ; then
+ local file=
+ # digest? others?
+ for file in ~/.subversion/auth/svn.simple/* ; do
+ if [ -r $file ] ; then
+ values="$values $(_svn_read_hashfile username < $file)"
+ fi
+ done
+ fi
+ [[ ! "$values" ]] && beep=1
+ fi
+
+ # could look at ~/.subversion/ ?
+ # hmmm... this option should not exist
+ [[ $previous = '--password' ]] && beep=1
+
+ # TODO: provide help about other options such as:
+ # --old --new --with-revprop
+
+ # if the previous option required a parameter, do something
+ # or fallback on ordinary filename expansion
+ [[ $values ]] && COMPREPLY=( $( compgen -W "$values" -- $cur ) )
+ [[ $dirs ]] && COMPREPLY=( $( compgen -o dirnames -- $cur ) )
+ [[ $exes ]] && COMPREPLY=( $( compgen -c -- $cur ) )
+ [[ $beep ]] &&
+ {
+ # 'no known completion'. hummm.
+ echo -en "\a"
+ COMPREPLY=( '' )
+ }
+ return 0
+ fi
+
+ # provide allowed property names after property commands
+ if [[ $isPropCmd && ( ! $prop || $stat = 'prop' ) && $cur != -* ]]
+ then
+ #
+ # Ok, this part is pretty ugly.
+ #
+ # The issue is that ":" is a completion word separator,
+ # which is a good idea for file:// urls but not within
+ # property names...
+ #
+ # The first idea was to remove locally ":" from COMP_WORDBREAKS
+ # and then put it back in all cases but in property name
+ # completion. It does not always work. There is a strange bug
+ # where one may get "svn:svn:xxx" in some unclear cases.
+ #
+ # Thus the handling is reprogrammed here...
+ # The code assumes that property names look like *:*,
+ # but it also works reasonably well with simple names.
+ #
+ # This hack is broken in bash4... not sure what to do about it,
+ # especially while keeping the bash3 compatibility:-(
+ local choices=
+
+ if [[ $cur == *:* ]]
+ then
+ # only suggest/show possible suffixes
+ local prefix=${cur%:*} suffix=${cur#*:} c=
+ for c in ${allProps[@]} ; do
+ [[ $c == $prefix:* ]] && choices="$choices ${c#*:}"
+ done
+ # everything will be appended to the prefix because ':' is
+ # a separator, so cur is restricted to the suffix part.
+ cur=$suffix
+ else
+ # only one choice is fine
+ COMPREPLY=( $( compgen -W "${allProps[*]}" -- $cur ) )
+ [ ${#COMPREPLY[@]} -eq 1 ] && return 0
+
+ # no ':' so only suggest prefixes?
+ local seen= n=0 last= c=
+ for c in ${allProps[@]%:*} ; do
+ # do not put the same prefix twice...
+ if [[ $c == $cur* && ( ! $seen || $c != @($seen) ) ]]
+ then
+ let n++
+ last=$c
+ choices="$choices $c:"
+ if [[ $seen ]]
+ then
+ seen="$seen|$c*"
+ else
+ seen="$c*"
+ fi
+ fi
+ done
+
+ # supply two choices to force a partial completion and a beep
+ [[ $n -eq 1 ]] && choices="$last:1 $last:2"
+ fi
+
+ COMPREPLY=( $( compgen -W "$choices" -- $cur ) )
+ return 0
+ fi
+
+ # force mandatory --revprop option on revision properties
+ if [[ $isRevProp && ! $hasRevPropOpt ]]
+ then
+ COMPREPLY=( $( compgen -W '--revprop' -- $cur ) )
+ return 0
+ fi
+
+ # force mandatory --revision option on revision properties
+ if [[ $isRevProp && $hasRevPropOpt && ! $hasRevisionOpt ]]
+ then
+ COMPREPLY=( $( compgen -W '--revision' -- $cur ) )
+ return 0
+ fi
+
+ # possible completion when setting property values
+ if [[ $isPsCmd && $prop && ( ! $val || $stat = 'val' ) ]]
+ then
+ # ' is a reminder for an arbitrary value
+ local values="\' --file"
+ case $prop in
+ svn:keywords)
+ # just a subset?
+ values="Id Rev URL Date Author Header \' $SVN_BASH_KEYWORDS"
+ ;;
+ svn:executable|svn:needs-lock)
+ # hmmm... canonical value * is special to the shell.
+ values='\\*'
+ ;;
+ svn:eol-style)
+ values='native LF CR CRLF'
+ ;;
+ svn:mime-type)
+ # could read /etc/mime.types if available. overkill.
+ values="text/ text/plain text/html text/xml text/rtf
+ image/ image/png image/gif image/jpeg image/tiff
+ audio/ audio/midi audio/mpeg
+ video/ video/mpeg video/mp4
+ application/ application/octet-stream
+ $SVN_BASH_MIME_TYPE"
+ ;;
+ esac
+
+ COMPREPLY=( $( compgen -W "$values" -- $cur ) )
+ # special case for --file... return even if within an option
+ [[ ${COMPREPLY} ]] && return 0
+ fi
+
+ # maximum number of additional arguments expected in various forms
+ case $cmd in
+ merge)
+ nExpectArgs=3
+ ;;
+ mergeinfo)
+ nExpectArgs=1
+ ;;
+ copy|cp|move|mv|rename|ren|export|import)
+ nExpectArgs=2
+ ;;
+ switch|sw)
+ [[ ! $hasRelocateOpt ]] && nExpectArgs=2
+ ;;
+ help|h)
+ nExpectArgs=0
+ ;;
+ --version)
+ nExpectArgs=0
+ ;;
+ esac
+
+ # the maximum number of arguments is reached for a command
+ if [[ $nExpectArgs && $nargs -gt $nExpectArgs ]]
+ then
+ # some way to tell 'no completion at all'... is there a better one?
+ # Do not say 'file completion' here.
+ echo -en "\a"
+ COMPREPLY=( '' )
+ return 0
+ fi
+
+ # if not typing an option,
+ # then fallback on filename expansion...
+ if [[ $cur != -* || $stat = 'onlyarg' ]] ; then
+
+ # do we allow possible expensive completion here?
+ if [[ $SVN_BASH_COMPL_EXT == *svnstatus* ]] ; then
+
+ # build status command and options
+ # "--quiet" removes 'unknown' files
+ local status='svn status --non-interactive'
+
+ [[ $SVN_BASH_COMPL_EXT == *recurse* ]] || \
+ status="$status --non-recursive"
+
+ # I'm not sure that it can work with externals in call cases
+ # the output contains translatable sentences (even with quiet)
+ [[ $SVN_BASH_COMPL_EXT == *externals* ]] || \
+ status="$status --ignore-externals"
+
+ local cs= files=
+ # subtlety: must not set $cur* if $cur is empty in some cases
+ [[ $cur ]] && cs=$cur*
+
+ # 'files' is set according to the current subcommand
+ case $cmd in
+ st*) # status completion must include all files
+ files=$cur*
+ ;;
+ ci|commit|revert|di*) # anything edited
+ files=$($status $cs| _svn_grcut '@([MADR!]*| M*|_M*)')
+ ;;
+ add) # unknown files
+ files=$($status $cs| _svn_grcut '\?*')
+ ;;
+ unlock) # unlock locked files
+ files=$($status $cs| _svn_grcut '@(??L*|?????[KOTB]*)')
+ ;;
+ resolve*) # files in conflict
+ files=$($status $cs| _svn_grcut '@(?C*|C*)')
+ ;;
+ praise|blame|ann*) # any svn file but added
+ files=$( _svn_lls all $cur* )
+ ;;
+ p*) # prop commands
+ if [[ $cmd == @($propCmds) && \
+ $prop == @(svn:ignore|svn:externals) ]] ; then
+ # directory specific props
+ files=$( _svn_lls dir . $cur* )
+ else
+ # ??? added directories appear twice: foo foo/
+ files="$( _svn_lls all $cur* )
+ $($status $cs | _svn_grcut 'A*' )"
+ fi
+ ;;
+ info) # information on any file
+ files="$( _svn_lls all $cur* )
+ $($status $cs | _svn_grcut 'A*' )"
+ ;;
+ remove|rm|del*|move|mv|rename) # changing existing files
+ files=$( _svn_lls all $cur* )
+ ;;
+ mkdir) # completion in mkdir can only be for subdirs?
+ files=$( _svn_lls dir $cur* )
+ ;;
+ log|lock|up*|cl*|switch) # misc, all but added files
+ files=$( _svn_lls all $cur* )
+ ;;
+ merge) # may do a better job? URL/WCPATH
+ files=$( _svn_lls all $cur* )
+ ;;
+ ls|list) # better job? what about URLs?
+ files=$( _svn_lls all $cur* )
+ ;;
+ *) # other commands: changelist export import cat mergeinfo
+ local fallback=1
+ ;;
+ esac
+
+ # when not recursive, some relevant files may exist
+ # within subdirectories, so they are added here.
+ # should it be restricted to svn-managed subdirs? no??
+ if [[ $SVN_BASH_COMPL_EXT != *recurse* ]] ; then
+ files="$files $( _svn_lls dir $cur* )"
+ fi
+
+ # set completion depending on computed 'files'
+ if [[ $files ]] ; then
+ COMPREPLY=( $( compgen -W "$files" -- $cur ) )
+ # if empty, set to nope?
+ [[ "${COMPREPLY[*]}" ]] || COMPREPLY=( '' )
+ elif [[ ! $fallback ]] ; then
+ # this suggests no completion...
+ echo -en "\a"
+ COMPREPLY=( '' )
+ fi
+ fi
+ # else fallback to ordinary filename completion...
+ return 0
+ fi
+
+ # otherwise build possible options for the command
+ pOpts="--username --password --no-auth-cache --non-interactive \
+ --trust-server-cert-failures \
+ --force-interactive"
+ mOpts="-m --message -F --file --encoding --force-log --with-revprop"
+ rOpts="-r --revision"
+ qOpts="-q --quiet"
+ nOpts="-N --non-recursive --depth"
+ gOpts="-g --use-merge-history"
+ cOpts="--cl --changelist"
+
+ cmdOpts=
+ case $cmd in
+ --version)
+ cmdOpts="$qOpts"
+ ;;
+ add)
+ cmdOpts="--auto-props --no-auto-props --force --targets \
+ --no-ignore --parents $nOpts $qOpts $pOpts"
+ ;;
+ auth)
+ cmdOpts="--remove --show-passwords $pOpts"
+ ;;
+ blame|annotate|ann|praise)
+ cmdOpts="$rOpts $pOpts -v --verbose --incremental --xml \
+ -x --extensions --force $gOpts"
+ ;;
+ cat)
+ cmdOpts="$rOpts $pOpts --ignore-keywords"
+ ;;
+ changelist|cl)
+ cmdOpts="--targets $pOpts $qOpts $cOpts \
+ -R --recursive --depth --remove"
+ ;;
+ checkout|co)
+ cmdOpts="$rOpts $qOpts $nOpts $pOpts --ignore-externals \
+ --force"
+ ;;
+ cleanup)
+ cmdOpts="$pOpts --include-externals -q --quiet\
+ --remove-ignored --remove-unversioned --vacuum-pristines"
+ ;;
+ commit|ci)
+ cmdOpts="$mOpts $qOpts $nOpts --targets --editor-cmd $pOpts \
+ --no-unlock $cOpts --keep-changelists \
+ --include-externals"
+ ;;
+ copy|cp)
+ cmdOpts="$mOpts $rOpts $qOpts --editor-cmd $pOpts --parents \
+ --ignore-externals --pin-externals"
+ ;;
+ delete|del|remove|rm)
+ cmdOpts="--force $mOpts $qOpts --targets --editor-cmd $pOpts \
+ --keep-local"
+ ;;
+ diff|di)
+ cmdOpts="$rOpts -x --extensions --diff-cmd --no-diff-deleted \
+ $nOpts $pOpts --force --old --new --notice-ancestry \
+ -c --change --summarize $cOpts --xml --git \
+ --internal-diff --show-copies-as-adds \
+ --ignore-properties --properties-only --no-diff-added \
+ --patch-compatible"
+ ;;
+ export)
+ cmdOpts="$rOpts $qOpts $pOpts $nOpts --force --native-eol \
+ --ignore-externals --ignore-keywords"
+ ;;
+ help|h|\?)
+ cmdOpts=
+ ;;
+ import)
+ cmdOpts="--auto-props --no-auto-props $mOpts $qOpts $nOpts \
+ --no-ignore --editor-cmd $pOpts --force"
+ ;;
+ info)
+ cmdOpts="$pOpts $rOpts --targets -R --recursive --depth \
+ --include-externals --incremental --xml \
+ --show-item --no-newline $cOpts"
+ ;;
+ list|ls)
+ cmdOpts="$rOpts -v --verbose -R --recursive $pOpts \
+ --incremental --search --xml --depth \
+ --include-externals"
+ ;;
+ lock)
+ cmdOpts="-m --message -F --file --encoding --force-log \
+ $qOpts --targets --force $pOpts"
+ ;;
+ log)
+ cmdOpts="$rOpts -v --verbose --targets $pOpts --stop-on-copy \
+ --incremental --xml $qOpts -l --limit -c --change \
+ $gOpts --with-all-revprops --with-revprop --depth \
+ --diff --diff-cmd -x --extensions --internal-diff \
+ --with-no-revprops --search --search-and"
+ ;;
+ merge)
+ cmdOpts="$rOpts $nOpts $qOpts --force --dry-run --diff3-cmd \
+ $pOpts --ignore-ancestry -c --change -x --extensions \
+ --record-only --accept \
+ --allow-mixed-revisions -v --verbose"
+ ;;
+ mergeinfo)
+ cmdOpts="$rOpts $pOpts --depth --show-revs -R --recursive \
+ $qOpts -v --verbose --incremental --log"
+ ;;
+ mkdir)
+ cmdOpts="$mOpts $qOpts --editor-cmd $pOpts --parents"
+ ;;
+ move|mv|rename|ren)
+ cmdOpts="$mOpts $qOpts --force --editor-cmd $pOpts \
+ --parents --allow-mixed-revisions"
+ ;;
+ patch)
+ cmdOpts="$qOpts $pOpts --dry-run --ignore-whitespace \
+ --reverse-diff --strip"
+ ;;
+ propdel|pdel|pd)
+ cmdOpts="$qOpts -R --recursive $rOpts $pOpts $cOpts \
+ --depth"
+ [[ $isRevProp || ! $prop ]] && cmdOpts="$cmdOpts --revprop"
+ ;;
+ propedit|pedit|pe)
+ cmdOpts="--editor-cmd $pOpts $mOpts --force"
+ [[ $isRevProp || ! $prop ]] && \
+ cmdOpts="$cmdOpts --revprop $rOpts"
+ ;;
+ propget|pget|pg)
+ cmdOpts="-v --verbose -R --recursive $rOpts --no-newline \
+ $pOpts $cOpts --depth --xml --show-inherited-props"
+ [[ $isRevProp || ! $prop ]] && cmdOpts="$cmdOpts --revprop"
+ ;;
+ proplist|plist|pl)
+ cmdOpts="-v --verbose -R --recursive $rOpts --revprop $qOpts \
+ $pOpts $cOpts --depth --xml --show-inherited-props"
+ ;;
+ propset|pset|ps)
+ cmdOpts="$qOpts --targets -R --recursive \
+ --encoding $pOpts --force $cOpts --depth"
+ [[ $isRevProp || ! $prop ]] && \
+ cmdOpts="$cmdOpts --revprop $rOpts"
+ [[ $val ]] || cmdOpts="$cmdOpts -F --file"
+ ;;
+ relocate)
+ cmdOpts="--ignore-externals $pOpts"
+ ;;
+ resolve)
+ cmdOpts="--targets -R --recursive $qOpts $pOpts --accept \
+ --depth"
+ ;;
+ resolved)
+ cmdOpts="--targets -R --recursive $qOpts $pOpts --depth"
+ ;;
+ revert)
+ cmdOpts="--targets -R --recursive $qOpts $cOpts \
+ --depth $pOpts"
+ ;;
+ status|stat|st)
+ cmdOpts="-u --show-updates -v --verbose $nOpts $qOpts $pOpts \
+ --no-ignore --ignore-externals --incremental --xml \
+ $rOpts $cOpts"
+ ;;
+ switch|sw)
+ cmdOpts="$rOpts $nOpts $qOpts $pOpts --diff3-cmd \
+ --force --accept --ignore-externals --set-depth \
+ --ignore-ancestry"
+ ;;
+ unlock)
+ cmdOpts="$qOpts --targets --force $pOpts"
+ ;;
+ update|up)
+ cmdOpts="$rOpts $nOpts $qOpts $pOpts --diff3-cmd \
+ --ignore-externals --force --accept $cOpts \
+ --parents --editor-cmd --set-depth \
+ --adds-as-modification"
+ ;;
+ upgrade)
+ cmdOpts="$qOpts $pOpts"
+ ;;
+ shelve)
+ cmdOpts="$qOpts --keep-local --delete --list -m --message -F --file --encoding --force-log --editor-cmd --dry-run --depth --targets $cOpts $pOpts"
+ ;;
+ unshelve)
+ cmdOpts="$qOpts --keep-shelved --list --dry-run $pOpts"
+ ;;
+ shelves)
+ cmdOpts="$qOpts $pOpts"
+ ;;
+ *)
+ ;;
+ esac
+
+ # add options that are nearly always available
+ [[ "$cmd" != "--version" ]] && cmdOpts="$cmdOpts $helpOpts"
+ cmdOpts="$cmdOpts --config-dir --config-option"
+
+ # --accept (edit|launch) incompatible with --non-interactive
+ if [[ $acceptOpt == @(edit|launch) ]] ;
+ then
+ cmdOpts=${cmdOpts/ --non-interactive / }
+ fi
+
+ # take out options already given
+ for opt in $options
+ do
+ local optBase
+
+ # remove leading dashes and arguments
+ case $opt in
+ --*) optBase=${opt/=*/} ;;
+ -*) optBase=${opt:0:2} ;;
+ esac
+
+ cmdOpts=" $cmdOpts "
+ cmdOpts=${cmdOpts/ ${optBase} / }
+
+ # take out alternatives and mutually exclusives
+ case $optBase in
+ -v) cmdOpts=${cmdOpts/ --verbose / } ;;
+ --verbose) cmdOpts=${cmdOpts/ -v / } ;;
+ -N) cmdOpts=${cmdOpts/ --non-recursive / } ;;
+ --non-recursive) cmdOpts=${cmdOpts/ -N / } ;;
+ -R) cmdOpts=${cmdOpts/ --recursive / } ;;
+ --recursive) cmdOpts=${cmdOpts/ -R / } ;;
+ -x) cmdOpts=${cmdOpts/ --extensions / } ;;
+ --extensions) cmdOpts=${cmdOpts/ -x / } ;;
+ -q) cmdOpts=${cmdOpts/ --quiet / } ;;
+ --quiet) cmdOpts=${cmdOpts/ -q / } ;;
+ -h) cmdOpts=${cmdOpts/ --help / } ;;
+ --help) cmdOpts=${cmdOpts/ -h / } ;;
+ -l) cmdOpts=${cmdOpts/ --limit / } ;;
+ --limit) cmdOpts=${cmdOpts/ -l / } ;;
+ -r) cmdOpts=${cmdOpts/ --revision / } ;;
+ --revision) cmdOpts=${cmdOpts/ -r / } ;;
+ -c) cmdOpts=${cmdOpts/ --change / } ;;
+ --change) cmdOpts=${cmdOpts/ -c / } ;;
+ --auto-props) cmdOpts=${cmdOpts/ --no-auto-props / } ;;
+ --no-auto-props) cmdOpts=${cmdOpts/ --auto-props / } ;;
+ -g) cmdOpts=${cmdOpts/ --use-merge-history / } ;;
+ --use-merge-history)
+ cmdOpts=${cmdOpts/ -g / } ;;
+ -m|--message|-F|--file)
+ cmdOpts=${cmdOpts/ --message / }
+ cmdOpts=${cmdOpts/ -m / }
+ cmdOpts=${cmdOpts/ --file / }
+ cmdOpts=${cmdOpts/ -F / }
+ ;;
+ esac
+
+ # remove help options within help subcommand
+ if [ $isHelpCmd ] ; then
+ cmdOpts=${cmdOpts/ -h / }
+ cmdOpts=${cmdOpts/ --help / }
+ fi
+ done
+
+ # provide help about available options
+ COMPREPLY=( $( compgen -W "$cmdOpts" -- $cur ) )
+ return 0
+}
+complete -F _svn -o default -X '@(*/.svn|*/.svn/|.svn|.svn/)' svn
+
+_svnadmin ()
+{
+ local cur cmds cmdOpts optsParam opt helpCmds optBase i
+
+ COMPREPLY=()
+ cur=${COMP_WORDS[COMP_CWORD]}
+
+ # Possible expansions, without pure-prefix abbreviations such as "h".
+ cmds='crashtest create delrevprop deltify dump dump-revprops freeze \
+ help hotcopy info list-dblogs list-unused-dblogs \
+ load load-revprops lock lslocks lstxns pack recover rmlocks \
+ rmtxns setlog setrevprop setuuid unlock upgrade verify --version'
+
+ if [[ $COMP_CWORD -eq 1 ]] ; then
+ COMPREPLY=( $( compgen -W "$cmds" -- $cur ) )
+ return 0
+ fi
+
+ # options that require a parameter
+ # note: continued lines must end '|' continuing lines must start '|'
+ optsParam="-r|--revision|--parent-dir|--fs-type|-M|--memory-cache-size"
+ optsParam="$optsParam|-F|--file|--exclude|--include"
+
+ # if not typing an option, or if the previous option required a
+ # parameter, then fallback on ordinary filename expansion
+ helpCmds='help|--help|h|\?'
+ if [[ ${COMP_WORDS[1]} != @($helpCmds) ]] && \
+ [[ "$cur" != -* ]] || \
+ [[ ${COMP_WORDS[COMP_CWORD-1]} == @($optsParam) ]] ; then
+ return 0
+ fi
+
+ cmdOpts=
+ case ${COMP_WORDS[1]} in
+ create)
+ cmdOpts="--bdb-txn-nosync --bdb-log-keep --config-dir \
+ --fs-type --compatible-version"
+ ;;
+ deltify)
+ cmdOpts="-r --revision -q --quiet -M --memory-cache-size"
+ ;;
+ dump)
+ cmdOpts="-r --revision --incremental -q --quiet --deltas \
+ -M --memory-cache-size -F --file \
+ --exclude --include --pattern"
+ ;;
+ dump-revprops)
+ cmdOpts="-r --revision -q --quiet -F --file"
+ ;;
+ freeze)
+ cmdOpts="-F --file"
+ ;;
+ help|h|\?)
+ cmdOpts="$cmds"
+ ;;
+ hotcopy)
+ cmdOpts="--clean-logs --incremental -q --quiet"
+ ;;
+ load)
+ cmdOpts="--ignore-uuid --force-uuid --parent-dir -q --quiet \
+ --use-pre-commit-hook --use-post-commit-hook \
+ --bypass-prop-validation -M --memory-cache-size \
+ --no-flush-to-disk --normalize-props -F --file \
+ --ignore-dates -r --revision"
+ ;;
+ load-revprops)
+ cmdOpts="-r --revision -q --quiet -F --file \
+ --bypass-prop-validation --normalize-props \
+ --force-uuid --no-flush-to-disk"
+ ;;
+ lstxns)
+ cmdOpts="-r --revision"
+ ;;
+ lock|unlock)
+ cmdOpts="--bypass-hooks -q --quiet"
+ ;;
+ pack)
+ cmdOpts="-M --memory-cache-size -q --quiet"
+ ;;
+ recover)
+ cmdOpts="--wait"
+ ;;
+ rmlocks)
+ cmdOpts="-q --quiet"
+ ;;
+ rmtxns)
+ cmdOpts="-q --quiet"
+ ;;
+ setlog)
+ cmdOpts="-r --revision --bypass-hooks"
+ ;;
+ setrevprop|delrevprop)
+ cmdOpts="-r --revision -t --transaction \
+ --use-pre-revprop-change-hook \
+ --use-post-revprop-change-hook"
+ ;;
+ verify)
+ cmdOpts="-r --revision -t --transaction -q --quiet \
+ --check-normalization --keep-going \
+ -M --memory-cache-size --metadata-only"
+ ;;
+ *)
+ ;;
+ esac
+
+ cmdOpts="$cmdOpts --help -h"
+
+ # take out options already given
+ for (( i=2; i<=$COMP_CWORD-1; ++i )) ; do
+ opt=${COMP_WORDS[$i]}
+
+ case $opt in
+ --*) optBase=${opt/=*/} ;;
+ -*) optBase=${opt:0:2} ;;
+ esac
+
+ cmdOpts=" $cmdOpts "
+ cmdOpts=${cmdOpts/ ${optBase} / }
+
+ # take out alternatives
+ case $optBase in
+ -q) cmdOpts=${cmdOpts/ --quiet / } ;;
+ --quiet) cmdOpts=${cmdOpts/ -q / } ;;
+ -h) cmdOpts=${cmdOpts/ --help / } ;;
+ --help) cmdOpts=${cmdOpts/ -h / } ;;
+ -r) cmdOpts=${cmdOpts/ --revision / } ;;
+ --revision) cmdOpts=${cmdOpts/ -r / } ;;
+ -t) cmdOpts=${cmdOpts/ --transaction / } ;;
+ --transaction) cmdOpts=${cmdOpts/ -t / } ;;
+ -F) cmdOpts=${cmdOpts/ --file / } ;;
+ --file) cmdOpts=${cmdOpts/ -F / } ;;
+ -M) cmdOpts=${cmdOpts/ --memory-cache-size / } ;;
+ --memory-cache-size) cmdOpts=${cmdOpts/ --M / } ;;
+ esac
+
+ # skip next option if this one requires a parameter
+ if [[ $opt == @($optsParam) ]] ; then
+ ((++i))
+ fi
+ done
+
+ COMPREPLY=( $( compgen -W "$cmdOpts" -- $cur ) )
+
+ return 0
+}
+complete -F _svnadmin -o default svnadmin
+
+# Programmable completion for the 'svndumpfilter' command.
+# Word 1 completes to a subcommand; later words complete to the options
+# valid for that subcommand, minus options already typed (and synonyms).
+_svndumpfilter ()
+{
+	local cur cmds cmdOpts optsParam opt helpCmds optBase i
+
+	COMPREPLY=()
+	cur=${COMP_WORDS[COMP_CWORD]}
+
+	# Possible expansions, without pure-prefix abbreviations such as "h".
+	cmds='exclude help include --version'
+
+	if [[ $COMP_CWORD -eq 1 ]] ; then
+		COMPREPLY=( $( compgen -W "$cmds" -- $cur ) )
+		return 0
+	fi
+
+	# options that require a parameter
+	# note: continued lines must end '|' continuing lines must start '|'
+	optsParam="--targets"
+
+	# if not typing an option, or if the previous option required a
+	# parameter, then fallback on ordinary filename expansion
+	helpCmds='help|--help|h|\?'
+	if [[ ${COMP_WORDS[1]} != @($helpCmds) ]] && \
+	   [[ "$cur" != -* ]] || \
+	   [[ ${COMP_WORDS[COMP_CWORD-1]} == @($optsParam) ]] ; then
+		return 0
+	fi
+
+	cmdOpts=
+	case ${COMP_WORDS[1]} in
+	exclude|include)
+		cmdOpts="--drop-empty-revs --drop-all-empty-revs --renumber-revs
+		         --skip-missing-merge-sources --targets
+		         --preserve-revprops --quiet --pattern"
+		;;
+	help|h|\?)
+		cmdOpts="$cmds"
+		;;
+	*)
+		;;
+	esac
+
+	cmdOpts="$cmdOpts --help -h"
+
+	# take out options already given
+	for (( i=2; i<=$COMP_CWORD-1; ++i )) ; do
+		opt=${COMP_WORDS[$i]}
+
+		# strip "=value" from long options, keep only "-X" of short ones
+		case $opt in
+		--*)    optBase=${opt/=*/} ;;
+		-*)     optBase=${opt:0:2} ;;
+		esac
+
+		# pad with spaces so the " opt " substitution also matches at
+		# the beginning and end of the list
+		cmdOpts=" $cmdOpts "
+		cmdOpts=${cmdOpts/ ${optBase} / }
+
+		# take out alternatives
+		case $optBase in
+		-h)     cmdOpts=${cmdOpts/ --help / } ;;
+		--help) cmdOpts=${cmdOpts/ -h / } ;;
+		esac
+
+		# skip next option if this one requires a parameter
+		if [[ $opt == @($optsParam) ]] ; then
+			((++i))
+		fi
+	done
+
+	COMPREPLY=( $( compgen -W "$cmdOpts" -- $cur ) )
+
+	return 0
+}
+complete -F _svndumpfilter -o default svndumpfilter
+
+# Programmable completion for the 'svnlook' command.
+# Word 1 completes to a subcommand; later words complete to the options
+# valid for that subcommand, minus options already typed (and synonyms).
+_svnlook ()
+{
+	local cur cmds cmdOpts optsParam opt helpCmds optBase i
+
+	COMPREPLY=()
+	cur=${COMP_WORDS[COMP_CWORD]}
+
+	# Possible expansions, without pure-prefix abbreviations such as "h".
+	cmds='author cat changed date diff dirs-changed filesize help history \
+	      info lock log propget proplist tree uuid youngest --version'
+
+	if [[ $COMP_CWORD -eq 1 ]] ; then
+		COMPREPLY=( $( compgen -W "$cmds" -- $cur ) )
+		return 0
+	fi
+
+	# options that require a parameter
+	# note: continued lines must end '|' continuing lines must start '|'
+	optsParam="-r|--revision|-t|--transaction|-l|--limit|-x|--extensions"
+
+	# if not typing an option, or if the previous option required a
+	# parameter, then fallback on ordinary filename expansion
+	helpCmds='help|--help|h|\?'
+	if [[ ${COMP_WORDS[1]} != @($helpCmds) ]] && \
+	   [[ "$cur" != -* ]] || \
+	   [[ ${COMP_WORDS[COMP_CWORD-1]} == @($optsParam) ]] ; then
+		return 0
+	fi
+
+	cmdOpts=
+	case ${COMP_WORDS[1]} in
+	author)
+		cmdOpts="-r --revision -t --transaction"
+		;;
+	cat)
+		cmdOpts="-r --revision -t --transaction"
+		;;
+	changed)
+		cmdOpts="-r --revision -t --transaction --copy-info"
+		;;
+	date)
+		cmdOpts="-r --revision -t --transaction"
+		;;
+	diff)
+		cmdOpts="-r --revision -t --transaction --diff-copy-from \
+		         --no-diff-added --no-diff-deleted -x --extensions"
+		;;
+	dirs-changed)
+		cmdOpts="-r --revision -t --transaction"
+		;;
+	filesize)
+		cmdOpts="-r --revision -t --transaction"
+		;;
+	help|h|\?)
+		cmdOpts="$cmds"
+		;;
+	history)
+		cmdOpts="-r --revision -l --limit --show-ids"
+		;;
+	info)
+		cmdOpts="-r --revision -t --transaction"
+		;;
+	lock)
+		cmdOpts=
+		;;
+	log)
+		cmdOpts="-r --revision -t --transaction"
+		;;
+	propget|pget|pg)
+		cmdOpts="-r --revision -t --transaction --revprop"
+		;;
+	proplist|plist|pl)
+		cmdOpts="-r --revision -t --transaction --revprop -v --verbose --xml"
+		;;
+	tree)
+		cmdOpts="-r --revision -t --transaction --full-paths -N --non-recursive --show-ids"
+		;;
+	uuid)
+		cmdOpts=
+		;;
+	youngest)
+		cmdOpts=
+		;;
+	*)
+		;;
+	esac
+
+	cmdOpts="$cmdOpts --help -h"
+
+	# take out options already given
+	for (( i=2; i<=$COMP_CWORD-1; ++i )) ; do
+		opt=${COMP_WORDS[$i]}
+
+		# strip "=value" from long options, keep only "-X" of short ones
+		case $opt in
+		--*)    optBase=${opt/=*/} ;;
+		-*)     optBase=${opt:0:2} ;;
+		esac
+
+		# pad with spaces so the " opt " substitution also matches at
+		# the beginning and end of the list
+		cmdOpts=" $cmdOpts "
+		cmdOpts=${cmdOpts/ ${optBase} / }
+
+		# take out alternatives
+		case $optBase in
+		-N)              cmdOpts=${cmdOpts/ --non-recursive / } ;;
+		--non-recursive) cmdOpts=${cmdOpts/ -N / } ;;
+		-h)              cmdOpts=${cmdOpts/ --help / } ;;
+		--help)          cmdOpts=${cmdOpts/ -h / } ;;
+		-l)              cmdOpts=${cmdOpts/ --limit / } ;;
+		--limit)         cmdOpts=${cmdOpts/ -l / } ;;
+		-r)              cmdOpts=${cmdOpts/ --revision / } ;;
+		--revision)      cmdOpts=${cmdOpts/ -r / } ;;
+		-t)              cmdOpts=${cmdOpts/ --transaction / } ;;
+		--transaction)   cmdOpts=${cmdOpts/ -t / } ;;
+		-v)              cmdOpts=${cmdOpts/ --verbose / } ;;
+		--verbose)       cmdOpts=${cmdOpts/ -v / } ;;
+		-x)              cmdOpts=${cmdOpts/ --extensions / } ;;
+		--extensions)    cmdOpts=${cmdOpts/ -x / } ;;
+		esac
+
+		# skip next option if this one requires a parameter
+		if [[ $opt == @($optsParam) ]] ; then
+			((++i))
+		fi
+	done
+
+	COMPREPLY=( $( compgen -W "$cmdOpts" -- $cur ) )
+
+	return 0
+}
+complete -F _svnlook -o default svnlook
+
+# Programmable completion for the 'svnsync' command.
+# Word 1 completes to a subcommand; later words complete to the options
+# valid for that subcommand, minus options already typed (and synonyms).
+_svnsync ()
+{
+	local cur cmds cmdOpts optsParam opt helpCmds optBase i
+
+	COMPREPLY=()
+	cur=${COMP_WORDS[COMP_CWORD]}
+
+	# Possible expansions, without pure-prefix abbreviations such as "h".
+	cmds='copy-revprops help info initialize synchronize --version'
+
+	if [[ $COMP_CWORD -eq 1 ]] ; then
+		COMPREPLY=( $( compgen -W "$cmds" -- $cur ) )
+		return 0
+	fi
+
+	# options that require a parameter
+	# note: continued lines must end '|' continuing lines must start '|'
+	optsParam="--config-dir|--config-option|--source-username|--source-password"
+	optsParam="$optsParam|--sync-username|--sync-password"
+
+	# if not typing an option, or if the previous option required a
+	# parameter, then fallback on ordinary filename expansion
+	helpCmds='help|--help|h|\?'
+	if [[ ${COMP_WORDS[1]} != @($helpCmds) ]] && \
+	   [[ "$cur" != -* ]] || \
+	   [[ ${COMP_WORDS[COMP_CWORD-1]} == @($optsParam) ]] ; then
+		return 0
+	fi
+
+	cmdOpts=
+	case ${COMP_WORDS[1]} in
+	copy-revprops|initialize|init|synchronize|sync)
+		cmdOpts="--non-interactive --no-auth-cache --trust-server-cert \
+		         --source-username --source-password --sync-username \
+		         --sync-password --config-dir --config-option \
+		         -q --quiet -M --memory-cache-size"
+		;;
+	help|h|\?)
+		cmdOpts="$cmds"
+		;;
+	info)
+		cmdOpts="--non-interactive --no-auth-cache --trust-server-cert \
+		         --source-username --source-password --sync-username \
+		         --sync-password --config-dir --config-option"
+		;;
+	*)
+		;;
+	esac
+
+	cmdOpts="$cmdOpts --help -h"
+
+	# take out options already given
+	for (( i=2; i<=$COMP_CWORD-1; ++i )) ; do
+		opt=${COMP_WORDS[$i]}
+
+		# strip "=value" from long options, keep only "-X" of short ones
+		case $opt in
+		--*)    optBase=${opt/=*/} ;;
+		-*)     optBase=${opt:0:2} ;;
+		esac
+
+		# pad with spaces so the " opt " substitution also matches at
+		# the beginning and end of the list
+		cmdOpts=" $cmdOpts "
+		cmdOpts=${cmdOpts/ ${optBase} / }
+
+		# take out alternatives
+		case $optBase in
+		-h)      cmdOpts=${cmdOpts/ --help / } ;;
+		--help)  cmdOpts=${cmdOpts/ -h / } ;;
+		-q)      cmdOpts=${cmdOpts/ --quiet / } ;;
+		--quiet) cmdOpts=${cmdOpts/ -q / } ;;
+		esac
+
+		# skip next option if this one requires a parameter
+		if [[ $opt == @($optsParam) ]] ; then
+			((++i))
+		fi
+	done
+
+	COMPREPLY=( $( compgen -W "$cmdOpts" -- $cur ) )
+
+	return 0
+}
+complete -F _svnsync -o default svnsync
+
+# Reasonable completion for 'svnversion'.
+# Parses the words typed so far to track options, the WC_PATH argument and
+# the TRAIL_URL argument, then offers the options not yet given; argument
+# positions fall through to the shell's default (dirname) completion.
+_svnversion ()
+{
+	local cmdOpts=" -n --no-newline -c --committed -h --help --version "
+	local cur=${COMP_WORDS[COMP_CWORD]}
+
+	COMPREPLY=()
+
+	# parse current options
+	local options= wcpath= trailurl= last='none' stat= opt= i=-1 isCur=
+	for opt in ${COMP_WORDS[@]}
+	do
+		# record the parser state reached just *before* the current word
+		# (i still holds the previous word's index at this point)
+		[[ $i -eq $COMP_CWORD ]] && stat=$last
+		let i++
+
+		# are we processing the current word?
+		isCur=
+		[[ $i -eq $COMP_CWORD ]] && isCur=1
+
+		# skip first command, should be 'svnversion'
+		if [ $last = 'none' ] ; then
+			last='first'
+			continue
+		fi
+
+		# get options
+		if [[ $last != 'arg' && $opt == -* ]]
+		then
+			# if '--' is at the current position, it means that we are looking
+			# for '--*' options, and not the end of option processing.
+			if [[ $opt = '--' && ! $isCur ]]
+			then
+				last='arg'
+			else
+				options="$options $opt "
+				last='opt'
+			fi
+			continue
+		fi
+		# get arguments: first non-option is WC_PATH, second is TRAIL_URL
+		if [[ $opt != -* ]]
+		then
+			last='arg'
+			if [[ ! $wcpath ]]
+			then
+				wcpath=$opt
+			elif [[ ! $trailurl ]]
+			then
+				trailurl=$opt
+			fi
+		fi
+	done
+	# current word was the last one: state is whatever the loop ended with
+	[[ $stat ]] || stat=$last
+
+	# argument part
+	if [[ $cur != -* || $stat = 'arg' ]]
+	then
+		# both arguments already given: suggest nothing more
+		[[ $wcpath && $trailurl ]] && COMPREPLY=( '' )
+		return 0
+	fi
+
+	# suggest options, and take out already given options
+	for opt in $options
+	do
+		# take out options
+		cmdOpts=${cmdOpts/ $opt / }
+
+		# take out alternatives
+		case $opt in
+		-n)           cmdOpts=${cmdOpts/ --no-newline / } ;;
+		--no-newline) cmdOpts=${cmdOpts/ -n / } ;;
+		-h)           cmdOpts=${cmdOpts/ --help / } ;;
+		--help)       cmdOpts=${cmdOpts/ -h / } ;;
+		-c)           cmdOpts=${cmdOpts/ --committed / } ;;
+		--committed)  cmdOpts=${cmdOpts/ -c / } ;;
+		esac
+	done
+
+	COMPREPLY=( $( compgen -W "$cmdOpts" -- $cur ) )
+
+	return 0
+}
+# -X option does not seem to work?
+complete -F _svnversion -o dirnames -X '*.svn*' svnversion
diff --git a/tools/client-side/bash_completion_test b/tools/client-side/bash_completion_test
new file mode 100755
index 0000000..48d4da4
--- /dev/null
+++ b/tools/client-side/bash_completion_test
@@ -0,0 +1,200 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# Checks that the "_svn" function defined in the specified "bash_completion"
+# script produces appropriate lists of completions for various incomplete svn
+# command lines.
+
+# Locate the "bash_completion" script under test: either the single
+# command-line argument, or the copy next to this test script.
+THIS_DIR=`dirname "$0"`
+SCRIPT="$1"
+if [ -z "$SCRIPT" ]; then
+  SCRIPT="$THIS_DIR/bash_completion"
+fi
+
+# Bail out with usage if the script is unreadable or extra args were given.
+if [ ! -r "$SCRIPT" ] || [ "$2" ]; then
+  echo "Usage: bash_completion_test [BASH_COMPLETION_PATHNAME]"
+  echo "Tests the specified \"bash_completion\" script,"
+  echo "defaulting to the one in the same directory as this test,"
+  echo "including checking it against the \"svn\" program found in the current PATH."
+  exit 1
+fi
+
+set -e # Exit on error
+shopt -s extglob
+# Force the C locale so 'sort' order and svn's help output are predictable.
+export LC_ALL=C
+
+# Execute the script which is to be tested.
+. "$SCRIPT"
+
+# From the given incomplete command, print a space-separated list of
+# possible completions of the last argument (or of an empty first argument
+# if no subcommand is given).
+#
+# Usage: get_completions SVN-CMD [SVN-SUBCOMMAND [SVN-OPTION...]]
+# where SVN-CMD is "svn", "svnadmin", etc.; such that when a leading
+# underscore is added, it must name one of the completion functions in
+# "bash_completion".
+get_completions() {
+  SVN_CMD="$1"
+  # Fake the readline state the completion functions expect.
+  COMP_WORDS=("$@")
+  if [ $# == 1 ]; then
+    # Only the command itself given: complete an empty first argument.
+    COMP_CWORD=1
+  else
+    # Complete the last argument supplied.
+    COMP_CWORD=$(($#-1))
+  fi
+  # Call the appropriate completion function (e.g. "_svn") with no arguments.
+  "_$SVN_CMD"
+  echo -n "${COMPREPLY[*]}"
+}
+
+# Print a failure message, record the failure, and return "false".
+# Additional MESSAGE arguments are printed as indented continuation lines.
+# Usage: fail MESSAGE...
+fail() {
+  PREFIX="FAIL: "
+  for LINE in "$@"; do
+    echo "$PREFIX$LINE"
+    # continuation lines align under the first, without the "FAIL: " tag
+    PREFIX="      "
+  done
+  # remember that at least one test failed, for the final summary
+  TESTS_FAILED=1
+  false
+}
+
+# Check that EXPECTED-WORD is among the completions of the last word in
+# SVN-ARGS. SVN-ARGS is a single argument to this function, split
+# into multiple arguments when passed to "get_completions()".
+# Usage: includes SVN-CMD SVN-ARGS EXPECTED-WORD
+includes() {
+  SVN_CMD="$1"
+  SVN_ARGS="$2"
+  EXPECTED_WORD="$3"
+  COMPLETIONS=`get_completions "$SVN_CMD" $SVN_ARGS`
+  # Build an extglob alternation "@(a|b|c)" out of the completion list
+  # and test membership against it.
+  if [[ "$EXPECTED_WORD" != @(${COMPLETIONS// /|}) ]]; then
+    fail "completions of \"$SVN_CMD $SVN_ARGS\" should include \"$EXPECTED_WORD\"" \
+         "(completions: $COMPLETIONS)"
+  fi
+}
+
+# Check that EXPECTED-WORD is NOT among the completions of the last word
+# in SVN-ARGS (the negated counterpart of "includes" above).
+# Usage: excludes SVN-CMD SVN-ARGS EXPECTED-WORD
+excludes() {
+  SVN_CMD="$1"
+  SVN_ARGS="$2"
+  EXPECTED_WORD="$3"
+  COMPLETIONS=`get_completions "$SVN_CMD" $SVN_ARGS`
+  # Membership test via an extglob alternation built from the list.
+  if [[ "$EXPECTED_WORD" == @(${COMPLETIONS// /|}) ]]; then
+    fail "completions of \"$SVN_CMD $SVN_ARGS\" should exclude \"$EXPECTED_WORD\"" \
+         "(completions: $COMPLETIONS)"
+  fi
+}
+
+# Print the valid subcommands for an "svn"-like program, one per line, sorted.
+# Exclude any synonym that is just a truncation of its full name.
+# Usage: get_svn_subcommands SVN-CMD
+# where SVN-CMD is "svn" or another program that outputs similar help.
+get_svn_subcommands() {
+  SVN_CMD="$1"
+  "$SVN_CMD" help |
+    # Find the relevant lines.
+    sed -n -e '1,/^Available subcommands:$/d;/^$/q;p' |
+    # Remove brackets and commas
+    tr -d ' )' | tr '(,' ' ' |
+    # Remove simple abbreviations
+    ( while read SYNONYMS; do
+        for CMD in $SYNONYMS; do
+          if [ "$CMD" != "?" ]; then
+            # Drop CMD if another synonym on the same line extends it
+            # (i.e. CMD is a pure prefix abbreviation).
+            for SYNONYM in $SYNONYMS; do
+              case $SYNONYM in
+                $CMD) ;;
+                $CMD*) CMD= ; break ;;
+              esac
+            done
+            if [ $CMD ]; then
+              echo $CMD
+            fi
+          fi
+        done
+      done
+    ) |
+    sort
+}
+
+# Print the valid option switches for "svn SUBCMD", one per line, sorted.
+# Usage: get_svn_options SVN-CMD SUBCMD
+# where SVN-CMD is "svn" or another program that outputs similar help.
+get_svn_options() {
+  SVN_CMD="$1"
+  SUBCMD="$2"
+  { "$SVN_CMD" help "$SUBCMD" |
+      # Remove deprecated options
+      grep -v deprecated |
+      # Find the relevant lines; remove "arg" and description.
+      sed -n -e '1,/^\(Valid\|Global\) options:$/d;/^  -/!d' \
+             -e 's/\( ARG\)* * : .*//;p' |
+      # Remove brackets; put each word on its own line.
+      tr -d '] ' | tr '[' '\n'
+    # The following options are always accepted but not listed in the help
+    if [ "$SUBCMD" != "help" ] ; then
+      echo "-h"
+      echo "--help"
+    fi
+  } | sort
+
+}
+
+
+# The tests.
+set +e # Do not exit on error
+TESTS_FAILED=
+
+echo "Checking general completion"
+includes svn "he" "help"
+includes svn "" "help"
+includes svn "" "--version"
+
+# For each svn-like program, compare the completion function's output
+# against what the program's own "help" output advertises.
+# (Completion functions such as _svnrdump are expected to be defined by
+# the sourced bash_completion script.)
+for SVN_CMD in svn svnadmin svndumpfilter svnlook svnrdump svnsync; do
+  echo "Checking list of subcommands: $SVN_CMD"
+  HELP_SUBCMDS=`get_svn_subcommands "$SVN_CMD" | tr "\n" " "`
+  COMPLETION_SUBCMDS=`get_completions "$SVN_CMD" | tr " " "\n" | grep -v "^-" | sort | tr "\n" " "`
+  if [ "$HELP_SUBCMDS" != "$COMPLETION_SUBCMDS" ]; then
+    fail "non-option completions for \"$SVN_CMD\" != subcommands accepted" \
+         "  (non-o. cmpl.: $COMPLETION_SUBCMDS)" \
+         "  (help says:    $HELP_SUBCMDS)"
+  fi
+
+  echo "Checking list of options for each subcommand"
+  for SUBCMD in $HELP_SUBCMDS; do
+    HELP_OPTIONS=`get_svn_options $SVN_CMD $SUBCMD | tr "\n" " "`
+    COMPLETION_OPTIONS=`get_completions $SVN_CMD $SUBCMD - | tr " " "\n" | sort | tr "\n" " "`
+    if [ "$HELP_OPTIONS" != "$COMPLETION_OPTIONS" ]; then
+      fail "completions for \"$SVN_CMD $SUBCMD -\" != options accepted" \
+           "  (completions: $COMPLETION_OPTIONS)" \
+           "  (help says:   $HELP_OPTIONS)"
+    fi
+  done
+done
+
+# Once one of a pair of synonymous options is given, neither spelling
+# should be offered again.
+echo "Checking rejection of synonyms"
+excludes svn "diff -x -u -" "-x"
+excludes svn "diff -x -u --e" "--extensions"
+excludes svn "diff --extensions -u -" "--extensions"
+excludes svn "diff --extensions -u -" "-x"
+excludes svn "diff --extensions=-u -" "-x"
+
+if [ $TESTS_FAILED ]; then
+  echo "FAILURE: at least one bash_completion test failed."
+else
+  echo "All bash_completion tests passed."
+fi
diff --git a/tools/client-side/change-svn-wc-format.py b/tools/client-side/change-svn-wc-format.py
new file mode 100755
index 0000000..1040626
--- /dev/null
+++ b/tools/client-side/change-svn-wc-format.py
@@ -0,0 +1,420 @@
+#!/usr/bin/env python
+#
+# change-svn-wc-format.py: Change the format of a Subversion working copy.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+
+import sys
+import os
+import getopt
+import stat
+try:
+ my_getopt = getopt.gnu_getopt
+except AttributeError:
+ my_getopt = getopt.getopt
+
+### The entries file parser in subversion/tests/cmdline/svntest/entry.py
+### handles the XML-based WC entries file format used by Subversion
+### 1.3 and lower. It could be rolled into this script.
+
+LATEST_FORMATS = { "1.4" : 8,
+ "1.5" : 9,
+ "1.6" : 10,
+ # Do NOT add format 11 here. See comment in must_retain_fields
+ # for why.
+ }
+
def usage_and_exit(error_msg=None):
  """Write usage information and exit.

  If ERROR_MSG is provided, it is printed first, the usage text goes to
  stderr, and the exit status is non-zero.  Otherwise the usage text goes
  to stdout and the exit status is zero."""
  progname = os.path.basename(sys.argv[0])

  if error_msg:
    stream = sys.stderr
    stream.write("ERROR: %s\n\n" % error_msg)
  else:
    stream = sys.stdout
  stream.write("""\
usage: %s WC_PATH SVN_VERSION [--verbose] [--force] [--skip-unknown-format]
       %s --help

Change the format of a Subversion working copy to that of SVN_VERSION.

  --skip-unknown-format : skip directories with unknown working copy
                          format and continue the update

""" % (progname, progname))
  stream.flush()
  sys.exit(1 if error_msg else 0)
+
def get_adm_dir():
  """Return the name of Subversion's administrative directory,
  adjusted for the SVN_ASP_DOT_NET_HACK environment variable.  See
  <http://svn.apache.org/repos/asf/subversion/trunk/notes/asp-dot-net-hack.txt>
  for details."""
  if "SVN_ASP_DOT_NET_HACK" in os.environ:
    return "_svn"
  return ".svn"
+
class WCFormatConverter:
  "Performs WC format conversions."
  root_path = None              # top of the WC tree to convert
  error_on_unrecognized = True  # False when --skip-unknown-format is given
  force = False                 # True when --force allows lossy conversion
  verbosity = 0

  def write_dir_format(self, format_nbr, dirname, paths):
    """Attempt to write the WC format FORMAT_NBR to the entries file
    for DIRNAME.  Throws LossyConversionException when not in --force
    mode, and unconvertable WC data is encountered.  Clears PATHS in
    place when DIRNAME is not versioned, as a signal to the caller."""

    # Avoid iterating in unversioned directories.
    if not (get_adm_dir() in paths):
      del paths[:]
      return

    # Process the entries file for this versioned directory.
    if self.verbosity:
      print("Processing directory '%s'" % dirname)
    entries = Entries(os.path.join(dirname, get_adm_dir(), "entries"))
    if self.verbosity:
      print("Parsing file '%s'" % entries.path)
    try:
      entries.parse(self.verbosity)
    except UnrecognizedWCFormatException as e:
      if self.error_on_unrecognized:
        raise
      # --skip-unknown-format: report it and leave this directory alone.
      # BUG FIX: the original carried on after a failed parse, which
      # either crashed (the skip message read 'format.path' before
      # 'format' was ever assigned) or went on to rewrite the entries
      # file of a WC whose format it had just declared unknown.
      sys.stderr.write("%s, skipping\n" % e)
      sys.stderr.flush()
      if self.verbosity:
        print("Skipping file '%s'"
              % os.path.join(dirname, get_adm_dir(), "format"))
      return

    format = Format(os.path.join(dirname, get_adm_dir(), "format"))
    if self.verbosity:
      print("Updating file '%s'" % format.path)
    format.write_format(format_nbr, self.verbosity)

    if self.verbosity:
      print("Checking whether WC format can be converted")
    try:
      entries.assert_valid_format(format_nbr, self.verbosity)
    except LossyConversionException as e:
      # In --force mode, ignore complaints about lossy conversion.
      if self.force:
        print("WARNING: WC format conversion will be lossy. Dropping "\
              "field(s) %s " % ", ".join(e.lossy_fields))
      else:
        raise

    if self.verbosity:
      print("Writing WC format")
    entries.write_format(format_nbr)

  def change_wc_format(self, format_nbr):
    """Walk all paths in a WC tree, and change their format to
    FORMAT_NBR. Throw LossyConversionException or NotImplementedError
    if the WC format should not be converted, or is unrecognized."""
    for dirpath, dirs, files in os.walk(self.root_path):
      paths = dirs + files
      self.write_dir_format(format_nbr, dirpath, paths)
      # BUG FIX: write_dir_format() clears PATHS to flag an unversioned
      # directory, but with os.walk() it only ever saw the 'dirs + files'
      # copy; prune 'dirs' itself so the walk stops descending there.
      if not paths:
        del dirs[:]
+
class Entries:
  """Represents a .svn/entries file.

  'The entries file' section in subversion/libsvn_wc/README is a
  useful reference."""

  # The name and index of each field composing an entry's record.
  entry_fields = (
    "name",
    "kind",
    "revision",
    "url",
    "repos",
    "schedule",
    "text-time",
    "checksum",
    "committed-date",
    "committed-rev",
    "last-author",
    "has-props",
    "has-prop-mods",
    "cachable-props",
    "present-props",
    "conflict-old",
    "conflict-new",
    "conflict-wrk",
    "prop-reject-file",
    "copied",
    "copyfrom-url",
    "copyfrom-rev",
    "deleted",
    "absent",
    "incomplete",
    "uuid",
    "lock-token",
    "lock-owner",
    "lock-comment",
    "lock-creation-date",
    "changelist",
    "keep-local",
    "working-size",
    "depth",
    "tree-conflicts",
    "file-external",
    )

  # The format number.
  format_nbr = -1

  # How many bytes the format number takes in the file. (The format number
  # may have leading zeroes after using this script to convert format 10 to
  # format 9 -- which would write the format number as '09'.)
  format_nbr_bytes = -1

  def __init__(self, path):
    self.path = path
    self.entries = []

  def parse(self, verbosity=0):
    """Parse the entries file.  Raises UnrecognizedWCFormatException when
    the leading format number is missing or not a supported format."""

    fp = open(self.path, "r")

    # Read the WC format number from the first line.  Validate that it
    # is a supported format for conversion.
    format_line = fp.readline()
    try:
      self.format_nbr = int(format_line)
      self.format_nbr_bytes = len(format_line.rstrip()) # remove '\n'
    except ValueError:
      self.format_nbr = -1
      self.format_nbr_bytes = -1
    if not self.format_nbr in LATEST_FORMATS.values():
      raise UnrecognizedWCFormatException(self.format_nbr, self.path)

    # Parse file into individual entries, to later inspect for
    # non-convertable data.
    entry = None
    while True:
      entry = self.parse_entry(fp, verbosity)
      if entry is None:
        break
      self.entries.append(entry)

    fp.close()

  def assert_valid_format(self, format_nbr, verbosity=0):
    """Raise LossyConversionException if any parsed entry carries data
    that FORMAT_NBR cannot represent."""
    if verbosity >= 2:
      print("Validating format for entries file '%s'" % self.path)
    for entry in self.entries:
      if verbosity >= 3:
        print("Validating format for entry '%s'" % entry.get_name())
      try:
        entry.assert_valid_format(format_nbr)
      except LossyConversionException:
        if verbosity >= 3:
          sys.stderr.write("Offending entry:\n%s\n" % entry)
          sys.stderr.flush()
        raise

  def parse_entry(self, fp, verbosity=0):
    "Read one entry from the FP stream; return None at end of file."
    entry = None

    while True:
      line = fp.readline()
      if line in ("", "\x0c\n"):
        # EOF or end of entry terminator encountered.
        break

      if entry is None:
        entry = Entry()

      # Retain the field value, ditching its field terminator ("\x0a").
      entry.fields.append(line[:-1])

    if entry is not None and verbosity >= 3:
      sys.stdout.write(str(entry))
      print("-" * 76)
    return entry

  def write_format(self, format_nbr):
    """Overwrite the leading format number with FORMAT_NBR, zero-padded
    to the width the old number occupied."""
    # Overwrite all bytes of the format number (which are the first bytes in
    # the file). Overwrite format '10' by format '09', which will be converted
    # to '9' by Subversion when it rewrites the file. (Subversion 1.4 and later
    # ignore leading zeroes in the format number.)
    assert len(str(format_nbr)) <= self.format_nbr_bytes
    format_string = '%0' + str(self.format_nbr_bytes) + 'd'

    os.chmod(self.path, stat.S_IRUSR | stat.S_IWUSR)
    # BUG FIX: the original used open(self.path, "r+", 0); unbuffered I/O
    # is only allowed in binary mode, so Python 3 raises ValueError here.
    # Buffering is fine because the file is flushed on close immediately.
    with open(self.path, "r+") as output:
      output.write(format_string % format_nbr)
    os.chmod(self.path, stat.S_IRUSR)
+
class Entry:
  "Describes an entry in a WC."

  # Maps a target format number to the indices of record fields holding
  # data that the format cannot represent; those fields must be empty for
  # a lossless downgrade.
  must_retain_fields = {
    # Not in 1.4: changelist, keep-local, depth, tree-conflicts, file-externals
    8 : (30, 31, 33, 34, 35),
    # Not in 1.5: tree-conflicts, file-externals
    9 : (34, 35),
    10 : (),
    # Downgrading from format 11 (1.7-dev) to format 10 is not possible,
    # because 11 does not use has-props and cachable-props (but 10 does).
    # Naively downgrading in that situation causes properties to disappear
    # from the wc.
    #
    # Downgrading from the 1.7 SQLite-based format to format 10 is not
    # implemented.
    }

  def __init__(self):
    self.fields = []

  def assert_valid_format(self, format_nbr):
    "Assure that conversion will be non-lossy by examining fields."

    # Collect the names of every populated field the target format lacks.
    lossy_fields = [Entries.entry_fields[index]
                    for index in self.must_retain_fields[format_nbr]
                    if index < len(self.fields) and self.fields[index]]
    if lossy_fields:
      raise LossyConversionException(lossy_fields,
        "Lossy WC format conversion requested for entry '%s'\n"
        "Data for the following field(s) is unsupported by older versions "
        "of\nSubversion, and is likely to be subsequently discarded, and/or "
        "have\nunexpected side-effects: %s\n\n"
        "WC format conversion was cancelled, use the --force option to "
        "override\nthe default behavior."
        % (self.get_name(), ", ".join(lossy_fields)))

  def get_name(self):
    "Return the name of this entry."
    if self.fields and self.fields[0]:
      return self.fields[0]
    return ""

  def __str__(self):
    "Return all fields from this entry as a multi-line string."
    return "".join("[%s] %s\n" % (Entries.entry_fields[index], value)
                   for index, value in enumerate(self.fields))
+ return rep
+
class Format:
  """Represents a .svn/format file."""

  def __init__(self, path):
    self.path = path

  def write_format(self, format_nbr, verbosity=0):
    """(Re)write the format file so it contains exactly FORMAT_NBR,
    leaving the file read-only afterwards."""
    exists = os.path.exists(self.path)
    if exists:
      if verbosity >= 1:
        print("%s will be updated." % self.path)
      # Temporarily make the (read-only) file writable.
      os.chmod(self.path, stat.S_IRUSR | stat.S_IWUSR)
    elif verbosity >= 1:
      print("%s does not exist, creating it." % self.path)
    with open(self.path, "w") as fp:
      fp.write('%d\n' % format_nbr)
    os.chmod(self.path, stat.S_IRUSR)
+
class LocalException(Exception):
  """Root of local exception class hierarchy."""
  pass

class LossyConversionException(LocalException):
  "Exception thrown when a lossy WC format conversion is requested."
  def __init__(self, lossy_fields, message):
    # 'message' was originally named 'str', shadowing the builtin; the
    # 'self.str' attribute name is kept for compatibility.
    self.lossy_fields = lossy_fields
    self.str = message
  def __str__(self):
    return self.str

class UnrecognizedWCFormatException(LocalException):
  """Raised when an entries file declares a format this script cannot
  convert (anything but 8, 9 or 10)."""
  def __init__(self, format_nbr, path):
    # 'format_nbr' was originally named 'format', shadowing the builtin;
    # the 'self.format' attribute name is kept for compatibility.
    self.format = format_nbr
    self.path = path
  def __str__(self):
    return ("Unrecognized WC format %d in '%s'; "
            "only formats 8, 9, and 10 can be supported") % (self.format, self.path)
+
+
def main():
  """Command-line entry point: validate arguments and convert the WC
  rooted at WC_PATH to the entries format used by SVN_VERSION."""
  try:
    opts, args = my_getopt(sys.argv[1:], "vh?",
                           ["debug", "force", "skip-unknown-format",
                            "verbose", "help"])
  except getopt.GetoptError:
    # BUG FIX: this was a bare 'except:', which would also swallow
    # SystemExit and KeyboardInterrupt.
    usage_and_exit("Unable to process arguments/options")

  converter = WCFormatConverter()

  # Process arguments.
  if len(args) == 2:
    converter.root_path = args[0]
    svn_version = args[1]
  else:
    usage_and_exit()

  # Process options.
  debug = False
  for opt, value in opts:
    if opt in ("--help", "-h", "-?"):
      usage_and_exit()
    elif opt == "--force":
      converter.force = True
    elif opt == "--skip-unknown-format":
      converter.error_on_unrecognized = False
    elif opt in ("--verbose", "-v"):
      converter.verbosity += 1
    elif opt == "--debug":
      debug = True
    else:
      usage_and_exit("Unknown option '%s'" % opt)

  try:
    new_format_nbr = LATEST_FORMATS[svn_version]
  except KeyError:
    usage_and_exit("Unsupported version number '%s'; "
                   "only 1.4, 1.5, and 1.6 can be supported" % svn_version)

  try:
    converter.change_wc_format(new_format_nbr)
  except LocalException as e:
    # --debug surfaces the full traceback; otherwise print and fail.
    if debug:
      raise
    sys.stderr.write("%s\n" % e)
    sys.stderr.flush()
    sys.exit(1)

  print("Converted WC at '%s' into format %d for Subversion %s" % \
        (converter.root_path, new_format_nbr, svn_version))
diff --git a/tools/client-side/mergeinfo-sanitizer.py b/tools/client-side/mergeinfo-sanitizer.py
new file mode 100755
index 0000000..db94d30
--- /dev/null
+++ b/tools/client-side/mergeinfo-sanitizer.py
@@ -0,0 +1,319 @@
+#!/usr/bin/env python
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+import svn
+import sys
+import os
+import getopt
+import hashlib
+import pickle
+import getpass
+from svn import client, core, ra, wc
+
+## This script first fetches the mergeinfo of the working copy and tries
+## to fetch the location segments for the source paths in the respective
+## revisions present in the mergeinfo. With the obtained location segments
+## result, it creates a new mergeinfo. The depth is infinity by default.
+## The script stops without making any changes if the working copy
+## contains local modifications.
+
+try:
+ my_getopt = getopt.gnu_getopt
+except AttributeError:
+ my_getopt = getopt.getopt
+mergeinfo = {}
+
def usage():
  """Write this script's command-line usage summary to stderr."""
  progname = os.path.basename(sys.argv[0])
  message = """ Usage: %s WCPATH [OPTION]

Analyze the mergeinfo property of the given WCPATH.
Look for the existence of merge_source's locations at their recorded
merge ranges. If non-existent merge source is found fix the mergeinfo.

Valid Options:
 -f [--fix] : set the svn:mergeinfo property. Not committing the changes.
 -h [--help] : display the usage

""" % progname
  sys.stderr.write(message)
+
+
##
# This function would 'svn propset' the new mergeinfo to the working copy
##
def set_new_mergeinfo(wcpath, newmergeinfo, ctx):
  # Depth is svn_depth_empty, so only WCPATH itself is touched; the
  # remaining arguments are presumably skip_checks=0, no base revision
  # (SVN_INVALID_REVNUM), no changelists and no commit callback -- see
  # svn_client_propset3() for the exact meaning of each position.
  client.propset3("svn:mergeinfo", newmergeinfo, wcpath, core.svn_depth_empty,
                  0, core.SVN_INVALID_REVNUM, None, None, ctx)
+
+
##
# Returns the md5 hash of the file
##
def md5_of_file(f, block_size=2**20):
  """Return the raw MD5 digest of the remaining contents of file object F.

  F is read in chunks of BLOCK_SIZE bytes until exhausted.
  BUG FIX: the default was '2*20' (40 bytes); the intended chunk size is
  clearly 2**20 (1 MiB).  The computed digest is identical either way."""
  md5 = hashlib.md5()
  while True:
    data = f.read(block_size)
    if not data:
      break
    md5.update(data)
  return md5.digest()
+
+
+
def hasher(hash_file, newmergeinfo_file):
  # Serialize the accumulated global 'mergeinfo' into NEWMERGEINFO_FILE
  # and record that file's MD5 digest (pickled) in HASH_FILE, so a later
  # --fix run can detect manual tampering with the saved mergeinfo.
  new_mergeinfo = core.svn_mergeinfo_to_string(mergeinfo)
  # NOTE(review): append mode means repeated runs stack multiple pickles
  # in the file, and text-mode pickling assumes Python 2 -- verify both.
  with open(newmergeinfo_file, "a") as buffer_file:
    pickle.dump(new_mergeinfo, buffer_file)
    buffer_file.close()  # redundant inside 'with'; kept as written

  with open(newmergeinfo_file, "rb") as buffer_file:
    hash_of_buffer_file = md5_of_file(buffer_file)
    buffer_file.close()  # redundant inside 'with'; kept as written

  with open(hash_file, "w") as hash_file:
    pickle.dump(hash_of_buffer_file, hash_file)
    hash_file.close()  # redundant inside 'with'; kept as written
+
+
def location_segment_callback(segment, pool):
  """Receiver for ra.get_location_segments(): fold SEGMENT into the
  global 'mergeinfo' dict as an inheritable svn_merge_range_t keyed by
  the segment's repository-absolute path."""
  if segment.path is not None:
    source_path = '/' + segment.path
    path_ranges = mergeinfo.get(source_path, [])
    # Renamed from 'range' to avoid shadowing the builtin.
    merge_range = svn.core.svn_merge_range_t()
    # start-1: svn merge ranges exclude their start revision -- presumably
    # converting the inclusive segment bounds; verify against the callers.
    merge_range.start = segment.range_start - 1
    merge_range.end = segment.range_end
    merge_range.inheritable = 1
    path_ranges.append(merge_range)
    mergeinfo[source_path] = path_ranges
+
##
# This function does the authentication in an interactive way
##
def prompt_func_ssl_unknown_cert(realm, failures, cert_info, may_save, pool):
  # svn auth callback: ask the user whether to trust an unknown/invalid
  # SSL server certificate.  Returns a filled-in
  # svn_auth_cred_ssl_server_trust_t, or None to reject the certificate.
  #
  # Show the certificate details so the user can make an informed choice.
  print("The certificate details are as follows:")
  print("--------------------------------------")
  print("Issuer : " + str(cert_info.issuer_dname))
  print("Hostname : " + str(cert_info.hostname))
  print("ValidFrom : " + str(cert_info.valid_from))
  print("ValidUpto : " + str(cert_info.valid_until))
  print("Fingerprint: " + str(cert_info.fingerprint))
  print("")
  ssl_trust = core.svn_auth_cred_ssl_server_trust_t()
  # Permanent acceptance is only offered when the auth system is allowed
  # to save credentials.
  if may_save:
    choice = raw_input( "accept (t)temporarily (p)permanently: ")
  else:
    choice = raw_input( "(r)Reject or accept (t)temporarily: ")
  # NOTE(review): raw_input is Python 2 only, and an empty reply makes
  # choice[0] raise IndexError -- confirm both are acceptable here.
  if choice[0] == "t" or choice[0] == "T":
    ssl_trust.may_save = False
    ssl_trust.accepted_failures = failures
  elif choice[0] == "p" or choice[0] == "P":
    ssl_trust.may_save = True
    ssl_trust.accepted_failures = failures
  else:
    ssl_trust = None
  return ssl_trust
+
def prompt_func_simple_prompt(realm, username, may_save, pool):
  # svn auth callback: interactively collect a username and password.
  # Credentials are never saved (may_save is forced to False).
  username = raw_input("username: ")  # NOTE(review): Python 2 only
  password = getpass.getpass(prompt="password: ")
  simple_cred = core.svn_auth_cred_simple_t()
  simple_cred.username = username
  simple_cred.password = password
  simple_cred.may_save = False
  return simple_cred
+
##
# This function tries to authenticate(if needed) and fetch the
# location segments for the available mergeinfo and create a new
# mergeinfo dictionary
##
def get_new_location_segments(parsed_original_mergeinfo, repo_root,
                              wcpath, ctx):
  """For every path in PARSED_ORIGINAL_MERGEINFO, open an RA session and
  fetch its location segments; location_segment_callback() accumulates
  the answers into the global 'mergeinfo' dict."""
  for path in parsed_original_mergeinfo:
    full_url = repo_root + path
    ra_callbacks = ra.callbacks_t()
    # Register both cached-credential and interactive-prompt providers so
    # stored credentials are used when available.
    ra_callbacks.auth_baton = core.svn_auth_open([
                                 core.svn_auth_get_ssl_server_trust_file_provider(),
                                 core.svn_auth_get_simple_prompt_provider(prompt_func_simple_prompt, 2),
                                 core.svn_auth_get_ssl_server_trust_prompt_provider(prompt_func_ssl_unknown_cert),
                                 svn.client.get_simple_provider(),
                                 svn.client.get_username_provider()
                                 ])
    try:
      ctx.config = core.svn_config_get_config(None)
      ra_session = ra.open(full_url, ra_callbacks, None, ctx.config)

      for revision_range in parsed_original_mergeinfo[path]:
        try:
          ra.get_location_segments(ra_session, "", revision_range.end,
                                   revision_range.end, revision_range.start + 1, location_segment_callback)
        except svn.core.SubversionException:
          sys.stderr.write(" Could not find location segments for %s \n" % path)
    except Exception as e:
      # BUG FIX: the original wrote an empty string here, silently
      # discarding the error; at least report what went wrong.
      sys.stderr.write("%s\n" % e)
+
+
def sanitize_mergeinfo(parsed_original_mergeinfo, repo_root, wcpath,
                       ctx, hash_file, newmergeinfo_file, temp_pool):
  """Report which recorded mergeinfo refers to no-longer-existing
  source@revision locations.  Populates the global 'mergeinfo' via
  get_new_location_segments(), snapshots it with hasher(), and prints a
  summary of the bogus entries.  The working copy is not modified."""
  full_mergeinfo = {}
  for entry in parsed_original_mergeinfo:
    # NOTE(review): get_new_location_segments() iterates its first
    # argument as a path->ranges mapping; passing the per-path value
    # here looks suspicious -- verify against get_original_mergeinfo()'s
    # return shape.
    get_new_location_segments(parsed_original_mergeinfo[entry], repo_root, wcpath, ctx)
    full_mergeinfo.update(parsed_original_mergeinfo[entry])

  hasher(hash_file, newmergeinfo_file)
  diff_mergeinfo = core.svn_mergeinfo_diff(full_mergeinfo,
                                           mergeinfo, 1, temp_pool)
  # There should be no mergeinfo added by our population; there should only
  # be deletions, so take them from diff_mergeinfo[0].
  print("The bogus mergeinfo summary:")
  bogus_mergeinfo_deleted = diff_mergeinfo[0]
  for bogus_mergeinfo_path in bogus_mergeinfo_deleted:
    sys.stdout.write(bogus_mergeinfo_path + ": ")
    for revision_range in bogus_mergeinfo_deleted[bogus_mergeinfo_path]:
      sys.stdout.write(str(revision_range.start + 1) + "-" + str(revision_range.end) + ",")
    print("")
+
##
# This function tries to 'propset' the new mergeinfo into the working copy.
# It reads the new mergeinfo from the .newmergeinfo file and verifies its
# hash against the hash in the .hashfile
##
def fix_sanitized_mergeinfo(parsed_original_mergeinfo, repo_root, wcpath,
                            ctx, hash_file, newmergeinfo_file, temp_pool):
  # The pickled mergeinfo in NEWMERGEINFO_FILE is applied only when its
  # current MD5 digest matches the digest recorded in HASH_FILE; both
  # scratch files are (re)generated on demand if missing, and deleted on
  # success.
  has_local_modification = check_local_modifications(wcpath, temp_pool)
  old_hash = ''
  new_hash = ''
  try:
    with open(hash_file, "r") as f:
      old_hash = pickle.load(f)
      f.close  # NOTE(review): missing '()' -- a no-op; harmless in 'with'
  except IOError as e:
    # No recorded hash yet: compute the mergeinfo, snapshot it, retry.
    get_new_location_segments(parsed_original_mergeinfo, repo_root, wcpath, ctx)
    hasher(hash_file, newmergeinfo_file)
    try:
      with open(hash_file, "r") as f:
        old_hash = pickle.load(f)
        f.close  # NOTE(review): missing '()' -- a no-op
    except IOError:
      hasher(hash_file, newmergeinfo_file)
  try:
    with open(newmergeinfo_file, "r") as f:
      new_hash = md5_of_file(f)
      f.close  # NOTE(review): missing '()' -- a no-op
  except IOError as e:
    # Saved mergeinfo file is missing; rebuild it before hashing.
    if not mergeinfo:
      get_new_location_segments(parsed_original_mergeinfo, repo_root, wcpath, ctx)
    hasher(hash_file, newmergeinfo_file)
    with open(newmergeinfo_file, "r") as f:
      new_hash = md5_of_file(f)
      f.close  # NOTE(review): missing '()' -- a no-op
  if old_hash == new_hash:
    with open(newmergeinfo_file, "r") as f:
      newmergeinfo = pickle.load(f)
      f.close  # NOTE(review): missing '()' -- a no-op
    set_new_mergeinfo(wcpath, newmergeinfo, ctx)
    # Clean up the scratch files once the property has been set.
    if os.path.exists(newmergeinfo_file):
      os.remove(newmergeinfo_file)
      os.remove(hash_file)
  else:
    print("The hashes are not matching. Probable chance of unwanted tweaking in the mergeinfo")
+
+
##
# This function checks the working copy for any local modifications
##
def check_local_modifications(wcpath, temp_pool):
  # Exits the whole script with status 1 when WCPATH has local changes;
  # only the 'modified' flag of the returned status structure is used.
  # assumes the 0/None arguments mean "no trail URL / not committed" --
  # verify against svn_wc_revision_status().
  has_local_mod = wc.svn_wc_revision_status(wcpath, None, 0, None, temp_pool)
  if has_local_mod.modified:
    print("""The working copy has local modifications. Please revert them or clean
the working copy before running the script.""")
    sys.exit(1)
+
def get_original_mergeinfo(wcpath, revision, depth, ctx, temp_pool):
  """Return a dict mapping each path under WCPATH carrying svn:mergeinfo
  to its parsed mergeinfo (via svn_mergeinfo_parse).

  propget_list[0] is presumably the path -> property-value mapping
  returned by svn_client_propget3 -- verify against the bindings."""
  propget_list = client.svn_client_propget3("svn:mergeinfo", wcpath,
                                            revision, revision, depth, None,
                                            ctx, temp_pool)

  # (Removed two unused locals the original declared here.)
  mergeinfo_catalog = propget_list[0]
  mergeinfo_catalog_dict = {}
  for entry in mergeinfo_catalog:
    mergeinfo_catalog_dict[entry] = core.svn_mergeinfo_parse(mergeinfo_catalog[entry], temp_pool)
  return mergeinfo_catalog_dict
+
+
def main():
  """Entry point: parse options, refuse dirty working copies, then either
  report bogus mergeinfo (default) or apply the fix (-f/--fix)."""
  try:
    opts, args = my_getopt(sys.argv[1:], "h?f", ["help", "fix"])
  except getopt.GetoptError:
    # Narrowed from a blanket 'except Exception': only argument-parsing
    # errors belong here.
    sys.stderr.write(""" Improperly used """)
    sys.exit(1)

  if len(args) == 1:
    wcpath = args[0]
    wcpath = os.path.abspath(wcpath)
  else:
    usage()
    sys.exit(1)

  fix = 0
  current_path = os.getcwd()
  # Scratch files used to detect tampering between a report run and a
  # subsequent --fix run.
  hash_file = os.path.join(current_path, ".hashfile")
  newmergeinfo_file = os.path.join(current_path, ".newmergeinfo")

  temp_pool = core.svn_pool_create()
  ctx = client.svn_client_create_context(temp_pool)
  depth = core.svn_depth_infinity
  revision = core.svn_opt_revision_t()
  revision.kind = core.svn_opt_revision_unspecified

  for opt, values in opts:
    if opt == "--help" or opt in ("-h", "-?"):
      usage()
      # BUG FIX: the original fell through and ran the whole script
      # after printing the usage message.
      sys.exit(0)
    elif opt == "--fix" or opt == "-f":
      fix = 1

  # Check for any local modifications in the working copy
  check_local_modifications(wcpath, temp_pool)

  parsed_original_mergeinfo = get_original_mergeinfo(wcpath, revision,
                                                     depth, ctx, temp_pool)

  repo_root = client.svn_client_root_url_from_path(wcpath, ctx, temp_pool)

  core.svn_config_ensure(None)

  if fix == 0:
    sanitize_mergeinfo(parsed_original_mergeinfo, repo_root, wcpath, ctx,
                       hash_file, newmergeinfo_file, temp_pool)
  if fix == 1:
    fix_sanitized_mergeinfo(parsed_original_mergeinfo, repo_root, wcpath,
                            ctx, hash_file, newmergeinfo_file, temp_pool)
+
+
if __name__ == "__main__":
  try:
    main()
  except KeyboardInterrupt:
    # Ctrl-C: report the interruption instead of dumping a traceback.
    print("")
    sys.stderr.write("The script is interrupted and stopped manually.")
    print("")
+
diff --git a/tools/client-side/server-version.py b/tools/client-side/server-version.py
new file mode 100755
index 0000000..e61a320
--- /dev/null
+++ b/tools/client-side/server-version.py
@@ -0,0 +1,92 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# server-version.py: print a Subversion server's version number
+#
+# USAGE: server-version.py URL
+#
+# The URL can contain any path on the server, as we are simply looking
+# for Apache's response to OPTIONS, and its Server: header.
+#
+# EXAMPLE:
+#
+# $ ./server-version.py http://svn.collab.net/
+# or
+# $ ./server-version.py https://svn.collab.net/
+#
+
+import sys
+try:
+ # Python >=3.0
+ from http.client import HTTPConnection as http_client_HTTPConnection
+ from http.client import HTTPSConnection as http_client_HTTPSConnection
+ from urllib.parse import urlparse as urllib_parse_urlparse
+except ImportError:
+ # Python <3.0
+ from httplib import HTTPConnection as http_client_HTTPConnection
+ from httplib import HTTPSConnection as http_client_HTTPSConnection
+ from urlparse import urlparse as urllib_parse_urlparse
+
+
def print_version(url):
  """Send an OPTIONS request to URL's host and print the SVN/x.y.z
  version advertised in the Server: response header, if any.

  Exits with status 1 on an unsupported scheme or an unexpected HTTP
  status."""
  parsed = urllib_parse_urlparse(url)
  scheme, netloc, path = parsed[0], parsed[1], parsed[2]
  if scheme == 'http':
    conn = http_client_HTTPConnection(netloc)
  elif scheme == 'https':
    conn = http_client_HTTPSConnection(netloc)
  else:
    print('ERROR: this script only supports "http" and "https" URLs')
    sys.exit(1)
  conn.putrequest('OPTIONS', path)
  conn.putheader('Host', netloc)
  conn.endheaders()
  response = conn.getresponse()
  status = response.status
  msg = response.msg
  server_header = response.getheader('Server')
  conn.close()

  # 1) Handle "OK" (200)
  # 2) Handle redirect requests (302), if requested resource
  #    resides temporarily under a different URL
  # 3) Handle authorization (401), if server requests for authorization
  #    ignore it, since we are interested in server version only
  if status not in (200, 302, 401):
    print('ERROR: bad status response: %s %s' % (status, msg))
    sys.exit(1)
  if not server_header:
    # a missing Server: header. Bad, bad server! Go sit in the corner!
    print('WARNING: missing header')
  else:
    for token in server_header.split(' '):
      if token.startswith('SVN/'):
        print(token[4:])
        break
    else:
      # the server might be configured to hide this information, or it
      # might not have mod_dav_svn loaded into it.
      print('NOTICE: version unknown')
+
+
if __name__ == '__main__':
  # Require exactly one argument: the URL to probe.
  if len(sys.argv) != 2:
    print('USAGE: %s URL' % sys.argv[0])
    sys.exit(1)
  print_version(sys.argv[1])
diff --git a/tools/client-side/svn-graph.pl b/tools/client-side/svn-graph.pl
new file mode 100755
index 0000000..0675e8a
--- /dev/null
+++ b/tools/client-side/svn-graph.pl
@@ -0,0 +1,254 @@
+#!/usr/bin/perl -w
+# vim:ts=2:sw=2:expandtab
+#
+# svn-graph.pl - produce a GraphViz .dot graph for the branch history
+# of a node
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+#
+# View graphs using a command like:
+#
+# svn-graph.pl file:///tmp/repos | dotty -
+#
+# TODO:
+# - Calculate the repository root at runtime so the user can pass
+# the node of interest as a single URL.
+# - (Also?) produce the graphical output ourselves (SVG?) instead
+# of writing a GraphViz ".dot" data file. This can be done with
+# GraphViz using 'dot'.
+# - Display svnmerge.py/Subversion merge history.
+#
+
use strict;
use Getopt::Std;

# Turn off output buffering
$|=1;

require SVN::Core;
require SVN::Client;

# The URL of the Subversion repository we wish to graph
# (e.g. "http://svn.apache.org/repos/asf/subversion").
my $repos_url;

# The revision range we operate on, from $startrev -> $youngest.
my $youngest;
my $startrev;

# This is the node we're interested in
my $startpath;

# Set the variables declared above.
parse_commandline();

# The "interesting" nodes are potential sources for copies.  This list
# grows as we move through time.
# The "tracking" nodes are the most recent revisions of paths we're
# following as we move through time.  If we hit a delete of a path
# we remove it from the tracking array (i.e. we're no longer interested
# in it).
my %interesting = ("$startpath:$startrev", 1);
my %tracking = ("$startpath", $startrev);

# Edges within a single codeline (the same path changed again later) and
# the endpoints of copies; filled by process_revision() and rendered by
# write_graph_descriptor().
my %codeline_changes_forward = ();
my %codeline_changes_back = ();
my %copysource = ();
my %copydest = ();

write_graph_descriptor();
#print STDERR "\n";
+
+
+
# Validate the command-line arguments, and set the global variables
# $repos_url, $youngest, $startrev, and $startpath.
sub parse_commandline
{
  my %cmd_opts;
  my $usage = "
usage: svn-graph.pl [-r START_REV:END_REV] [-p PATH] REPOS_URL

  -r the revision range (defaults to 0 through HEAD)
  -p the repository-relative path (defaults to /trunk)
  -h show this help information (other options will be ignored)
";

  # Defaults.
  $cmd_opts{'r'} = '1:HEAD';
  $cmd_opts{'p'} = '/trunk';

  getopts('r:p:h', \%cmd_opts) or die $usage;

  # Print help info (and exit nicely) if requested -- before insisting on
  # a REPOS_URL argument, so 'svn-graph.pl -h' alone works.
  if ($cmd_opts{'h'})
  {
    print($usage);
    exit 0;
  }

  die $usage if scalar(@ARGV) < 1;
  $repos_url = SVN::Core::uri_canonicalize($ARGV[0]);

  $cmd_opts{'r'} =~ m/(\d+)(:(.+))?/;
  if ($3)
  {
    $youngest = ($3 eq 'HEAD' ? $3 : int($3));
    $startrev = int($1);
  }
  else
  {
    # BUG FIX: the original compared $3 to 'HEAD' here, but $3 is
    # necessarily undefined in this branch (that is why we are in the
    # else), provoking an uninitialized-value warning under -w.
    $youngest = int($1);
    $startrev = 1;
  }

  $startpath = $cmd_opts{'p'};
}
+
# This function is a callback which is invoked for every revision as
# we traverse change log messages.
sub process_revision
{
  my $changed_paths = shift;
  my $revision = shift || '';
  my $author = shift || '';
  my $date = shift || '';
  my $message = shift || '';
  my $pool = shift;

  #print STDERR "$revision\r";

  foreach my $path (keys %$changed_paths)
  {
    my $copyfrom_path = $$changed_paths{$path}->copyfrom_path;
    my $copyfrom_rev = undef;
    my $action = $$changed_paths{$path}->action;

    # See if we're deleting one of our tracking nodes
    if ($action eq 'D' and exists($tracking{$path}))
    {
      print "\t\"$path:$tracking{$path}\" ";
      print "[label=\"$path:$tracking{$path}\\nDeleted in r$revision\",color=red];\n";
      delete($tracking{$path});
      next;
    }

    ### TODO: Display a commit which was the result of a merge
    ### operation with [style=dashed,color=blue]

    # If this is a copy, work out if it was from somewhere interesting
    if (defined($copyfrom_path))
    {
      $copyfrom_rev = $tracking{$copyfrom_path};
    }
    if (defined($copyfrom_rev) &&
        exists($interesting{$copyfrom_path . ':' . $copyfrom_rev}))
    {
      # Record the copy as a green edge; the destination becomes a new
      # potential copy source.
      $interesting{$path . ':' . $revision} = 1;
      $tracking{$path} = $revision;
      print "\t\"$copyfrom_path:$copyfrom_rev\" -> ";
      print " \"$path:$revision\"";
      print " [label=\"copy at r$revision\",color=green];\n";

      $copysource{"$copyfrom_path:$copyfrom_rev"} = 1;
      $copydest{"$path:$revision"} = 1;
    }

    # For each change, we'll walk up the path one component at a time,
    # updating any parents that we're tracking (i.e. a change to
    # /trunk/asdf/foo updates /trunk).  We mark that parent as
    # interesting (a potential source for copies), draw a link, and
    # update its tracking revision.
    do
    {
      if (exists($tracking{$path}) && $tracking{$path} != $revision)
      {
        $codeline_changes_forward{"$path:$tracking{$path}"} =
          "$path:$revision";
        $codeline_changes_back{"$path:$revision"} =
          "$path:$tracking{$path}";
        $interesting{$path . ':' . $revision} = 1;
        $tracking{$path} = $revision;
      }
      # Strip the last path component and try again with the parent.
      $path =~ s:/[^/]*$::;
    } until ($path eq '');
  }
}
+
# Write a descriptor for the graph in GraphViz .dot format to stdout.
sub write_graph_descriptor
{
  my $client = SVN::Client->new;
  # Begin writing the graph descriptor.
  print "digraph tree {\n";
  print "\tgraph [bgcolor=white];\n";
  print "\tnode [color=lightblue2, style=filled];\n";
  print "\tedge [color=black, labeljust=r];\n";
  print "\n";

  # Retrieve the requested history; process_revision() fills the
  # %codeline_changes_*, %copysource and %copydest tables as it runs.
  $client->log($repos_url, $startrev, $youngest, 1, 0, \&process_revision);

  # Now ensure that everything is linked.
  foreach my $codeline_change (keys %codeline_changes_forward)
  {
    # If this node is not the first in its codeline chain, and it isn't
    # the source of a copy, it won't be the source of an edge
    if (exists($codeline_changes_back{$codeline_change}) &&
        !exists($copysource{$codeline_change}))
    {
      next;
    }

    # If this node is the first in its chain, or the source of
    # a copy, then we'll print it, and then find the next in
    # the chain that needs to be printed too
    if (!exists($codeline_changes_back{$codeline_change}) or
        exists($copysource{$codeline_change}) )
    {
      print "\t\"$codeline_change\" -> ";
      my $nextchange = $codeline_changes_forward{$codeline_change};
      my $changecount = 1;
      # Follow the chain, compressing consecutive plain changes into one
      # edge labelled with the number of changes it spans.
      while (defined($nextchange))
      {
        if (exists($copysource{$nextchange}) or
            !exists($codeline_changes_forward{$nextchange}) )
        {
          print "\"$nextchange\" [label=\"$changecount change";
          if ($changecount > 1)
          {
            print 's';
          }
          print '"];';
          last;
        }
        $changecount++;
        $nextchange = $codeline_changes_forward{$nextchange};
      }
      print "\n";
    }
  }

  # Complete the descriptor (delaying write of font size to avoid
  # inheritance by any subgraphs).
  #my $title = "Family Tree\n$startpath, $startrev through $youngest";
  #print "\tgraph [label=\"$title\", fontsize=18];\n";
  print "}\n";
}
diff --git a/tools/client-side/svn-mergeinfo-normalizer/analyze-cmd.c b/tools/client-side/svn-mergeinfo-normalizer/analyze-cmd.c
new file mode 100644
index 0000000..2da8ca9
--- /dev/null
+++ b/tools/client-side/svn-mergeinfo-normalizer/analyze-cmd.c
@@ -0,0 +1,61 @@
+/*
+ * analyze-cmd.c -- Print which MI can be elided, which one can not and why
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+/* ==================================================================== */
+
+
+
+/*** Includes. ***/
+
+#include "mergeinfo-normalizer.h"
+
+
+/*** Code. ***/
+
+/* This implements the `svn_opt_subcommand_t' interface. */
+svn_error_t *
+svn_min__analyze(apr_getopt_t *os,
+ void *baton,
+ apr_pool_t *pool)
+{
+ svn_min__cmd_baton_t *cmd_baton = baton;
+
+ /* If no option is given, default to "remove all you can". */
+ if ( !cmd_baton->opt_state->remove_redundants
+ && !cmd_baton->opt_state->remove_obsoletes
+ && !cmd_baton->opt_state->combine_ranges
+ && !cmd_baton->opt_state->remove_redundant_misaligned)
+ {
+ cmd_baton->opt_state->remove_redundants = TRUE;
+ cmd_baton->opt_state->remove_obsoletes = TRUE;
+ cmd_baton->opt_state->combine_ranges = TRUE;
+ cmd_baton->opt_state->remove_redundant_misaligned = TRUE;
+ }
+
+ cmd_baton->opt_state->run_analysis = TRUE;
+ cmd_baton->opt_state->dry_run = TRUE;
+
+ SVN_ERR(svn_min__run_normalize(baton, pool));
+
+ return SVN_NO_ERROR;
+}
diff --git a/tools/client-side/svn-mergeinfo-normalizer/help-cmd.c b/tools/client-side/svn-mergeinfo-normalizer/help-cmd.c
new file mode 100644
index 0000000..d2f1731
--- /dev/null
+++ b/tools/client-side/svn-mergeinfo-normalizer/help-cmd.c
@@ -0,0 +1,191 @@
+/*
+ * help-cmd.c -- Provide help
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+/* ==================================================================== */
+
+
+
+/*** Includes. ***/
+
+#include "svn_hash.h"
+#include "svn_string.h"
+#include "svn_config.h"
+#include "svn_dirent_uri.h"
+#include "svn_error.h"
+#include "mergeinfo-normalizer.h"
+
+#include "svn_private_config.h"
+
+
+/*** Code. ***/
+
+/* This implements the `svn_opt_subcommand_t' interface. */
+svn_error_t *
+svn_min__help(apr_getopt_t *os,
+ void *baton,
+ apr_pool_t *pool)
+{
+ svn_min__opt_state_t *opt_state = NULL;
+ svn_stringbuf_t *version_footer = NULL;
+ const char *config_path;
+
+ char help_header[] =
+ N_("usage: svn-mergeinfo-normalizer <subcommand> [options] [args]\n"
+ "Subversion mergeinfo normalization and reduction tool.\n"
+ "Type 'svn-mergeinfo-normalizer help <subcommand>' for help on a specific\n"
+ "subcommand. Type 'svn-mergeinfo-normalizer --version' to see the program\n"
+ "version and RA modules or 'svn-mergeinfo-normalizer --version --quiet'\n"
+ "to see just the version number.\n"
+ "\n"
+ "Most subcommands take file and/or directory arguments, recursing\n"
+ "on the directories. If no arguments are supplied to such a\n"
+ "command, it recurses on the current directory (inclusive) by default.\n"
+ "\n"
+ "Available subcommands:\n");
+
+ char help_footer[] =
+ N_("Subversion is a tool for version control.\n"
+ "For additional information, see http://subversion.apache.org/\n");
+
+ const char *ra_desc_start
+ = _("The following repository access (RA) modules are available:\n\n");
+
+ if (baton)
+ {
+ svn_min__cmd_baton_t *const cmd_baton = baton;
+#ifndef SVN_DISABLE_PLAINTEXT_PASSWORD_STORAGE
+ /* Windows never actually stores plaintext passwords, it
+ encrypts the contents using CryptoAPI. ...
+
+ ... If CryptoAPI is available ... but it should be on all
+ versions of Windows that are even remotely interesting two
+ days before the scheduled end of the world, when this comment
+ is being written. */
+# ifndef WIN32
+ svn_boolean_t store_auth_creds =
+ SVN_CONFIG_DEFAULT_OPTION_STORE_AUTH_CREDS;
+ svn_boolean_t store_passwords =
+ SVN_CONFIG_DEFAULT_OPTION_STORE_PASSWORDS;
+ svn_boolean_t store_plaintext_passwords = FALSE;
+ svn_config_t *cfg;
+
+ if (cmd_baton->ctx->config)
+ {
+ cfg = svn_hash_gets(cmd_baton->ctx->config,
+ SVN_CONFIG_CATEGORY_CONFIG);
+ if (cfg)
+ {
+ SVN_ERR(svn_config_get_bool(cfg, &store_auth_creds,
+ SVN_CONFIG_SECTION_AUTH,
+ SVN_CONFIG_OPTION_STORE_AUTH_CREDS,
+ store_auth_creds));
+ SVN_ERR(svn_config_get_bool(cfg, &store_passwords,
+ SVN_CONFIG_SECTION_AUTH,
+ SVN_CONFIG_OPTION_STORE_PASSWORDS,
+ store_passwords));
+ }
+ cfg = svn_hash_gets(cmd_baton->ctx->config,
+ SVN_CONFIG_CATEGORY_SERVERS);
+ if (cfg)
+ {
+ const char *value;
+ SVN_ERR(svn_config_get_yes_no_ask
+ (cfg, &value,
+ SVN_CONFIG_SECTION_GLOBAL,
+ SVN_CONFIG_OPTION_STORE_PLAINTEXT_PASSWORDS,
+ SVN_CONFIG_DEFAULT_OPTION_STORE_PLAINTEXT_PASSWORDS));
+ if (0 == svn_cstring_casecmp(value, SVN_CONFIG_TRUE))
+ store_plaintext_passwords = TRUE;
+ }
+ }
+
+ if (store_plaintext_passwords && store_auth_creds && store_passwords)
+ {
+ version_footer = svn_stringbuf_create(
+ _("WARNING: Plaintext password storage is enabled!\n\n"),
+ pool);
+ svn_stringbuf_appendcstr(version_footer, ra_desc_start);
+ }
+# endif /* !WIN32 */
+#endif /* !SVN_DISABLE_PLAINTEXT_PASSWORD_STORAGE */
+
+ opt_state = cmd_baton->opt_state;
+ }
+
+ if (!version_footer)
+ version_footer = svn_stringbuf_create(ra_desc_start, pool);
+ SVN_ERR(svn_ra_print_modules(version_footer, pool));
+
+ /*
+ * Show auth creds storage providers.
+ */
+ SVN_ERR(svn_config_get_user_config_path(&config_path,
+ opt_state ? opt_state->config_dir
+ : NULL,
+ NULL,
+ pool));
+ svn_stringbuf_appendcstr(version_footer,
+ _("\nThe following authentication credential caches are available:\n\n"));
+
+ /*### There is no API to query available providers at run time. */
+#if (defined(WIN32) && !defined(__MINGW32__))
+ version_footer =
+ svn_stringbuf_create(apr_psprintf(pool, _("%s* Wincrypt cache in %s\n"),
+ version_footer->data,
+ svn_dirent_local_style(config_path,
+ pool)),
+ pool);
+#elif !defined(SVN_DISABLE_PLAINTEXT_PASSWORD_STORAGE)
+ version_footer =
+ svn_stringbuf_create(apr_psprintf(pool, _("%s* Plaintext cache in %s\n"),
+ version_footer->data,
+ svn_dirent_local_style(config_path,
+ pool)),
+ pool);
+#endif
+#if (defined(SVN_HAVE_GNOME_KEYRING) || defined(SVN_HAVE_LIBSECRET))
+ svn_stringbuf_appendcstr(version_footer, "* Gnome Keyring\n");
+#endif
+#ifdef SVN_HAVE_GPG_AGENT
+ svn_stringbuf_appendcstr(version_footer, "* GPG-Agent\n");
+#endif
+#ifdef SVN_HAVE_KEYCHAIN_SERVICES
+ svn_stringbuf_appendcstr(version_footer, "* Mac OS X Keychain\n");
+#endif
+#ifdef SVN_HAVE_KWALLET
+ svn_stringbuf_appendcstr(version_footer, "* KWallet (KDE)\n");
+#endif
+
+ return svn_opt_print_help4(os,
+ "svn-mergeinfo-normalizer",
+ opt_state ? opt_state->version : FALSE,
+ opt_state ? opt_state->quiet : FALSE,
+ TRUE,
+ version_footer->data,
+ help_header, /* already gettext()'d */
+ svn_min__cmd_table,
+ svn_min__options,
+ svn_min__global_options,
+ _(help_footer),
+ pool);
+}
diff --git a/tools/client-side/svn-mergeinfo-normalizer/log.c b/tools/client-side/svn-mergeinfo-normalizer/log.c
new file mode 100644
index 0000000..c73d101
--- /dev/null
+++ b/tools/client-side/svn-mergeinfo-normalizer/log.c
@@ -0,0 +1,1032 @@
+/*
+ * log.c -- Fetch log data and implement the log queries
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include "svn_cmdline.h"
+#include "svn_client.h"
+#include "svn_dirent_uri.h"
+#include "svn_string.h"
+#include "svn_path.h"
+#include "svn_error.h"
+#include "svn_sorts.h"
+#include "svn_pools.h"
+#include "svn_hash.h"
+
+#include "private/svn_fspath.h"
+#include "private/svn_subr_private.h"
+#include "private/svn_sorts_private.h"
+
+#include "mergeinfo-normalizer.h"
+
+#include "svn_private_config.h"
+
+
+
+/* Describes all changes of a single revision.
+ * Note that all strings are shared within a given svn_min__log_t instance.
+ */
+typedef struct log_entry_t
+{
+ /* Revision being described. */
+ svn_revnum_t revision;
+
+ /* FS path that is equal or a parent of any in PATHS. */
+ const char *common_base;
+
+ /* Sorted list of all FS paths touched. Elements are const char*. */
+ apr_array_header_t *paths;
+} log_entry_t;
+
+/* Describes a deletion.
+ * Note that replacements are treated as additions + deletions.
+ */
+typedef struct deletion_t
+{
+ /* Path being deleted (or replaced). */
+ const char *path;
+
+ /* Revision in which this deletion happened.*/
+ svn_revnum_t revision;
+} deletion_t;
+
+/* Note that all FS paths are internalized and shared within this object.
+ */
+struct svn_min__log_t
+{
+ /* Dictionary of all FS paths used in this log.
+ * The hash itself is only temporary and will be destroyed after the log
+ * has been constructed and all paths got internalized. */
+ apr_hash_t *unique_paths;
+
+ /* Oldest revision we received. */
+ svn_revnum_t first_rev;
+
+ /* Latest revision we received. */
+ svn_revnum_t head_rev;
+
+ /* Log contents we received. Entries are log_entry_t *. */
+ apr_array_header_t *entries;
+
+ /* List of all copy operations we encountered, sorted by target&rev. */
+ apr_array_header_t *copies;
+
+ /* Like COPIES but sorted by source&source-rev. */
+ apr_array_header_t *copies_by_source;
+
+ /* List of all deletions we encountered, sorted by path&rev. */
+ apr_array_header_t *deletions;
+
+ /* If set, don't show progress nor summary. */
+ svn_boolean_t quiet;
+};
+
+/* Comparison function defining the order in svn_min__log_t.COPIES. */
+static int
+copy_order(const void *lhs,
+ const void *rhs)
+{
+ const svn_min__copy_t *lhs_copy = *(const svn_min__copy_t * const *)lhs;
+ const svn_min__copy_t *rhs_copy = *(const svn_min__copy_t * const *)rhs;
+
+ int diff = strcmp(lhs_copy->path, rhs_copy->path);
+ if (diff)
+ return diff;
+
+ if (lhs_copy->revision < rhs_copy->revision)
+ return -1;
+
+ return lhs_copy->revision == rhs_copy->revision ? 0 : 1;
+}
+
+/* Comparison function defining the order in svn_min__log_t.COPIES_BY_SOURCE.
+ */
+static int
+copy_by_source_order(const void *lhs,
+ const void *rhs)
+{
+ const svn_min__copy_t *lhs_copy = *(const svn_min__copy_t * const *)lhs;
+ const svn_min__copy_t *rhs_copy = *(const svn_min__copy_t * const *)rhs;
+
+ int diff = strcmp(lhs_copy->copyfrom_path, rhs_copy->copyfrom_path);
+ if (diff)
+ return diff;
+
+ if (lhs_copy->copyfrom_revision < rhs_copy->copyfrom_revision)
+ return -1;
+
+ return lhs_copy->copyfrom_revision == rhs_copy->copyfrom_revision ? 0 : 1;
+}
+
+/* Comparison function defining the order in svn_min__log_t.DELETIONS. */
+static int
+deletion_order(const void *lhs,
+ const void *rhs)
+{
+ const deletion_t *lhs_deletion = *(const deletion_t * const *)lhs;
+ const deletion_t *rhs_deletion = *(const deletion_t * const *)rhs;
+
+ int diff = strcmp(lhs_deletion->path, rhs_deletion->path);
+ if (diff)
+ return diff;
+
+ if (lhs_deletion->revision < rhs_deletion->revision)
+ return -1;
+
+ return lhs_deletion->revision == rhs_deletion->revision ? 0 : 1;
+}
+
+/* Return the string stored in UNIQUE_PATHS with the value PATH of PATH_LEN
+ * characters. If the hash does not have a matching entry, add one.
+ * Allocate all strings in RESULT_POOL. */
+static const char *
+internalize(apr_hash_t *unique_paths,
+ const char *path,
+ apr_ssize_t path_len,
+ apr_pool_t *result_pool)
+{
+ const char *result = apr_hash_get(unique_paths, path, path_len);
+ if (result == NULL)
+ {
+ result = apr_pstrmemdup(result_pool, path, path_len);
+ apr_hash_set(unique_paths, result, path_len, result);
+ }
+
+ return result;
+}
+
+/* Implements svn_log_entry_receiver_t. Copies the info of LOG_ENTRY into
+ * (svn_min__log_t *)BATON. */
+static svn_error_t *
+log_entry_receiver(void *baton,
+ svn_log_entry_t *log_entry,
+ apr_pool_t *scratch_pool)
+{
+ svn_min__log_t *log = baton;
+ apr_pool_t *result_pool = log->entries->pool;
+ log_entry_t *entry;
+ apr_hash_index_t *hi;
+ const char *common_base;
+ int count;
+
+ /* Don't care about empty revisions. Skip them. */
+ if (!log_entry->changed_paths || !apr_hash_count(log_entry->changed_paths))
+ return SVN_NO_ERROR;
+
+ /* Copy changed paths list. Collect deletions and copies. */
+ entry = apr_pcalloc(result_pool, sizeof(*entry));
+ entry->revision = log_entry->revision;
+ entry->paths = apr_array_make(result_pool,
+ apr_hash_count(log_entry->changed_paths),
+ sizeof(const char *));
+
+ for (hi = apr_hash_first(scratch_pool, log_entry->changed_paths);
+ hi;
+ hi = apr_hash_next(hi))
+ {
+ const char *path = apr_hash_this_key(hi);
+ svn_log_changed_path_t *change = apr_hash_this_val(hi);
+
+ path = internalize(log->unique_paths, path, apr_hash_this_key_len(hi),
+ result_pool);
+ APR_ARRAY_PUSH(entry->paths, const char *) = path;
+
+ if (change->action == 'D' || change->action == 'R')
+ {
+ deletion_t *deletion = apr_pcalloc(result_pool, sizeof(*deletion));
+ deletion->path = path;
+ deletion->revision = log_entry->revision;
+
+ APR_ARRAY_PUSH(log->deletions, deletion_t *) = deletion;
+ }
+
+ if (SVN_IS_VALID_REVNUM(change->copyfrom_rev))
+ {
+ svn_min__copy_t *copy = apr_pcalloc(result_pool, sizeof(*copy));
+ copy->path = path;
+ copy->revision = log_entry->revision;
+ copy->copyfrom_path = internalize(log->unique_paths,
+ change->copyfrom_path,
+ strlen(change->copyfrom_path),
+ result_pool);
+ copy->copyfrom_revision = change->copyfrom_rev;
+
+ APR_ARRAY_PUSH(log->copies, svn_min__copy_t *) = copy;
+ }
+ }
+
+ /* Determine the common base of all changed paths. */
+ count = entry->paths->nelts;
+ if (count == 1)
+ {
+ entry->common_base = APR_ARRAY_IDX(entry->paths, 0, const char *);
+ }
+ else
+ {
+ svn_sort__array(entry->paths, svn_sort_compare_paths);
+
+ common_base = svn_dirent_get_longest_ancestor(
+ APR_ARRAY_IDX(entry->paths, 0, const char *),
+ APR_ARRAY_IDX(entry->paths, count - 1, const char *),
+ scratch_pool);
+ entry->common_base = internalize(log->unique_paths, common_base,
+ strlen(common_base), result_pool);
+ }
+
+  /* Done with that revision. */
+ APR_ARRAY_PUSH(log->entries, log_entry_t *) = entry;
+
+ /* Update log-global state. */
+ log->first_rev = log_entry->revision;
+ if (log->head_rev == SVN_INVALID_REVNUM)
+ log->head_rev = log_entry->revision;
+
+ /* Show progress. */
+ if (log->entries->nelts % 1000 == 0 && !log->quiet)
+ {
+ SVN_ERR(svn_cmdline_printf(scratch_pool, "."));
+ SVN_ERR(svn_cmdline_fflush(stdout));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Print some statistics about LOG to console. Use SCRATCH_POOL for
+ * temporary allocations. */
+static svn_error_t *
+print_log_stats(svn_min__log_t *log,
+ apr_pool_t *scratch_pool)
+{
+ int change_count = 0;
+
+ int i;
+ for (i = 0; i < log->entries->nelts; ++i)
+ {
+ const log_entry_t *entry = APR_ARRAY_IDX(log->entries, i,
+ const log_entry_t *);
+ change_count += entry->paths->nelts;
+ }
+
+ SVN_ERR(svn_cmdline_printf(scratch_pool,
+ _(" Received %d revisions from %ld to %ld.\n"),
+ log->entries->nelts, log->first_rev,
+ log->head_rev));
+ SVN_ERR(svn_cmdline_printf(scratch_pool,
+ _(" Received %d path changes.\n"),
+ change_count));
+ SVN_ERR(svn_cmdline_printf(scratch_pool,
+ _(" Pool has %u different paths.\n\n"),
+ apr_hash_count(log->unique_paths)));
+
+ return SVN_NO_ERROR;
+}
+
+svn_error_t *
+svn_min__log(svn_min__log_t **log,
+ const char *url,
+ svn_min__cmd_baton_t *baton,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ svn_client_ctx_t *ctx = baton->ctx;
+ svn_min__log_t *result;
+
+ /* Prepare API parameters for fetching the full log for URL,
+ * including changed paths, excluding revprops.
+ */
+ apr_array_header_t *targets;
+ apr_array_header_t *revisions;
+ apr_array_header_t *revprops;
+ svn_opt_revision_t peg_revision = { svn_opt_revision_head };
+ svn_opt_revision_range_t range = { { svn_opt_revision_unspecified },
+ { svn_opt_revision_unspecified } };
+
+ targets = apr_array_make(scratch_pool, 1, sizeof(const char *));
+ APR_ARRAY_PUSH(targets, const char *) = url;
+
+ revisions = apr_array_make(scratch_pool, 1, sizeof(&range));
+ APR_ARRAY_PUSH(revisions, svn_opt_revision_range_t *) = &range;
+
+ revprops = apr_array_make(scratch_pool, 0, sizeof(const char *));
+
+ /* The log object to fill. */
+ result = apr_pcalloc(result_pool, sizeof(*result));
+ result->unique_paths = svn_hash__make(scratch_pool);
+ result->first_rev = SVN_INVALID_REVNUM;
+ result->head_rev = SVN_INVALID_REVNUM;
+ result->entries = apr_array_make(result_pool, 1024, sizeof(log_entry_t *));
+ result->copies = apr_array_make(result_pool, 1024,
+ sizeof(svn_min__copy_t *));
+ result->deletions = apr_array_make(result_pool, 1024, sizeof(deletion_t *));
+ result->quiet = baton->opt_state->quiet;
+
+ if (!baton->opt_state->quiet)
+ {
+ SVN_ERR(svn_cmdline_printf(scratch_pool,
+ _("Fetching log for %s ..."),
+ url));
+ SVN_ERR(svn_cmdline_fflush(stdout));
+ }
+
+ SVN_ERR(svn_client_log5(targets,
+ &peg_revision,
+ revisions,
+ 0, /* no limit */
+ TRUE, /* verbose */
+ TRUE, /* stop-on-copy */
+ FALSE, /* merge history */
+ revprops,
+ log_entry_receiver,
+ result,
+ ctx,
+ scratch_pool));
+
+ /* Complete arrays in RESULT. */
+ result->copies_by_source = apr_array_copy(result_pool, result->copies);
+
+ svn_sort__array_reverse(result->entries, scratch_pool);
+ svn_sort__array(result->copies, copy_order);
+ svn_sort__array(result->copies_by_source, copy_by_source_order);
+ svn_sort__array(result->deletions, deletion_order);
+
+ /* Show that we are done. */
+ if (!baton->opt_state->quiet)
+ {
+ SVN_ERR(svn_cmdline_printf(scratch_pool, "\n"));
+ SVN_ERR(print_log_stats(result, scratch_pool));
+ }
+
+ result->unique_paths = NULL;
+ *log = result;
+
+ return SVN_NO_ERROR;
+}
+
+/* Append REVISION with the INHERITABLE setting to RANGES. RANGES must be
+ * sorted and REVISION must be larger than the largest revision in RANGES. */
+static void
+append_rev_to_ranges(svn_rangelist_t *ranges,
+ svn_revnum_t revision,
+ svn_boolean_t inheritable)
+{
+ /* In many cases, we can save memory by simply extending the last range. */
+ svn_merge_range_t *range;
+ if (ranges->nelts)
+ {
+ range = APR_ARRAY_IDX(ranges, ranges->nelts - 1, svn_merge_range_t *);
+ if (range->end + 1 == revision && range->inheritable == inheritable)
+ {
+ range->end = revision;
+ return;
+ }
+ }
+
+ /* We need to add a new range. */
+ range = apr_pcalloc(ranges->pool, sizeof(*range));
+ range->start = revision - 1;
+ range->end = revision;
+ range->inheritable = inheritable;
+
+ APR_ARRAY_PUSH(ranges, svn_merge_range_t *) = range;
+}
+
+/* Comparison function comparing the log_entry_t * in *LHS with the
+ * svn_revnum_t in *rhs.
+ */
+static int
+compare_rev_log_entry(const void *lhs,
+ const void *rhs)
+{
+ const log_entry_t *entry = *(const log_entry_t * const *)lhs;
+ svn_revnum_t revision = *(const svn_revnum_t *)rhs;
+
+ if (entry->revision < revision)
+ return -1;
+
+ return entry->revision == revision ? 0 : 1;
+}
+
+/* Restrict RANGE to the range of revisions covered by LOG. Cut-off from
+ * both sides will be added to RANGES. */
+static void
+restrict_range(svn_min__log_t *log,
+ svn_merge_range_t *range,
+ svn_rangelist_t *ranges)
+{
+ /* Cut off at the earliest revision. */
+ if (range->start + 1 < log->first_rev)
+ {
+ svn_merge_range_t *new_range
+ = apr_pmemdup(ranges->pool, range, sizeof(*range));
+ new_range->end = MIN(new_range->end, log->first_rev - 1);
+
+ APR_ARRAY_PUSH(ranges, svn_merge_range_t *) = new_range;
+ range->start = new_range->end;
+ }
+
+ /* Cut off at log HEAD. */
+ if (range->end > log->head_rev)
+ {
+ svn_merge_range_t *new_range
+ = apr_pmemdup(ranges->pool, range, sizeof(*range));
+ new_range->start = log->head_rev;
+
+ APR_ARRAY_PUSH(ranges, svn_merge_range_t *) = new_range;
+ range->end = new_range->start;
+ }
+}
+
+/* Return TRUE if PATH is either equal to, a parent of or sub-path of
+ * CHANGED_PATH. */
+static svn_boolean_t
+is_relevant(const char *changed_path,
+ const char *path)
+{
+ return svn_dirent_is_ancestor(changed_path, path)
+ || svn_dirent_is_ancestor(path, changed_path);
+}
+
+/* Return TRUE if PATH is either equal to, a parent of or sub-path of
+ * SUB_TREE. Ignore REVISION and BATON but keep it for a unified signature
+ * to be used with filter_ranges. */
+static svn_boolean_t
+in_subtree(const char *changed_path,
+ const char *sub_tree,
+ svn_revnum_t revision,
+ const void *baton)
+{
+ return svn_dirent_is_ancestor(sub_tree, changed_path);
+}
+
+/* Return TRUE if
+ * - CHANGED_PATH is either equal to or a sub-node of PATH, and
+ * - CHANGED_PATH is outside the sub-tree given as BATON.
+ * Ignore REVISION. */
+static svn_boolean_t
+below_path_outside_subtree(const char *changed_path,
+ const char *path,
+ svn_revnum_t revision,
+ const void *baton)
+{
+ const char *subtree = baton;
+
+ /* Is this a change _below_ PATH but not within SUBTREE? */
+ return !svn_dirent_is_ancestor(subtree, changed_path)
+ && svn_dirent_is_ancestor(path, changed_path)
+ && strcmp(path, changed_path);
+}
+
+/* Baton struct to be used with change_outside_all_subtree_ranges. */
+typedef struct change_outside_baton_t
+{
+ /* Maps FS path to revision range lists. */
+ apr_hash_t *sibling_ranges;
+
+ /* Pool for temporary allocations.
+ * Baton users may clear this at will. */
+ apr_pool_t *iterpool;
+} change_outside_baton_t;
+
+/* Comparison function comparing range *LHS to revision *RHS. */
+static int
+compare_range_rev(const void *lhs,
+                  const void *rhs)
+{
+  const svn_merge_range_t *range = *(const svn_merge_range_t * const *)lhs;
+  svn_revnum_t revision = *(const svn_revnum_t *)rhs;
+
+  if (range->start >= revision)
+    return 1;
+
+  return range->end < revision ? -1 : 0;
+}
+
+/* Return TRUE if CHANGED_PATH is either equal to or a sub-node of PATH,
+ * and CHANGED_PATH@REVISION is not covered by BATON->SIBLING_RANGES. */
+static svn_boolean_t
+change_outside_all_subtree_ranges(const char *changed_path,
+ const char *path,
+ svn_revnum_t revision,
+ const void *baton)
+{
+ const change_outside_baton_t *b = baton;
+ svn_boolean_t missing = TRUE;
+ apr_size_t len;
+ svn_rangelist_t *ranges;
+
+ /* Don't collect changes outside the subtree starting at PARENT_PATH. */
+ if (!svn_dirent_is_ancestor(path, changed_path))
+ return FALSE;
+
+ svn_pool_clear(b->iterpool);
+
+ /* All branches that contain CHANGED_PATH, i.e. match either it or one
+ * of its parents, must mention REVISION in their mergeinfo. */
+ for (len = strlen(changed_path);
+ !svn_fspath__is_root(changed_path, len);
+ len = strlen(changed_path))
+ {
+ ranges = apr_hash_get(b->sibling_ranges, changed_path, len);
+ if (ranges)
+ {
+ /* If any of the matching branches does not list REVISION
+ * as already merged, we found an "outside" change. */
+ if (!svn_sort__array_lookup(ranges, &revision, NULL,
+ compare_range_rev))
+ return TRUE;
+
+ /* Mergeinfo for this path has been found. */
+ missing = FALSE;
+ }
+
+ changed_path = svn_fspath__dirname(changed_path, b->iterpool);
+ }
+
+ /* Record, if no mergeinfo has been found for this CHANGED_PATH. */
+ return missing;
+}
+
+/* In LOG, scan the revisions given in RANGES and return the revision /
+ * ranges that are relevant to PATH with respect to the CHANGE_RELEVANT
+ * criterion using BATON. Keep revisions that lie outside what is covered
+ * by LOG. Allocate the result in RESULT_POOL. */
+static svn_rangelist_t *
+filter_ranges(svn_min__log_t *log,
+ const char *path,
+ svn_rangelist_t *ranges,
+ svn_boolean_t (*change_relavent)(const char *,
+ const char *,
+ svn_revnum_t,
+ const void *),
+ const void *baton,
+ apr_pool_t *result_pool)
+{
+ svn_rangelist_t *result;
+ int i, k, l;
+
+ /* Auto-complete parameters. */
+ if (!SVN_IS_VALID_REVNUM(log->first_rev))
+ return svn_rangelist_dup(ranges, result_pool);
+
+ result = apr_array_make(result_pool, 0, ranges->elt_size);
+ for (i = 0; i < ranges->nelts; ++i)
+ {
+ /* Next revision range to scan. */
+ svn_merge_range_t range
+ = *APR_ARRAY_IDX(ranges, i, const svn_merge_range_t *);
+ restrict_range(log, &range, result);
+
+ /* Find the range start and scan the range linearly. */
+ ++range.start;
+ for (k = svn_sort__bsearch_lower_bound(log->entries, &range.start,
+ compare_rev_log_entry);
+ k < log->entries->nelts;
+ ++k)
+ {
+ const log_entry_t *entry = APR_ARRAY_IDX(log->entries, k,
+ const log_entry_t *);
+ if (entry->revision > range.end)
+ break;
+
+          /* Skip revisions not relevant to PATH. */
+ if (!is_relevant(entry->common_base, path))
+ continue;
+
+ /* Look for any changed path that meets the filter criterion. */
+ for (l = 0; l < entry->paths->nelts; ++l)
+ {
+ const char *changed_path
+ = APR_ARRAY_IDX(entry->paths, l, const char *);
+
+ if (change_relavent(changed_path, path, entry->revision, baton))
+ {
+ append_rev_to_ranges(result, entry->revision,
+ range.inheritable);
+ break;
+ }
+ }
+ }
+ }
+
+ return result;
+}
+
+svn_rangelist_t *
+svn_min__operative(svn_min__log_t *log,
+ const char *path,
+ svn_rangelist_t *ranges,
+ apr_pool_t *result_pool)
+{
+ return filter_ranges(log, path, ranges, in_subtree, NULL, result_pool);
+}
+
+svn_rangelist_t *
+svn_min__operative_outside_subtree(svn_min__log_t *log,
+ const char *path,
+ const char *subtree,
+ svn_rangelist_t *ranges,
+ apr_pool_t *result_pool)
+{
+ return filter_ranges(log, path, ranges, below_path_outside_subtree,
+ subtree, result_pool);
+}
+
+svn_rangelist_t *
+svn_min__operative_outside_all_subtrees(svn_min__log_t *log,
+ const char *path,
+ svn_rangelist_t *ranges,
+ apr_hash_t *sibling_ranges,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ svn_rangelist_t *result;
+ change_outside_baton_t baton;
+ baton.sibling_ranges = sibling_ranges;
+ baton.iterpool = svn_pool_create(scratch_pool);
+
+ result = filter_ranges(log, path, ranges, change_outside_all_subtree_ranges,
+ &baton, result_pool);
+ svn_pool_destroy(baton.iterpool);
+
+ return result;
+}
+
+svn_revnum_t
+svn_min__find_deletion(svn_min__log_t *log,
+ const char *path,
+ svn_revnum_t start_rev,
+ svn_revnum_t end_rev,
+ apr_pool_t *scratch_pool)
+{
+ svn_revnum_t latest = SVN_INVALID_REVNUM;
+
+ deletion_t *to_find = apr_pcalloc(scratch_pool, sizeof(*to_find));
+ to_find->path = path;
+ to_find->revision = end_rev;
+
+ /* Auto-complete parameters. */
+ if (!SVN_IS_VALID_REVNUM(start_rev))
+ start_rev = log->head_rev;
+
+ /* Walk up the tree and find the latest deletion of PATH or any of
+ * its parents. */
+ while (!svn_fspath__is_root(to_find->path, strlen(to_find->path)))
+ {
+ int i;
+ for (i = svn_sort__bsearch_lower_bound(log->deletions, &to_find,
+ deletion_order);
+ i < log->deletions->nelts;
+ ++i)
+ {
+ const deletion_t *deletion = APR_ARRAY_IDX(log->deletions, i,
+ const deletion_t *);
+ if (strcmp(deletion->path, to_find->path))
+ break;
+ if (deletion->revision > start_rev)
+ break;
+
+ latest = deletion->revision;
+ to_find->revision = deletion->revision;
+ }
+
+ to_find->path = svn_fspath__dirname(to_find->path, scratch_pool);
+ }
+
+ return latest;
+}
+
+apr_array_header_t *
+svn_min__find_deletions(svn_min__log_t *log,
+                        const char *path,
+                        apr_pool_t *result_pool,
+                        apr_pool_t *scratch_pool)
+{
+  apr_array_header_t *result = apr_array_make(result_pool, 0,
+                                              sizeof(svn_revnum_t));
+  int source, dest;
+
+  deletion_t *to_find = apr_pcalloc(scratch_pool, sizeof(*to_find));
+  to_find->path = path;
+  to_find->revision = 0;
+
+  /* Find deletions for PATH and its parents. */
+  while (!svn_fspath__is_root(to_find->path, strlen(to_find->path)))
+    {
+      int i;
+      for (i = svn_sort__bsearch_lower_bound(log->deletions, &to_find,
+                                             deletion_order);
+           i < log->deletions->nelts;
+           ++i)
+        {
+          const deletion_t *deletion = APR_ARRAY_IDX(log->deletions, i,
+                                                     const deletion_t *);
+          if (strcmp(deletion->path, to_find->path))
+            break;
+
+          APR_ARRAY_PUSH(result, svn_revnum_t) = deletion->revision;
+        }
+
+      to_find->path = svn_fspath__dirname(to_find->path, scratch_pool);
+    }
+
+  /* Remove any duplicates (unlikely but possible). */
+  svn_sort__array(result, svn_sort_compare_revisions);
+  for (source = 1, dest = 0; source < result->nelts; ++source)
+    {
+      svn_revnum_t source_rev = APR_ARRAY_IDX(result, source, svn_revnum_t);
+      svn_revnum_t dest_rev = APR_ARRAY_IDX(result, dest, svn_revnum_t);
+      if (source_rev != dest_rev)
+        {
+          ++dest;
+          APR_ARRAY_IDX(result, dest, svn_revnum_t) = source_rev;
+        }
+    }
+
+  if (result->nelts)
+    result->nelts = dest + 1;
+
+  return result;
+}
+
+/* Starting at REVISION, scan LOG for the next (in REVISION or older) copy
+ * that creates PATH explicitly or implicitly by creating a parent of it.
+ * Return the copy operation found or NULL if none exists. Use SCRATCH_POOL
+ * for temporary allocations. */
+static const svn_min__copy_t *
+next_copy(svn_min__log_t *log,
+ const char *path,
+ svn_revnum_t revision,
+ apr_pool_t *scratch_pool)
+{
+ const svn_min__copy_t *copy = NULL;
+ int idx;
+
+ svn_min__copy_t *to_find = apr_pcalloc(scratch_pool, sizeof(*to_find));
+ to_find->path = path;
+ to_find->revision = revision;
+
+ idx = svn_sort__bsearch_lower_bound(log->copies, &to_find, copy_order);
+ if (idx < log->copies->nelts)
+ {
+ /* Found an exact match? */
+ copy = APR_ARRAY_IDX(log->copies, idx, const svn_min__copy_t *);
+ if (copy->revision != revision || strcmp(copy->path, path))
+ copy = NULL;
+ }
+
+ if (!copy && idx > 0)
+ {
+ /* No exact match. The predecessor may be the closest copy. */
+ copy = APR_ARRAY_IDX(log->copies, idx - 1, const svn_min__copy_t *);
+ if (strcmp(copy->path, path))
+ copy = NULL;
+ }
+
+  /* Maybe the parent folder got copied later, i.e. is the closest copy.
+ We implicitly recurse up the tree. */
+ if (!svn_fspath__is_root(to_find->path, strlen(to_find->path)))
+ {
+ const svn_min__copy_t *parent_copy;
+ to_find->path = svn_fspath__dirname(to_find->path, scratch_pool);
+
+ parent_copy = next_copy(log, to_find->path, revision, scratch_pool);
+ if (!copy)
+ copy = parent_copy;
+ else if (parent_copy && parent_copy->revision > copy->revision)
+ copy = parent_copy;
+ }
+
+ return copy;
+}
+
+/* Return the revision of the closest copy affecting PATH within the range
+ * [END_REV, START_REV], or 0 if there is none. An invalid START_REV
+ * defaults to LOG's HEAD revision. SCRATCH_POOL is used for temporaries. */
+svn_revnum_t
+svn_min__find_copy(svn_min__log_t *log,
+                   const char *path,
+                   svn_revnum_t start_rev,
+                   svn_revnum_t end_rev,
+                   apr_pool_t *scratch_pool)
+{
+  const svn_min__copy_t *copy;
+
+  /* Auto-complete parameters. */
+  if (!SVN_IS_VALID_REVNUM(start_rev))
+    start_rev = log->head_rev;
+
+  /* The actual lookup. */
+  copy = next_copy(log, path, start_rev, scratch_pool);
+  if (copy && copy->revision >= end_rev)
+    return copy->revision;
+
+  /* No copy found in the requested range. Return r0 explicitly. The
+   * original `return SVN_NO_ERROR;` was a type confusion (that macro is
+   * the svn_error_t * success value) which only worked because it
+   * expands to 0. Behavior is unchanged. */
+  return 0;
+}
+
+/* Return an array of const svn_min__copy_t *, allocated in RESULT_POOL,
+ * containing all copies in LOG whose source lies in the sub-tree at PATH
+ * (including PATH itself) or at one of PATH's parents, with a copy-from
+ * revision within [END_REV, START_REV]. SCRATCH_POOL is used for
+ * temporary allocations. */
+apr_array_header_t *
+svn_min__get_copies(svn_min__log_t *log,
+                    const char *path,
+                    svn_revnum_t start_rev,
+                    svn_revnum_t end_rev,
+                    apr_pool_t *result_pool,
+                    apr_pool_t *scratch_pool)
+{
+  apr_array_header_t *result = apr_array_make(result_pool, 0,
+                                              sizeof(svn_min__copy_t *));
+  const svn_min__copy_t **copies = (void *)log->copies_by_source->elts;
+  int idx;
+
+  /* Find all sub-tree copies, including PATH. */
+  svn_min__copy_t *to_find = apr_pcalloc(scratch_pool, sizeof(*to_find));
+  to_find->copyfrom_path = path;
+  to_find->copyfrom_revision = end_rev;
+
+  /* COPIES points into LOG->COPIES_BY_SOURCE, so bound the loops by that
+   * array's NELTS. (The previous code used LOG->COPIES->NELTS, which only
+   * worked because both arrays hold the same set of elements.) */
+  for (idx = svn_sort__bsearch_lower_bound(log->copies_by_source,
+                                           &to_find,
+                                           copy_by_source_order);
+       (idx < log->copies_by_source->nelts)
+       && svn_dirent_is_ancestor(path, copies[idx]->copyfrom_path);
+       ++idx)
+    {
+      /* Only keep copies from within the [END_REV, START_REV] segment. */
+      if (copies[idx]->copyfrom_revision <= start_rev)
+        APR_ARRAY_PUSH(result, const svn_min__copy_t *) = copies[idx];
+    }
+
+  /* Find all parent copies. */
+  while (!svn_fspath__is_root(to_find->copyfrom_path,
+                              strlen(to_find->copyfrom_path)))
+    {
+      to_find->copyfrom_path = svn_fspath__dirname(to_find->copyfrom_path,
+                                                   scratch_pool);
+
+      for (idx = svn_sort__bsearch_lower_bound(log->copies_by_source,
+                                               &to_find,
+                                               copy_by_source_order);
+           (idx < log->copies_by_source->nelts)
+           && !strcmp(copies[idx]->copyfrom_path, to_find->copyfrom_path)
+           && (copies[idx]->copyfrom_revision <= start_rev);
+           ++idx)
+        {
+          APR_ARRAY_PUSH(result, const svn_min__copy_t *) = copies[idx];
+        }
+    }
+
+  return result;
+}
+
+/* A history segment. Simply a FS path plus the revision range that it is
+ * part of the history of the node. START is the older revision, END the
+ * younger one (START <= END). */
+typedef struct segment_t
+{
+ /* FS path at which the node lives in this segment */
+ const char *path;
+
+ /* Revision that it appears in or that the history was truncated to. */
+ svn_revnum_t start;
+
+ /* Revision from which the node was copied to the next segment or the
+ * revision that the history was truncated to. */
+ svn_revnum_t end;
+} segment_t;
+
+/* Return the history of PATH in LOG within [END_REV, START_REV] as a list
+ * of segment_t *, ordered latest to oldest, allocated in RESULT_POOL.
+ * An invalid START_REV defaults to LOG's HEAD revision. */
+apr_array_header_t *
+svn_min__get_history(svn_min__log_t *log,
+ const char *path,
+ svn_revnum_t start_rev,
+ svn_revnum_t end_rev,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ segment_t *segment;
+ const svn_min__copy_t *copy;
+ apr_array_header_t *result = apr_array_make(result_pool, 16,
+ sizeof(segment_t *));
+
+ /* Auto-complete parameters. */
+ if (!SVN_IS_VALID_REVNUM(start_rev))
+ start_rev = log->head_rev;
+
+ /* Simply follow all copies, each time adding a segment from "here" to
+ * the next copy. */
+ for (copy = next_copy(log, path, start_rev, scratch_pool);
+ copy && start_rev >= end_rev;
+ copy = next_copy(log, path, start_rev, scratch_pool))
+ {
+ segment = apr_pcalloc(result_pool, sizeof(*segment));
+ segment->start = MAX(end_rev, copy->revision);
+ segment->end = start_rev;
+ segment->path = apr_pstrdup(result_pool, path);
+
+ APR_ARRAY_PUSH(result, segment_t *) = segment;
+
+ /* Jump to the copy source and translate PATH accordingly, in case
+ the copy actually happened on a parent. */
+ start_rev = copy->copyfrom_revision;
+ path = svn_fspath__join(copy->copyfrom_path,
+ svn_fspath__skip_ancestor(copy->path, path),
+ scratch_pool);
+ }
+
+ /* The final segment has no copy-from. */
+ if (start_rev >= end_rev)
+ {
+ segment = apr_pcalloc(result_pool, sizeof(*segment));
+ segment->start = end_rev;
+ segment->end = start_rev;
+ segment->path = apr_pstrdup(result_pool, path);
+
+ APR_ARRAY_PUSH(result, segment_t *) = segment;
+ }
+
+ return result;
+}
+
+/* Return the intersection of the history segment lists LHS and RHS,
+ * allocated in RESULT_POOL. A segment enters the result iff a pair of
+ * LHS / RHS segments overlaps in revisions *and* refers to the same path;
+ * the overlapping revision range is emitted. */
+apr_array_header_t *
+svn_min__intersect_history(apr_array_header_t *lhs,
+ apr_array_header_t *rhs,
+ apr_pool_t *result_pool)
+{
+ apr_array_header_t *result = apr_array_make(result_pool, 16,
+ sizeof(segment_t *));
+
+ int lhs_idx = 0;
+ int rhs_idx = 0;
+
+ /* Careful: the segments are ordered latest to oldest. */
+ while (lhs_idx < lhs->nelts && rhs_idx < rhs->nelts)
+ {
+ segment_t *lhs_segment = APR_ARRAY_IDX(lhs, lhs_idx, segment_t *);
+ segment_t *rhs_segment = APR_ARRAY_IDX(rhs, rhs_idx, segment_t *);
+
+ /* Skip non-overlapping revision segments */
+ if (lhs_segment->start > rhs_segment->end)
+ {
+ ++lhs_idx;
+ continue;
+ }
+ else if (lhs_segment->end < rhs_segment->start)
+ {
+ ++rhs_idx;
+ continue;
+ }
+
+ /* Revision ranges overlap. Also the same path? */
+ if (!strcmp(lhs_segment->path, rhs_segment->path))
+ {
+ segment_t *segment = apr_pcalloc(result_pool, sizeof(*segment));
+ segment->start = MAX(lhs_segment->start, rhs_segment->start);
+ segment->end = MIN(lhs_segment->end, rhs_segment->end);
+ segment->path = apr_pstrdup(result_pool, lhs_segment->path);
+
+ APR_ARRAY_PUSH(result, segment_t *) = segment;
+ }
+
+ /* The segment that starts earlier may overlap with another one.
+ If they should start at the same rev, the next iteration will
+ skip the respective other segment. */
+ if (lhs_segment->start > rhs_segment->start)
+ ++lhs_idx;
+ else
+ ++rhs_idx;
+ }
+
+ return result;
+}
+
+/* Convert the segment list HISTORY into a merge range list, one
+ * inheritable range per segment, allocated in RESULT_POOL. */
+svn_rangelist_t *
+svn_min__history_ranges(apr_array_header_t *history,
+ apr_pool_t *result_pool)
+{
+ svn_rangelist_t *result = apr_array_make(result_pool, history->nelts,
+ sizeof(svn_merge_range_t *));
+
+ int i;
+ for (i = 0; i < history->nelts; ++i)
+ {
+ const segment_t *segment = APR_ARRAY_IDX(history, i, segment_t *);
+
+ /* Convert to merge ranges. Note that start+1 is the first rev
+ actually in that range. */
+ svn_merge_range_t *range = apr_pcalloc(result_pool, sizeof(*range));
+ range->start = MAX(0, segment->start - 1);
+ range->end = segment->end;
+ range->inheritable = TRUE;
+
+ APR_ARRAY_PUSH(result, svn_merge_range_t *) = range;
+ }
+
+ return result;
+}
diff --git a/tools/client-side/svn-mergeinfo-normalizer/logic.c b/tools/client-side/svn-mergeinfo-normalizer/logic.c
new file mode 100644
index 0000000..4911c08
--- /dev/null
+++ b/tools/client-side/svn-mergeinfo-normalizer/logic.c
@@ -0,0 +1,1915 @@
+/*
+ * logic.c -- Mergeinfo normalization / cleanup logic used by the commands.
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+/* ==================================================================== */
+
+
+
+/*** Includes. ***/
+
+#include "svn_cmdline.h"
+#include "svn_dirent_uri.h"
+#include "svn_hash.h"
+#include "svn_path.h"
+#include "svn_pools.h"
+#include "private/svn_fspath.h"
+#include "private/svn_sorts_private.h"
+
+#include "mergeinfo-normalizer.h"
+
+#include "svn_private_config.h"
+
+
+/*** Code. ***/
+
+/* Return a new rangelist, allocated in RESULT_POOL, holding exactly those
+ * entries of RANGES that are reverse merge ranges (start >= end). */
+static svn_rangelist_t *
+find_reverse_ranges(svn_rangelist_t *ranges,
+                    apr_pool_t *result_pool)
+{
+  int idx;
+  svn_rangelist_t *reversed = apr_array_make(result_pool, 0,
+                                             ranges->elt_size);
+
+  for (idx = 0; idx < ranges->nelts; ++idx)
+    {
+      const svn_merge_range_t *entry
+        = APR_ARRAY_IDX(ranges, idx, const svn_merge_range_t *);
+
+      /* Forward ranges have start < end; skip those. */
+      if (entry->start < entry->end)
+        continue;
+
+      APR_ARRAY_PUSH(reversed, const svn_merge_range_t *) = entry;
+    }
+
+  return reversed;
+}
+
+/* Return a new rangelist, allocated in RESULT_POOL, holding exactly those
+ * entries of RANGES that are non-inheritable, i.e. non-recursive merges. */
+static svn_rangelist_t *
+find_non_recursive_ranges(svn_rangelist_t *ranges,
+                          apr_pool_t *result_pool)
+{
+  int idx;
+  svn_rangelist_t *filtered = apr_array_make(result_pool, 0,
+                                             ranges->elt_size);
+
+  for (idx = 0; idx < ranges->nelts; ++idx)
+    {
+      const svn_merge_range_t *entry
+        = APR_ARRAY_IDX(ranges, idx, const svn_merge_range_t *);
+
+      /* Inheritable ranges are the recursive ones; skip those. */
+      if (entry->inheritable)
+        continue;
+
+      APR_ARRAY_PUSH(filtered, const svn_merge_range_t *) = entry;
+    }
+
+  return filtered;
+}
+
+/* Write TITLE immediately followed by the string form of RANGES to the
+ * console. Perform temporary allocations in SCRATCH_POOL. */
+static svn_error_t *
+print_ranges(svn_rangelist_t *ranges,
+             const char *title,
+             apr_pool_t *scratch_pool)
+{
+  svn_string_t *rangelist_str;
+
+  SVN_ERR(svn_rangelist_to_string(&rangelist_str, ranges, scratch_pool));
+  SVN_ERR(svn_cmdline_printf(scratch_pool, _(" %s%s\n"),
+                             title, rangelist_str->data));
+
+  return SVN_NO_ERROR;
+}
+
+/* Depending on the settings in OPT_STATE, report on console that
+ * SUBTREE_PATH is not mentioned in the parent mergeinfo. With MISALIGNED
+ * set, report a relative-path mismatch instead. Temporary allocations
+ * are made in SCRATCH_POOL. */
+static svn_error_t *
+show_missing_parent(const char *subtree_path,
+                    svn_boolean_t misaligned,
+                    svn_min__opt_state_t *opt_state,
+                    apr_pool_t *scratch_pool)
+{
+  const char *format;
+
+  /* Be quiet in normal processing mode. */
+  if (!opt_state->verbose && !opt_state->run_analysis)
+    return SVN_NO_ERROR;
+
+  /* Pick the message matching the kind of problem found. */
+  format = misaligned
+         ? _(" MISALIGNED branch: %s\n")
+         : _(" MISSING in parent: %s\n");
+  SVN_ERR(svn_cmdline_printf(scratch_pool, format, subtree_path));
+
+  return SVN_NO_ERROR;
+}
+
+/* If REVERSE_RANGES is non-empty and OPT_STATE enables output, list those
+ * ranges as "reverse ranges" of SUBTREE_PATH on console. Temporary
+ * allocations are made in SCRATCH_POOL. */
+static svn_error_t *
+show_reverse_ranges(const char *subtree_path,
+                    svn_rangelist_t *reverse_ranges,
+                    svn_min__opt_state_t *opt_state,
+                    apr_pool_t *scratch_pool)
+{
+  if (reverse_ranges->nelts
+      && (opt_state->verbose || opt_state->run_analysis))
+    {
+      SVN_ERR(svn_cmdline_printf(scratch_pool,
+                                 _(" REVERSE RANGE(S) found for %s:\n"),
+                                 subtree_path));
+      SVN_ERR(print_ranges(reverse_ranges, "", scratch_pool));
+    }
+
+  return SVN_NO_ERROR;
+}
+
+/* If NON_RECURSIVE_RANGES is non-empty and OPT_STATE enables output, list
+ * those ranges as non-recursive ranges of SUBTREE_PATH on console.
+ * Temporary allocations are made in SCRATCH_POOL. */
+static svn_error_t *
+show_non_recursive_ranges(const char *subtree_path,
+                          svn_rangelist_t *non_recursive_ranges,
+                          svn_min__opt_state_t *opt_state,
+                          apr_pool_t *scratch_pool)
+{
+  if (non_recursive_ranges->nelts
+      && (opt_state->verbose || opt_state->run_analysis))
+    {
+      SVN_ERR(svn_cmdline_printf(scratch_pool,
+                                 _(" NON-RECURSIVE RANGE(S) found for %s:\n"),
+                                 subtree_path));
+      SVN_ERR(print_ranges(non_recursive_ranges, "", scratch_pool));
+    }
+
+  return SVN_NO_ERROR;
+}
+
+/* Show the elision result of a single BRANCH (for a single node) on
+ * console filtered by the OPT_STATE. OPERATIVE_OUTSIDE_SUBTREE and
+ * OPERATIVE_IN_SUBTREE are the revision ranges that prevented an elision.
+ * SUBTREE_ONLY and PARENT_ONLY were differences that have been adjusted.
+ * IMPLIED_IN_PARENT and IMPLIED_IN_SUBTREE are differences that could be
+ * ignored. Uses SCRATCH_POOL for temporary allocations.
+ *
+ * Note: elision failed iff one of the two OPERATIVE_* lists is non-empty. */
+static svn_error_t *
+show_branch_elision(const char *branch,
+ svn_rangelist_t *subtree_only,
+ svn_rangelist_t *parent_only,
+ svn_rangelist_t *operative_outside_subtree,
+ svn_rangelist_t *operative_in_subtree,
+ svn_rangelist_t *implied_in_parent,
+ svn_rangelist_t *implied_in_subtree,
+ svn_min__opt_state_t *opt_state,
+ apr_pool_t *scratch_pool)
+{
+ /* Simplest case: parent and sub-tree m/i were identical. */
+ if (opt_state->verbose && !subtree_only->nelts && !parent_only->nelts)
+ {
+ SVN_ERR(svn_cmdline_printf(scratch_pool,
+ _(" elide redundant branch %s\n"),
+ branch));
+ return SVN_NO_ERROR;
+ }
+
+ if (operative_outside_subtree->nelts || operative_in_subtree->nelts)
+ {
+ if (opt_state->verbose || opt_state->run_analysis)
+ {
+ SVN_ERR(svn_cmdline_printf(scratch_pool,
+ _(" CANNOT elide branch %s\n"),
+ branch));
+ if (operative_outside_subtree->nelts)
+ SVN_ERR(print_ranges(operative_outside_subtree,
+ _("revisions not movable to parent: "),
+ scratch_pool));
+ if (operative_in_subtree->nelts)
+ SVN_ERR(print_ranges(operative_in_subtree,
+ _("revisions missing in sub-node: "),
+ scratch_pool));
+ }
+ }
+ else if ( opt_state->verbose
+ || (opt_state->run_analysis && ( implied_in_parent->nelts
+ || subtree_only->nelts
+ || implied_in_subtree->nelts
+ || parent_only->nelts)))
+ {
+ /* Don't list ranges twice: drop the implied ones from the
+ "moved" / "inoperative" listings below. */
+ SVN_ERR(svn_rangelist_remove(&subtree_only, implied_in_parent,
+ subtree_only, TRUE, scratch_pool));
+ SVN_ERR(svn_rangelist_remove(&parent_only, implied_in_subtree,
+ parent_only, TRUE, scratch_pool));
+
+ SVN_ERR(svn_cmdline_printf(scratch_pool,
+ _(" elide branch %s\n"),
+ branch));
+ if (implied_in_parent->nelts)
+ SVN_ERR(print_ranges(implied_in_parent,
+ _("revisions implied in parent: "),
+ scratch_pool));
+ if (subtree_only->nelts)
+ SVN_ERR(print_ranges(subtree_only,
+ _("revisions moved to parent: "),
+ scratch_pool));
+ if (implied_in_subtree->nelts)
+ SVN_ERR(print_ranges(implied_in_subtree,
+ _("revisions implied in sub-node: "),
+ scratch_pool));
+ if (parent_only->nelts)
+ SVN_ERR(print_ranges(parent_only,
+ _("revisions inoperative in sub-node: "),
+ scratch_pool));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Progress tracking data structure. */
+typedef struct progress_t
+{
+ /* Number of nodes with mergeinfo that we need to process. */
+ int nodes_total;
+
+ /* Number of nodes still to process. */
+ int nodes_todo;
+
+ /* Counter for nodes where the mergeinfo could be removed entirely. */
+ apr_int64_t nodes_removed;
+
+ /* Number of mergeinfo lines removed because the respective branches had
+ * been deleted. */
+ apr_int64_t obsoletes_removed;
+
+ /* Number of ranges combined so far. */
+ apr_int64_t ranges_removed;
+
+ /* Transient flag used to indicate whether we still have to print a
+ * header before showing various details. */
+ svn_boolean_t needs_header;
+} progress_t;
+
+/* Describes the "deletion" state of a branch, as determined from the log. */
+typedef enum deletion_state_t
+{
+ /* Path still exists. */
+ ds_exists,
+
+ /* Path does not exist but has not been deleted.
+ * Catch-up merges etc. may introduce the path. */
+ ds_implied,
+
+ /* A (possibly indirect) copy of the path or one of its sub-nodes still
+ * exists. */
+ ds_has_copies,
+
+ /* The path has been deleted (explicitly or indirectly via parent) and
+ * no copy exists @HEAD. */
+ ds_deleted
+} deletion_state_t;
+
+/* Print the "removing obsoletes" section header once, when obsolete
+ * removal is enabled in OPT_STATE, verbose mode is on, and PROGRESS still
+ * flags the header as pending. Temporaries go into SCRATCH_POOL. */
+static svn_error_t *
+show_removing_obsoletes(svn_min__opt_state_t *opt_state,
+                        progress_t *progress,
+                        apr_pool_t *scratch_pool)
+{
+  if (!opt_state->remove_obsoletes || !opt_state->verbose)
+    return SVN_NO_ERROR;
+
+  if (!progress || !progress->needs_header)
+    return SVN_NO_ERROR;
+
+  SVN_ERR(svn_cmdline_printf(scratch_pool,
+                             _("\n Trying to remove obsolete entries ...\n")));
+  progress->needs_header = FALSE;
+
+  return SVN_NO_ERROR;
+}
+
+/* If in verbose mode according to OPT_STATE, print the deletion status
+ * DELETION_STATE for SUBTREE_PATH to the console. If REPORT_NON_REMOVALS
+ * is set, report missing branches that can't be removed from mergeinfo.
+ * In that case, show a SURVIVING_COPY when appropriate.
+ *
+ * Prefix the output with the appropriate section header based on the state
+ * tracked in PROGRESS. Use SCRATCH_POOL for temporaries.
+ */
+static svn_error_t *
+show_removed_branch(const char *subtree_path,
+ svn_min__opt_state_t *opt_state,
+ deletion_state_t deletion_state,
+ svn_boolean_t report_non_removals,
+ const char *surviving_copy,
+ progress_t *progress,
+ apr_pool_t *scratch_pool)
+{
+ if (opt_state->verbose)
+ switch (deletion_state)
+ {
+ /* Branch was deleted and has no live copies: always report. */
+ case ds_deleted:
+ SVN_ERR(show_removing_obsoletes(opt_state, progress,
+ scratch_pool));
+ SVN_ERR(svn_cmdline_printf(scratch_pool,
+ _(" remove deleted branch %s\n"),
+ subtree_path));
+ break;
+
+ /* Branch never existed explicitly; only report when asked to. */
+ case ds_implied:
+ if (report_non_removals)
+ {
+ SVN_ERR(show_removing_obsoletes(opt_state, progress,
+ scratch_pool));
+ SVN_ERR(svn_cmdline_printf(scratch_pool,
+ _(" keep POTENTIAL branch %s\n"),
+ subtree_path));
+ }
+ break;
+
+ /* Branch gone but copies survive; show one example copy. */
+ case ds_has_copies:
+ if (report_non_removals)
+ {
+ SVN_ERR(show_removing_obsoletes(opt_state, progress,
+ scratch_pool));
+ SVN_ERR(svn_cmdline_printf(scratch_pool,
+ _(" has SURVIVING COPIES: %s\n"),
+ subtree_path));
+ SVN_ERR(svn_cmdline_printf(scratch_pool,
+ _(" e.g.: %s\n"),
+ surviving_copy));
+ }
+ break;
+
+ /* ds_exists: nothing to report. */
+ default:
+ break;
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* If COPY's source covers SOURCE or one of its ancestors, return the path
+ * at which SOURCE appears within the copy target, allocated in
+ * RESULT_POOL. Otherwise, simply return the copy target itself,
+ * allocated in RESULT_POOL. */
+static const char *
+get_copy_target_path(const char *source,
+                     const svn_min__copy_t *copy,
+                     apr_pool_t *result_pool)
+{
+  const char *relpath;
+
+  /* Copy of some parent (or unrelated): just report the copy target. */
+  if (!svn_dirent_is_ancestor(copy->copyfrom_path, source))
+    return apr_pstrdup(result_pool, copy->path);
+
+  /* SOURCE lies inside the copied tree; translate it into the target. */
+  relpath = svn_dirent_skip_ancestor(copy->copyfrom_path, source);
+  return svn_dirent_join(copy->path, relpath, result_pool);
+}
+
+/* Scan LOG for copies of PATH or one of its sub-nodes from the segment
+ * starting at START_REV down to END_REV. Follow those copies until we
+ * find one that has not been deleted @HEAD. If none exists, return NULL.
+ * Otherwise, return the first such copy we find, allocated in RESULT_POOL.
+ * Use SCRATCH_POOL for temporaries. */
+static const char *
+find_surviving_copy(svn_min__log_t *log,
+ const char *path,
+ svn_revnum_t start_rev,
+ svn_revnum_t end_rev,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ const char * survivor = NULL;
+ apr_pool_t *iterpool = svn_pool_create(scratch_pool);
+ apr_array_header_t *copies = svn_min__get_copies(log, path, start_rev,
+ end_rev, scratch_pool,
+ scratch_pool);
+
+ int i;
+ /* Stop at the first survivor found. */
+ for (i = 0; (i < copies->nelts) && !survivor; ++i)
+ {
+ const char *copy_target;
+ const svn_min__copy_t *copy;
+ svn_revnum_t deletion_rev;
+ svn_pool_clear(iterpool);
+
+ copy = APR_ARRAY_IDX(copies, i, const svn_min__copy_t *);
+ copy_target = get_copy_target_path(path, copy, iterpool);
+
+ /* Is this a surviving copy? */
+ deletion_rev = svn_min__find_deletion(log, copy_target,
+ SVN_INVALID_REVNUM,
+ copy->revision, iterpool);
+ if (SVN_IS_VALID_REVNUM(deletion_rev))
+ {
+ /* Are there surviving sub-copies?
+ Recurse into the copy's lifetime [its creation,
+ just before its deletion]. */
+ survivor = find_surviving_copy(log, copy_target,
+ copy->revision, deletion_rev - 1,
+ result_pool, iterpool);
+ }
+ else
+ {
+ survivor = apr_pstrdup(result_pool, copy_target);
+ }
+ }
+
+ svn_pool_destroy(iterpool);
+
+ return survivor;
+}
+
+/* Scan LOG for copies of PATH or one of its sub-nodes from the segment
+ * starting at START_REV down to END_REV. Follow those copies and collect
+ * those that have not been deleted @HEAD. Append them to SURVIVORS,
+ * allocated in RESULT_POOL. Use SCRATCH_POOL for temporary allocations.
+ * (Exhaustive sibling of find_surviving_copy, which stops at the first.) */
+static void
+find_surviving_copies(apr_array_header_t *survivors,
+ svn_min__log_t *log,
+ const char *path,
+ svn_revnum_t start_rev,
+ svn_revnum_t end_rev,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ apr_pool_t *iterpool = svn_pool_create(scratch_pool);
+ apr_array_header_t *copies = svn_min__get_copies(log, path, start_rev,
+ end_rev, scratch_pool,
+ scratch_pool);
+
+ int i;
+ for (i = 0; i < copies->nelts; ++i)
+ {
+ const char *copy_target;
+ const svn_min__copy_t *copy;
+ svn_revnum_t deletion_rev;
+ svn_pool_clear(iterpool);
+
+ copy = APR_ARRAY_IDX(copies, i, const svn_min__copy_t *);
+ copy_target = get_copy_target_path(path, copy, iterpool);
+
+ /* Is this a surviving copy? */
+ deletion_rev = svn_min__find_deletion(log, copy_target,
+ SVN_INVALID_REVNUM,
+ copy->revision, iterpool);
+ if (SVN_IS_VALID_REVNUM(deletion_rev))
+ {
+ /* Are there surviving sub-copies? */
+ find_surviving_copies(survivors, log, copy_target,
+ copy->revision, deletion_rev - 1,
+ result_pool, iterpool);
+ }
+ else
+ {
+ APR_ARRAY_PUSH(survivors, const char *) = apr_pstrdup(result_pool,
+ copy_target);
+ }
+ }
+
+ svn_pool_destroy(iterpool);
+}
+
+/* Using LOOKUP and LOG, determine the deletion *STATE of PATH. OPT_STATE,
+ * PROGRESS and REPORT_NON_REMOVALS control the console output. OPT_STATE
+ * also makes this a no-op if removal of deleted branches has not been
+ * enabled in it.
+ *
+ * If LOCAL_ONLY is set, only remove branches that are known to have been
+ * deleted (as per LOOKUP) with no surviving copies etc. This is for quick
+ * checks.
+ *
+ * Track progress in PROGRESS and update MERGEINFO if we can remove the
+ * info for branch PATH from it.
+ *
+ * Use SCRATCH_POOL for temporaries.
+ */
+static svn_error_t *
+remove_obsolete_line(deletion_state_t *state,
+ svn_min__branch_lookup_t *lookup,
+ svn_min__log_t *log,
+ svn_mergeinfo_t mergeinfo,
+ const char *path,
+ svn_min__opt_state_t *opt_state,
+ progress_t *progress,
+ svn_boolean_t local_only,
+ svn_boolean_t report_non_removals,
+ apr_pool_t *scratch_pool)
+{
+ svn_boolean_t deleted;
+ const char *surviving_copy = NULL;
+
+ /* Skip if removal of deleted branches has not been enabled. */
+ if (!opt_state->remove_obsoletes)
+ {
+ *state = ds_exists;
+ return SVN_NO_ERROR;
+ }
+
+ SVN_ERR(svn_min__branch_lookup(&deleted, lookup, path, local_only,
+ scratch_pool));
+ if (deleted)
+ {
+ if (log)
+ {
+ svn_revnum_t creation_rev, deletion_rev;
+
+ /* Look for an explicit deletion since the last creation
+ * (or parent creation). Otherwise, the PATH never existed
+ * but is implied and may be needed as soon as there is a
+ * catch-up merge. */
+ creation_rev = svn_min__find_copy(log, path, SVN_INVALID_REVNUM,
+ 0, scratch_pool);
+ deletion_rev = svn_min__find_deletion(log, path,
+ SVN_INVALID_REVNUM,
+ creation_rev, scratch_pool);
+ /* Start just before the deletion, if there was one;
+ * otherwise search from head (invalid revnum). */
+ surviving_copy = find_surviving_copy(log, path,
+ SVN_IS_VALID_REVNUM(deletion_rev)
+ ? deletion_rev - 1
+ : deletion_rev,
+ creation_rev,
+ scratch_pool, scratch_pool);
+
+ if (surviving_copy)
+ {
+ *state = ds_has_copies;
+ }
+ else
+ {
+ *state = SVN_IS_VALID_REVNUM(deletion_rev) ? ds_deleted
+ : ds_implied;
+ }
+ }
+ else
+ {
+ /* No log available: trust the lookup verbatim. */
+ *state = ds_deleted;
+ }
+
+ /* Remove branch if it has actually been deleted. */
+ if (*state == ds_deleted)
+ {
+ svn_hash_sets(mergeinfo, path, NULL);
+
+ if (progress)
+ ++progress->obsoletes_removed;
+ }
+ }
+ else
+ {
+ *state = ds_exists;
+ }
+
+ SVN_ERR(show_removed_branch(path, opt_state, *state, report_non_removals,
+ surviving_copy, progress, scratch_pool));
+
+ return SVN_NO_ERROR;
+}
+
+/* If enabled in OPT_STATE, use LOG and LOOKUP to remove all lines from
+ * MERGEINFO that refer to deleted branches.
+ *
+ * If LOCAL_ONLY is set, only remove branches that are known to have been
+ * deleted as per LOOKUP - this is for quick checks. Track progress in
+ * PROGRESS.
+ *
+ * Use SCRATCH_POOL for temporaries.
+ */
+static svn_error_t *
+remove_obsolete_lines(svn_min__branch_lookup_t *lookup,
+ svn_min__log_t *log,
+ svn_mergeinfo_t mergeinfo,
+ svn_min__opt_state_t *opt_state,
+ progress_t *progress,
+ svn_boolean_t local_only,
+ apr_pool_t *scratch_pool)
+{
+ int i;
+ apr_array_header_t *sorted_mi;
+ apr_pool_t *iterpool;
+
+ /* Skip if removal of deleted branches has not been enabled. */
+ if (!opt_state->remove_obsoletes)
+ return SVN_NO_ERROR;
+
+ iterpool = svn_pool_create(scratch_pool);
+
+ /* Sort branches by name to ensure a nicely sorted operations log. */
+ sorted_mi = svn_sort__hash(mergeinfo,
+ svn_sort_compare_items_lexically,
+ scratch_pool);
+
+ /* Only show the section header if we removed at least one line. */
+ progress->needs_header = TRUE;
+
+ /* Simply iterate over all branches mentioned in the mergeinfo. */
+ for (i = 0; i < sorted_mi->nelts; ++i)
+ {
+ const char *path = APR_ARRAY_IDX(sorted_mi, i, svn_sort__item_t).key;
+ deletion_state_t state;
+
+ svn_pool_clear(iterpool);
+ SVN_ERR(remove_obsolete_line(&state, lookup, log, mergeinfo, path,
+ opt_state, progress, local_only, TRUE,
+ iterpool));
+ }
+
+ progress->needs_header = FALSE;
+ svn_pool_destroy(iterpool);
+
+ return SVN_NO_ERROR;
+}
+
+/* If CHILD ends in the component suffix "/RELPATH", return the prefix of
+ * CHILD before that suffix, allocated in RESULT_POOL. Otherwise, return
+ * the empty string (no such ancestor exists). */
+static const char *
+get_parent_path(const char *child,
+                const char *relpath,
+                apr_pool_t *result_pool)
+{
+  apr_size_t child_len = strlen(child);
+  apr_size_t suffix_len = strlen(relpath);
+  apr_size_t parent_len;
+
+  /* CHILD must at least hold RELPATH plus a separator. */
+  if (child_len <= suffix_len)
+    return "";
+
+  parent_len = child_len - suffix_len - 1;
+  if (child[parent_len] != '/' || strcmp(child + parent_len + 1, relpath))
+    return "";
+
+  return apr_pstrmemdup(result_pool, child, parent_len);
+}
+
+/* Remove all ranges from *RANGES where the history of SOURCE_PATH@RANGE
+ * and TARGET_PATH@HEAD overlap. Return the list of *REMOVED ranges,
+ * allocated in RESULT_POOL. Use SCRATCH_POOL for temporary allocations.
+ *
+ * Note that SOURCE_PATH@RANGE may actually refer to different branches
+ * created or re-created and then deleted at different points in time.
+ */
+static svn_error_t *
+remove_overlapping_history(svn_rangelist_t **removed,
+ svn_rangelist_t **ranges,
+ svn_min__log_t *log,
+ const char *source_path,
+ const char *target_path,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ apr_array_header_t *target_history;
+ apr_array_header_t *source_history;
+ apr_array_header_t *deletions;
+ svn_revnum_t source_rev, next_deletion;
+ apr_pool_t *iterpool;
+ int i;
+
+ svn_rangelist_t *result = apr_array_make(result_pool, 0,
+ sizeof(svn_merge_range_t *));
+
+ /* In most cases, there is nothing to do. */
+ if (!(*ranges)->nelts)
+ {
+ *removed = result;
+ return SVN_NO_ERROR;
+ }
+
+ /* The history of the working copy branch ("target") is always the same. */
+ iterpool = svn_pool_create(scratch_pool);
+ target_history = svn_min__get_history(log, target_path, SVN_INVALID_REVNUM,
+ 0, scratch_pool, scratch_pool);
+
+ /* Collect the deletion revisions, i.e. the revisions separating different
+ branches with the same name. */
+ deletions = svn_min__find_deletions(log, source_path, scratch_pool,
+ scratch_pool);
+ next_deletion = SVN_INVALID_REVNUM;
+
+ /* Get the history of each of these branches up to the point where the
+ respective previous branch was deleted (or r0). Intersect with the
+ target history and RANGES. */
+ for (i = 0; i <= deletions->nelts; ++i)
+ {
+ apr_array_header_t *common_history;
+ apr_array_header_t *common_ranges;
+ apr_array_header_t *removable_ranges;
+ svn_pool_clear(iterpool);
+
+ /* First iteration: HEAD to whatever latest deletion or r0.
+
+ NEXT_DELETION points to the last revision that may contain
+ changes of the previous branch at SOURCE_PATH. The deletion
+ rev itself is not relevant but may instead contain the modifying
+ creation of the next incarnation of that branch. */
+ source_rev = next_deletion;
+ next_deletion = i < deletions->nelts
+ ? APR_ARRAY_IDX(deletions, i, svn_revnum_t) - 1
+ : 0;
+
+ /* Determine the overlapping history of merge source & target. */
+ source_history = svn_min__get_history(log, source_path,
+ source_rev, next_deletion,
+ iterpool, iterpool);
+ common_history = svn_min__intersect_history(source_history,
+ target_history, iterpool);
+
+ /* Remove that overlap from RANGES. */
+ common_ranges = svn_min__history_ranges(common_history, iterpool);
+ if (!common_ranges->nelts)
+ continue;
+
+ SVN_ERR(svn_rangelist_intersect(&removable_ranges, common_ranges,
+ *ranges, TRUE, iterpool));
+ SVN_ERR(svn_rangelist_remove(ranges, removable_ranges, *ranges, TRUE,
+ (*ranges)->pool));
+ SVN_ERR(svn_rangelist_merge2(result, removable_ranges, result_pool,
+ result_pool));
+ }
+
+ svn_pool_destroy(iterpool);
+ *removed = result;
+
+ return SVN_NO_ERROR;
+}
+
+/* Scan RANGES for non-recursive ranges. If there are any, remove all
+ * ranges where the history of SOURCE_PATH@RANGE and TARGET_PATH@HEAD
+ * overlap. Also remove all ranges that are not operative on OP_PATH.
+ *
+ * The remaining ranges are the ones actually relevant to a future merge.
+ * Return those in *NON_RECURSIVE_RANGES, allocated in RESULT_POOL.
+ * Use SCRATCH_POOL for temporary allocations.
+ *
+ * Note that SOURCE_PATH@RANGE may actually refer to different branches
+ * created or re-created and then deleted at different points in time.
+ */
+static svn_error_t *
+find_relevant_non_recursive_ranges(svn_rangelist_t **non_recursive_ranges,
+                                   svn_rangelist_t *ranges,
+                                   svn_min__log_t *log,
+                                   const char *source_path,
+                                   const char *target_path,
+                                   const char *op_path,
+                                   apr_pool_t *result_pool,
+                                   apr_pool_t *scratch_pool)
+{
+  svn_rangelist_t *candidates = find_non_recursive_ranges(ranges,
+                                                          scratch_pool);
+  svn_rangelist_t *implied;
+
+  /* Drop whatever is already covered by the common history of source and
+   * target; IMPLIED is only needed as a dummy output here. */
+  SVN_ERR(remove_overlapping_history(&implied, &candidates,
+                                     log, source_path, target_path,
+                                     scratch_pool, scratch_pool));
+
+  /* Of the rest, keep only those ranges that actually touch OP_PATH. */
+  *non_recursive_ranges = svn_min__operative(log, op_path, candidates,
+                                             result_pool);
+
+  return SVN_NO_ERROR;
+}
+
+/* Show the results of an attempt at "misaligned branch elision".
+ * SOURCE_BRANCH was to be elided because TARGET_BRANCH would cover it all.
+ * MISSING lists the revisions found exclusively in SOURCE_BRANCH.
+ * OPT_STATE filters the output and SCRATCH_POOL is used for temporary
+ * allocations. */
+static svn_error_t *
+show_misaligned_branch_elision(const char *source_branch,
+                               const char *target_branch,
+                               svn_rangelist_t *missing,
+                               svn_min__opt_state_t *opt_state,
+                               apr_pool_t *scratch_pool)
+{
+  /* Only report in verbose or analysis mode. */
+  if (!opt_state->verbose && !opt_state->run_analysis)
+    return SVN_NO_ERROR;
+
+  if (!missing->nelts)
+    {
+      /* Nothing exclusive to the misaligned branch: elision succeeded. */
+      SVN_ERR(svn_cmdline_printf(scratch_pool,
+                                 _(" elide misaligned branch %s\n"
+                                   " to likely correctly aligned branch %s\n"),
+                                 source_branch, target_branch));
+      return SVN_NO_ERROR;
+    }
+
+  SVN_ERR(svn_cmdline_printf(scratch_pool,
+                             _(" CANNOT elide MISALIGNED branch %s\n"
+                               " to likely correctly aligned branch %s\n"),
+                             source_branch, target_branch));
+  SVN_ERR(print_ranges(missing,
+                       _("revisions not merged from likely correctly"
+                         " aligned branch: "),
+                       scratch_pool));
+
+  return SVN_NO_ERROR;
+}
+
+/* Search MERGEINFO for branches that are sub-branches of one another.
+ * If exactly one of them shares the base with the FS_PATH to which the m/i
+ * is attached, than this is likely the properly aligned branch while the
+ * others are misaligned.
+ *
+ * Using LOG, determine those misaligned branches whose operative merged
+ * revisions are already covered by the merged revisions of the likely
+ * correctly aligned branch. In that case, remove those misaligned branch
+ * entries from MERGEINFO.
+ *
+ * OPT_STATE filters the output and SCRATCH_POOL is used for temporaries.
+ */
+static svn_error_t *
+remove_redundant_misaligned_branches(svn_min__log_t *log,
+ const char *fs_path,
+ svn_mergeinfo_t mergeinfo,
+ svn_min__opt_state_t *opt_state,
+ apr_pool_t *scratch_pool)
+{
+ apr_pool_t *iterpool = svn_pool_create(scratch_pool);
+ int i, k;
+ const char *base_name = svn_dirent_basename(fs_path, scratch_pool);
+ apr_array_header_t *sorted_mi;
+
+ sorted_mi = svn_sort__hash(mergeinfo,
+ svn_sort_compare_items_lexically,
+ scratch_pool);
+
+ for (i = 0; i < sorted_mi->nelts - 1; i = k)
+ {
+ const char *item_path, *sub_item_path;
+ int maybe_aligned_index = -1;
+ int maybe_aligned_found = 0;
+ int sub_branch_count = 0;
+
+ svn_pool_clear(iterpool);
+
+ /* Find the range of branches that are sub-branches of the one at I. */
+ item_path = APR_ARRAY_IDX(sorted_mi, i, svn_sort__item_t).key;
+ if (!strcmp(base_name, svn_dirent_basename(item_path, iterpool)))
+ {
+ maybe_aligned_index = i;
+ maybe_aligned_found = 1;
+ }
+
+ for (k = i + 1; k < sorted_mi->nelts; ++k)
+ {
+ sub_item_path = APR_ARRAY_IDX(sorted_mi, k, svn_sort__item_t).key;
+ if (!svn_dirent_is_ancestor(item_path, sub_item_path))
+ break;
+
+ if (!strcmp(base_name,
+ svn_dirent_basename(sub_item_path, iterpool)))
+ {
+ maybe_aligned_index = k;
+ maybe_aligned_found++;
+ }
+ }
+
+ /* Found any? If so, did we identify exactly one of them as likely
+ * being properly aligned? */
+ sub_branch_count = k - i - 1;
+ if ((maybe_aligned_found != 1) || (sub_branch_count == 0))
+ continue;
+
+ /* Try to elide all misaligned branches individually. */
+ for (k = i; k < i + sub_branch_count + 1; ++k)
+ {
+ svn_sort__item_t *source_item, *target_item;
+ svn_rangelist_t *missing, *dummy;
+
+ /* Is this one of the misaligned branches? */
+ if (k == maybe_aligned_index)
+ continue;
+
+ source_item = &APR_ARRAY_IDX(sorted_mi, k, svn_sort__item_t);
+ target_item = &APR_ARRAY_IDX(sorted_mi, maybe_aligned_index,
+ svn_sort__item_t);
+
+ /* Elide into sub-branch or parent branch (can't be equal here).
+ * Because we only know these are within the I tree, source and
+ * target may be siblings. Check that they actually have an
+ * ancestor relationship.
+ */
+ if (k < maybe_aligned_index)
+ {
+ if (!svn_dirent_is_ancestor(source_item->key, target_item->key))
+ continue;
+ }
+ else
+ {
+ if (!svn_dirent_is_ancestor(target_item->key, source_item->key))
+ continue;
+ }
+
+ /* Determine which revisions are MISSING in target. */
+ SVN_ERR(svn_rangelist_diff(&missing, &dummy,
+ source_item->value, target_item->value,
+ TRUE, iterpool));
+ missing = svn_min__operative(log, source_item->key, missing,
+ iterpool);
+
+ /* Show the result and elide the branch if we can. */
+ SVN_ERR(show_misaligned_branch_elision(source_item->key,
+ target_item->key,
+ missing,
+ opt_state,
+ iterpool));
+ if (!missing->nelts)
+ svn_hash_sets(mergeinfo, source_item->key, NULL);
+ }
+ }
+
+ svn_pool_destroy(iterpool);
+
+ return SVN_NO_ERROR;
+}
+
+/* Try to elide as many lines from SUBTREE_MERGEINFO for node at FS_PATH as
+ * possible using LOG and LOOKUP. OPT_STATE determines if we may remove
+ * deleted branches. Elision happens by comparing the node's mergeinfo
+ * with the PARENT_MERGEINFO using REL_PATH to match up the branch paths.
+ *
+ * SIBLING_MERGEINFO contains the mergeinfo of all nodes with mergeinfo
+ * immediately below the parent. It can be used to "summarize" m/i over
+ * all sub-nodes and elide that to the parent.
+ *
+ * Use SCRATCH_POOL for temporaries.
+ */
+static svn_error_t *
+remove_lines(svn_min__log_t *log,
+ svn_min__branch_lookup_t *lookup,
+ const char *fs_path,
+ const char *relpath,
+ svn_mergeinfo_t parent_mergeinfo,
+ svn_mergeinfo_t subtree_mergeinfo,
+ apr_array_header_t *sibling_mergeinfo,
+ svn_min__opt_state_t *opt_state,
+ apr_pool_t *scratch_pool)
+{
+ apr_pool_t *iterpool = svn_pool_create(scratch_pool);
+ apr_array_header_t *sorted_mi;
+ int i;
+
+ sorted_mi = svn_sort__hash(subtree_mergeinfo,
+ svn_sort_compare_items_lexically,
+ scratch_pool);
+ for (i = 0; i < sorted_mi->nelts; ++i)
+ {
+ const char *parent_path, *subtree_path, *parent_fs_path;
+ svn_rangelist_t *parent_ranges, *subtree_ranges;
+ svn_rangelist_t *reverse_ranges, *non_recursive_ranges;
+ svn_rangelist_t *subtree_only, *parent_only;
+ svn_rangelist_t *operative_outside_subtree, *operative_in_subtree;
+ svn_rangelist_t *implied_in_subtree, *implied_in_parent;
+ const svn_sort__item_t *item;
+ deletion_state_t state;
+
+ svn_pool_clear(iterpool);
+
+ item = &APR_ARRAY_IDX(sorted_mi, i, svn_sort__item_t);
+ subtree_path = item->key;
+
+ /* Maybe, this branch is known to be obsolete anyway.
+ Do a quick check based on previous lookups. */
+ SVN_ERR(remove_obsolete_line(&state, lookup, log,
+ subtree_mergeinfo, subtree_path,
+ opt_state, NULL, TRUE, FALSE,
+ iterpool));
+ if (state == ds_deleted)
+ continue;
+
+ /* Find the parent m/i entry for the same branch. */
+ parent_path = get_parent_path(subtree_path, relpath, iterpool);
+ parent_fs_path = get_parent_path(fs_path, relpath, iterpool);
+ subtree_ranges = item->value;
+ parent_ranges = svn_hash_gets(parent_mergeinfo, parent_path);
+
+ /* We don't know how to handle reverse ranges (there should be none).
+ So, we must check for them - just to be sure. */
+ reverse_ranges = find_reverse_ranges(subtree_ranges, iterpool);
+ if (reverse_ranges->nelts)
+ {
+ /* We really found a reverse revision range!?
+ Try to get rid of it. */
+ SVN_ERR(remove_obsolete_line(&state, lookup, log,
+ subtree_mergeinfo, subtree_path,
+ opt_state, NULL, FALSE, FALSE,
+ iterpool));
+ if (state != ds_deleted)
+ SVN_ERR(show_reverse_ranges(subtree_path, reverse_ranges,
+ opt_state, iterpool));
+
+ continue;
+ }
+
+ /* We don't know how to handle non-recursive ranges (they are legal,
+ * though). So, we must check if there are any that would actually
+ * affect future merges. */
+ SVN_ERR(find_relevant_non_recursive_ranges(&non_recursive_ranges,
+ subtree_ranges, log,
+ subtree_path, fs_path,
+ subtree_path,
+ iterpool, iterpool));
+ if (non_recursive_ranges->nelts)
+ {
+ /* We really found non-recursive merges?
+ Try to get rid of them. */
+ SVN_ERR(remove_obsolete_line(&state, lookup, log,
+ subtree_mergeinfo, subtree_path,
+ opt_state, NULL, FALSE, FALSE,
+ iterpool));
+ if (state != ds_deleted)
+ SVN_ERR(show_non_recursive_ranges(subtree_path,
+ non_recursive_ranges,
+ opt_state, iterpool));
+
+ continue;
+ }
+
+ if (parent_ranges && parent_ranges->nelts)
+ {
+ /* Any non-recursive ranges at the parent node that are
+ * operative on the sub-node and not implicit part of the
+ * branch history? */
+ SVN_ERR(find_relevant_non_recursive_ranges(&non_recursive_ranges,
+ parent_ranges, log,
+ parent_path,
+ parent_fs_path,
+ subtree_path,
+ iterpool, iterpool));
+ if (non_recursive_ranges->nelts)
+ {
+ /* We really found non-recursive merges at the parent?
+ Try to get rid of them at the parent and sub-node alike. */
+ SVN_ERR(remove_obsolete_line(&state, lookup, log,
+ subtree_mergeinfo, parent_path,
+ opt_state, NULL, FALSE, FALSE,
+ iterpool));
+ if (state == ds_deleted)
+ SVN_ERR(remove_obsolete_line(&state, lookup, log,
+ subtree_mergeinfo, subtree_path,
+ opt_state, NULL, FALSE, FALSE,
+ iterpool));
+ if (state != ds_deleted)
+ SVN_ERR(show_non_recursive_ranges(parent_path,
+ non_recursive_ranges,
+ opt_state, iterpool));
+
+ continue;
+ }
+ }
+
+ /* Are there any parent ranges to which to elide sub-tree m/i? */
+ if (!parent_ranges)
+ {
+ /* There is none. Before we flag that as a problem, maybe the
+ branch has been deleted after all? This time contact the
+ repository. */
+ SVN_ERR(remove_obsolete_line(&state, lookup, log,
+ subtree_mergeinfo, subtree_path,
+ opt_state, NULL, FALSE, FALSE,
+ iterpool));
+
+ if (state == ds_deleted)
+ continue;
+
+ /* Find revs that are missing in the sub-tree m/i but affect
+ paths in the sub-tree. */
+ subtree_only = subtree_ranges;
+ operative_in_subtree
+ = svn_min__operative(log, subtree_path, subtree_only, iterpool);
+ SVN_ERR(remove_overlapping_history(&implied_in_subtree,
+ &operative_in_subtree, log,
+ subtree_path, fs_path,
+ iterpool, iterpool));
+
+ if (operative_in_subtree->nelts)
+ {
+ /* If still relevant, we need to keep the whole m/i on this
+ node. Therefore, report the problem. */
+ SVN_ERR(show_missing_parent(subtree_path, !*parent_path,
+ opt_state, scratch_pool));
+ }
+ else
+ {
+ /* This branch entry is some sort of artefact that doesn't
+ refer to any actual changes and can therefore be removed.
+ Report why that is. */
+ apr_array_header_t *empty = operative_in_subtree;
+ SVN_ERR(svn_rangelist_remove(&subtree_only, implied_in_subtree,
+ subtree_only, TRUE, iterpool));
+ SVN_ERR(show_branch_elision(subtree_path, empty,
+ subtree_only, empty, empty, empty,
+ implied_in_subtree, opt_state,
+ iterpool));
+
+ svn_hash_sets(subtree_mergeinfo, subtree_path, NULL);
+ }
+
+ continue;
+ }
+
+ /* Try the actual elision, i.e. compare parent and sub-tree m/i.
+ Where they don't fit, figure out if they can be aligned. */
+ SVN_ERR(svn_rangelist_diff(&parent_only, &subtree_only,
+ parent_ranges, subtree_ranges, TRUE,
+ iterpool));
+
+ /* From the set of revisions missing on the parent, remove those that
+ don't actually affect the sub-tree. Those can safely be ignored. */
+ subtree_only
+ = svn_min__operative(log, subtree_path, subtree_only, iterpool);
+
+ /* Find revs that are missing in the parent m/i but affect paths
+ outside the sub-tree. */
+ operative_outside_subtree
+ = svn_min__operative_outside_subtree(log, parent_path, subtree_path,
+ subtree_only, iterpool);
+
+ /* Find revs that are missing in the sub-tree m/i but affect paths in
+ the sub-tree. */
+ operative_in_subtree
+ = svn_min__operative(log, subtree_path, parent_only, iterpool);
+
+ /* Remove revision ranges that are implied by the "natural" history
+ of the merged branch vs. the current branch. */
+ SVN_ERR(remove_overlapping_history(&implied_in_subtree,
+ &operative_in_subtree, log,
+ subtree_path, fs_path,
+ iterpool, iterpool));
+ SVN_ERR(remove_overlapping_history(&implied_in_parent,
+ &operative_outside_subtree, log,
+ parent_path, parent_fs_path,
+ iterpool, iterpool));
+
+ /* Before we show a branch as "CANNOT elide", make sure it is even
+ still relevant. */
+ if ( operative_outside_subtree->nelts
+ || operative_in_subtree->nelts)
+ {
+ /* This branch can't be elided. Maybe, it is obsolete anyway. */
+ SVN_ERR(remove_obsolete_line(&state, lookup, log,
+ subtree_mergeinfo, subtree_path,
+ opt_state, NULL, FALSE, FALSE,
+ iterpool));
+ if (state == ds_deleted)
+ continue;
+ }
+
+ /* Try harder:
+ * There are cases where a merge affected multiple sibling nodes, got
+ * recorded there but was not recorded at the parent. Remove these
+ * from the list of revisions that couldn't be propagated to the
+ * parent node. */
+ if (operative_outside_subtree->nelts && sibling_mergeinfo->nelts > 1)
+ {
+ apr_hash_t *sibling_ranges;
+ SVN_ERR(svn_min__sibling_ranges(&sibling_ranges, sibling_mergeinfo,
+ parent_path,
+ operative_outside_subtree,
+ iterpool, iterpool));
+
+ operative_outside_subtree
+ = svn_min__operative_outside_all_subtrees(log, parent_path,
+ operative_outside_subtree,
+ sibling_ranges,
+ iterpool, iterpool);
+ }
+
+ /* Log whether an elision was possible. */
+ SVN_ERR(show_branch_elision(subtree_path, subtree_only,
+ parent_only, operative_outside_subtree,
+ operative_in_subtree, implied_in_parent,
+ implied_in_subtree, opt_state, iterpool));
+
+ /* This will also work when subtree_only is empty. */
+ if ( !operative_outside_subtree->nelts
+ && !operative_in_subtree->nelts)
+ {
+ SVN_ERR(svn_rangelist_merge2(parent_ranges, subtree_only,
+ parent_ranges->pool, iterpool));
+ svn_hash_sets(subtree_mergeinfo, subtree_path, NULL);
+ }
+ }
+
+ /* TODO: Move subtree ranges to parent even if the parent has no entry
+ * for the respective branches, yet. */
+
+ svn_pool_destroy(iterpool);
+
+ return SVN_NO_ERROR;
+}
+
+/* Return TRUE if revisions START to END are inoperative on PATH, according
+ * to LOG. Use SCRATCH_POOL for temporaries. */
+static svn_boolean_t
+inoperative(svn_min__log_t *log,
+ const char *path,
+ svn_revnum_t start,
+ svn_revnum_t end,
+ apr_pool_t *scratch_pool)
+{
+ svn_merge_range_t range = { 0 };
+ apr_array_header_t *ranges = apr_array_make(scratch_pool, 1, sizeof(&range));
+
+ range.start = start - 1;
+ range.end = end;
+ APR_ARRAY_PUSH(ranges, svn_merge_range_t *) = &range;
+
+ return svn_min__operative(log, path, ranges, scratch_pool)->nelts == 0;
+}
+
+/* Use LOG to determine what revision ranges in MERGEINFO can be combined
+ * because the revisions in between them are inoperative on the respective
+ * branch (sub-)path. Combine those revision ranges and update PROGRESS.
+ * Make this a no-op if it has not been enabled in OPT_STATE.
+ * Use SCRATCH_POOL for temporary allocations. */
+static svn_error_t *
+shorten_lines(svn_mergeinfo_t mergeinfo,
+ svn_min__log_t *log,
+ svn_min__opt_state_t *opt_state,
+ progress_t *progress,
+ apr_pool_t *scratch_pool)
+{
+ apr_pool_t *iterpool = svn_pool_create(scratch_pool);
+ apr_hash_index_t *hi;
+
+ /* Skip if this operation has not been enabled. Note: this returns without destroying ITERPOOL; it is reclaimed with SCRATCH_POOL. */
+ if (!opt_state->combine_ranges)
+ return SVN_NO_ERROR;
+
+ /* Process each branch independently. */
+ for (hi = apr_hash_first(scratch_pool, mergeinfo);
+ hi;
+ hi = apr_hash_next(hi))
+ {
+ int source, dest;
+ const char *path = apr_hash_this_key(hi);
+ svn_rangelist_t *ranges = apr_hash_this_val(hi);
+
+ /* Skip edge cases. */
+ if (ranges->nelts < 2 || find_reverse_ranges(ranges, iterpool)->nelts)
+ continue;
+
+ /* Merge ranges where possible. */
+ for (source = 1, dest = 0; source < ranges->nelts; ++source)
+ {
+ svn_merge_range_t *source_range
+ = APR_ARRAY_IDX(ranges, source, svn_merge_range_t *);
+ svn_merge_range_t *dest_range
+ = APR_ARRAY_IDX(ranges, dest, svn_merge_range_t *);
+
+ svn_pool_clear(iterpool);
+
+ if ( (source_range->inheritable == dest_range->inheritable)
+ && inoperative(log, path, dest_range->end + 1,
+ source_range->start, iterpool))
+ {
+ dest_range->end = source_range->end;
+ }
+ else
+ {
+ ++dest;
+ APR_ARRAY_IDX(ranges, dest, svn_merge_range_t *)
+ = source_range;
+ }
+ }
+
+ /* Update progress. */
+ progress->ranges_removed += ranges->nelts - dest - 1;
+ ranges->nelts = dest + 1;
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Construct a 1-line progress info based on the PROGRESS and selected
+ * processing options in OPT_STATE. Allocate the result in RESULT_POOL
+ * and use SCRATCH_POOL for temporaries. */
+static const char *
+progress_string(const progress_t *progress,
+ svn_min__opt_state_t *opt_state,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ const char *obsoletes_str = apr_psprintf(scratch_pool,
+ "%" APR_UINT64_T_FMT,
+ progress->obsoletes_removed);
+ const char *nodes_str = apr_psprintf(scratch_pool,
+ "%" APR_UINT64_T_FMT,
+ progress->nodes_removed);
+ const char *ranges_str = apr_psprintf(scratch_pool,
+ "%" APR_UINT64_T_FMT,
+ progress->ranges_removed);
+
+ svn_stringbuf_t *result = svn_stringbuf_create_empty(result_pool);
+ svn_stringbuf_appendcstr(result,
+ apr_psprintf(scratch_pool,
+ _("Processed %d nodes"),
+ progress->nodes_total
+ - progress->nodes_todo));
+
+ if (opt_state->remove_obsoletes)
+ svn_stringbuf_appendcstr(result,
+ apr_psprintf(scratch_pool,
+ _(", removed %s branches"),
+ obsoletes_str));
+
+ if (opt_state->remove_redundants)
+ svn_stringbuf_appendcstr(result,
+ apr_psprintf(scratch_pool,
+ _(", removed m/i on %s sub-nodes"),
+ nodes_str));
+
+ if (opt_state->combine_ranges)
+ svn_stringbuf_appendcstr(result,
+ apr_psprintf(scratch_pool,
+ _(", combined %s ranges"),
+ ranges_str));
+
+ return result->data;
+}
+
+/* Depending on the options in OPT_STATE, print the header to be shown
+ * before processing the m/i at RELPATH relative to the parent mergeinfo
+ * at PARENT_PATH. If there is no parent m/i, RELPATH is empty.
+ * Use SCRATCH_POOL for temporary allocations. */
+static svn_error_t *
+show_elision_header(const char *parent_path,
+ const char *relpath,
+ svn_min__opt_state_t *opt_state,
+ apr_pool_t *scratch_pool)
+{
+ if (opt_state->verbose)
+ {
+ /* In verbose mode, be specific of what gets elided to where. */
+ if (*relpath)
+ SVN_ERR(svn_cmdline_printf(scratch_pool,
+ _("Trying to elide mergeinfo from path\n"
+ " %s\n"
+ " into mergeinfo at path\n"
+ " %s\n\n"),
+ svn_dirent_join(parent_path, relpath,
+ scratch_pool),
+ parent_path));
+ else
+ SVN_ERR(svn_cmdline_printf(scratch_pool,
+ _("Trying to elide mergeinfo at path\n"
+ " %s\n\n"),
+ parent_path));
+ }
+ else if (opt_state->run_analysis)
+ {
+ /* If we are not in analysis mode, only the progress would be shown
+ * and we would stay quiet here. */
+ SVN_ERR(svn_cmdline_printf(scratch_pool,
+ _("Trying to elide mergeinfo at path %s\n"),
+ svn_dirent_join(parent_path, relpath,
+ scratch_pool)));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Given the PARENT_MERGEINFO and the current node's SUBTREE_MERGEINFO
+ * after the processing / elision attempt, print a summary of the results
+ * to console. Get the verbosity setting from OPT_STATE. Use SCRATCH_POOL
+ * for temporary allocations. */
+static svn_error_t *
+show_elision_result(svn_mergeinfo_t parent_mergeinfo,
+ svn_mergeinfo_t subtree_mergeinfo,
+ svn_min__opt_state_t *opt_state,
+ apr_pool_t *scratch_pool)
+{
+ if (opt_state->verbose)
+ {
+ /* In verbose mode, tell the user what branches survived. */
+ if (apr_hash_count(subtree_mergeinfo))
+ {
+ apr_array_header_t *sorted_mi;
+ int i;
+ apr_pool_t *iterpool = svn_pool_create(scratch_pool);
+
+ if (parent_mergeinfo)
+ SVN_ERR(svn_cmdline_printf(scratch_pool,
+ _("\n Sub-tree merge info cannot be elided due to "
+ "the following branch(es):\n")));
+ else
+ SVN_ERR(svn_cmdline_printf(scratch_pool,
+ _("\n Merge info kept for the following branch(es):\n")));
+
+ sorted_mi = svn_sort__hash(subtree_mergeinfo,
+ svn_sort_compare_items_lexically,
+ scratch_pool);
+ for (i = 0; i < sorted_mi->nelts; ++i)
+ {
+ const char *branch = APR_ARRAY_IDX(sorted_mi, i,
+ svn_sort__item_t).key;
+ svn_pool_clear(iterpool);
+ SVN_ERR(svn_cmdline_printf(scratch_pool, _(" %s\n"),
+ branch));
+ }
+
+ SVN_ERR(svn_cmdline_printf(scratch_pool, _("\n")));
+ svn_pool_destroy(iterpool);
+ }
+ else
+ {
+ SVN_ERR(svn_cmdline_printf(scratch_pool,
+ _("\n All sub-tree mergeinfo has been elided.\n\n")));
+ }
+ }
+ else if (opt_state->run_analysis)
+ {
+ /* If we are not in analysis mode, only the progress would be shown
+ * and we would stay quiet here. */
+ if (apr_hash_count(subtree_mergeinfo))
+ {
+ if (parent_mergeinfo)
+ SVN_ERR(svn_cmdline_printf(scratch_pool, _("\n")));
+ else
+ SVN_ERR(svn_cmdline_printf(scratch_pool,
+ _(" Keeping top-level mergeinfo.\n")));
+ }
+ else
+ {
+ SVN_ERR(svn_cmdline_printf(scratch_pool,
+ _(" All sub-tree mergeinfo has been elided.\n\n")));
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Main normalization function. Process all mergeinfo in WC_MERGEINFO, one
+ * by one, bottom-up and try to elide it by comparing it with and aligning
+ * it to the respective parent mergeinfo. This modifies the contents of
+ * WC_MERGEINFO.
+ *
+ * LOG and LOOKUP provide the repository info needed to perform the
+ * normalization steps selected in OPT_STATE. LOG and LOOKUP may be NULL.
+ *
+ * Use SCRATCH_POOL for temporary allocations.
+ */
+static svn_error_t *
+normalize(apr_array_header_t *wc_mergeinfo,
+ svn_min__log_t *log,
+ svn_min__branch_lookup_t *lookup,
+ svn_min__opt_state_t *opt_state,
+ apr_pool_t *scratch_pool)
+{
+ apr_pool_t *iterpool = svn_pool_create(scratch_pool);
+ progress_t progress = { 0 };
+
+ int i;
+ progress.nodes_total = wc_mergeinfo->nelts;
+ for (i = wc_mergeinfo->nelts - 1; i >= 0; --i)
+ {
+ const char *parent_path;
+ const char *relpath;
+ const char *fs_path;
+ svn_mergeinfo_t parent_mergeinfo;
+ svn_mergeinfo_t subtree_mergeinfo;
+ svn_mergeinfo_t subtree_mergeinfo_copy;
+ svn_mergeinfo_t mergeinfo_to_report;
+ apr_array_header_t *sibling_mergeinfo;
+
+ svn_pool_clear(iterpool);
+ progress.nodes_todo = i;
+
+ /* Get the relevant mergeinfo. */
+ svn_min__get_mergeinfo_pair(&fs_path, &parent_path, &relpath,
+ &parent_mergeinfo, &subtree_mergeinfo,
+ &sibling_mergeinfo, wc_mergeinfo, i);
+ SVN_ERR(show_elision_header(parent_path, relpath, opt_state,
+ scratch_pool));
+
+ /* Get rid of some of the easier cases of misaligned branches.
+ * Directly modify the original mergeinfo. */
+ if (opt_state->remove_redundant_misaligned)
+ SVN_ERR(remove_redundant_misaligned_branches(log, fs_path,
+ subtree_mergeinfo,
+ opt_state, iterpool));
+
+ /* Modify this copy of the mergeinfo.
+ * If we can elide it all, drop the original. */
+ subtree_mergeinfo_copy = svn_mergeinfo_dup(subtree_mergeinfo,
+ iterpool);
+
+ /* Eliminate redundant sub-node mergeinfo. */
+ if (opt_state->remove_redundants && parent_mergeinfo)
+ {
+ svn_mergeinfo_t parent_mergeinfo_copy;
+ mergeinfo_to_report = subtree_mergeinfo_copy;
+
+ /* Try to elide the mergeinfo for all branches. */
+ parent_mergeinfo_copy = svn_mergeinfo_dup(parent_mergeinfo,
+ iterpool);
+
+ SVN_ERR(remove_lines(log, lookup, fs_path, relpath,
+ parent_mergeinfo_copy, subtree_mergeinfo_copy,
+ sibling_mergeinfo, opt_state, iterpool));
+
+ /* If all sub-tree mergeinfo could be elided, clear it. Update
+ the parent mergeinfo in case we moved some up the tree. */
+ if (apr_hash_count(subtree_mergeinfo_copy) == 0)
+ {
+ SVN_ERR(svn_mergeinfo_merge2(parent_mergeinfo,
+ parent_mergeinfo_copy,
+ apr_hash_pool_get(parent_mergeinfo),
+ iterpool));
+ apr_hash_clear(subtree_mergeinfo);
+ ++progress.nodes_removed;
+ }
+ else
+ {
+ /* We have to keep the sub-tree m/i but we can remove entries
+ for deleted branches from it. */
+ SVN_ERR(remove_obsolete_lines(lookup, log, subtree_mergeinfo,
+ opt_state, &progress, FALSE,
+ iterpool));
+ }
+ }
+ else
+ {
+ /* Eliminate deleted branches. */
+ mergeinfo_to_report = subtree_mergeinfo;
+ SVN_ERR(remove_obsolete_lines(lookup, log, subtree_mergeinfo,
+ opt_state, &progress, FALSE,
+ iterpool));
+ }
+
+ /* Reduce the number of remaining ranges. */
+ SVN_ERR(shorten_lines(subtree_mergeinfo, log, opt_state, &progress,
+ iterpool));
+
+ /* Display what's left. */
+ SVN_ERR(show_elision_result(parent_mergeinfo, mergeinfo_to_report,
+ opt_state, scratch_pool));
+
+ /* Print progress info. */
+ if ( !opt_state->verbose && !opt_state->run_analysis
+ && !opt_state->quiet && i % 100 == 0)
+ SVN_ERR(svn_cmdline_printf(iterpool, " %s.\n",
+ progress_string(&progress, opt_state,
+ iterpool, iterpool)));
+ }
+
+ svn_pool_destroy(iterpool);
+
+ return SVN_NO_ERROR;
+}
+
+/* Return TRUE, if the operations selected in OPT_STATE require the log. */
+static svn_boolean_t
+needs_log(svn_min__opt_state_t *opt_state)
+{
+ return opt_state->combine_ranges || opt_state->remove_redundants;
+}
+
+/* Return TRUE, if the operations selected in OPT_STATE require a
+ * connection (session) to the repository. */
+static svn_boolean_t
+needs_session(svn_min__opt_state_t *opt_state)
+{
+ return opt_state->remove_obsoletes;
+}
+
+/* Based on the operation selected in OPT_STATE, return a descriptive
+ * string of what we plan to do. Allocate that string in RESULT_POOL. */
+static const char *
+processing_title(svn_min__opt_state_t *opt_state,
+ apr_pool_t *result_pool)
+{
+ svn_stringbuf_t *result = svn_stringbuf_create_empty(result_pool);
+ if (opt_state->remove_obsoletes)
+ svn_stringbuf_appendcstr(result, _("Removing obsolete branches"));
+
+ if (opt_state->remove_redundants)
+ {
+ if (svn_stringbuf_isempty(result))
+ svn_stringbuf_appendcstr(result, _("Removing redundant mergeinfo"));
+ else
+ svn_stringbuf_appendcstr(result, _(" and redundant mergeinfo"));
+ }
+
+ if (opt_state->combine_ranges)
+ {
+ if (svn_stringbuf_isempty(result))
+ svn_stringbuf_appendcstr(result, _("Combining revision ranges"));
+ else
+ svn_stringbuf_appendcstr(result, _(", combining revision ranges"));
+ }
+
+ svn_stringbuf_appendcstr(result, " ...\n");
+ return result->data;
+}
+
+/* Sort paths in PATHS and remove all paths whose ancestors are also in
+ * PATHS. */
+static void
+eliminate_subpaths(apr_array_header_t *paths)
+{
+ int source, dest;
+ if (paths->nelts < 2)
+ return;
+
+ svn_sort__array(paths, svn_sort_compare_paths);
+
+ for (source = 1, dest = 0; source < paths->nelts; ++source)
+ {
+ const char *source_path = APR_ARRAY_IDX(paths, source, const char *);
+ const char *dest_path = APR_ARRAY_IDX(paths, dest, const char *);
+
+ if (!svn_dirent_is_ancestor(dest_path, source_path))
+ {
+ ++dest;
+ APR_ARRAY_IDX(paths, dest, const char *) = source_path;
+ }
+ }
+
+ paths->nelts = dest + 1;
+}
+
+/* If enabled by OPT_STATE, show the list of missing paths encountered by
+ * LOOKUP and use LOG to determine their fate. LOG may be NULL.
+ * Use SCRATCH_POOL for temporary allocations. */
+static svn_error_t *
+show_obsoletes_summary(svn_min__branch_lookup_t *lookup,
+ svn_min__log_t *log,
+ svn_min__opt_state_t *opt_state,
+ apr_pool_t *scratch_pool)
+{
+ apr_array_header_t *paths;
+ apr_pool_t *iterpool;
+ int i;
+
+ /* Skip when summary has not been enabled */
+ if (!opt_state->run_analysis || !opt_state->remove_obsoletes)
+ return SVN_NO_ERROR;
+
+ /* Get list of all missing paths. Early exit if there are none. */
+ paths = svn_min__branch_deleted_list(lookup, scratch_pool, scratch_pool);
+ if (!paths->nelts)
+ {
+ SVN_ERR(svn_cmdline_printf(scratch_pool,
+ _("\nNo missing branches were detected.\n\n")));
+ return SVN_NO_ERROR;
+ }
+
+ /* Process them all. */
+ iterpool = svn_pool_create(scratch_pool);
+
+ SVN_ERR(svn_cmdline_printf(iterpool,
+ _("\nEncountered %d missing branch(es):\n"),
+ paths->nelts));
+ for (i = 0; i < paths->nelts; ++i)
+ {
+ svn_revnum_t deletion_rev;
+ apr_array_header_t *surviving_copies = NULL;
+ const char *path = APR_ARRAY_IDX(paths, i, const char *);
+
+ /* For PATH, gather deletion and copy survival info. */
+ svn_pool_clear(iterpool);
+ surviving_copies = apr_array_make(iterpool, 16, sizeof(const char *));
+ if (log)
+ {
+ /* Look for a deletion since the last creation
+ * (or parent creation). */
+ svn_revnum_t creation_rev = svn_min__find_copy(log, path,
+ SVN_INVALID_REVNUM,
+ 0, iterpool);
+ deletion_rev = svn_min__find_deletion(log, path,
+ SVN_INVALID_REVNUM,
+ creation_rev, iterpool);
+ find_surviving_copies(surviving_copies, log, path,
+ SVN_IS_VALID_REVNUM(deletion_rev)
+ ? deletion_rev - 1
+ : deletion_rev,
+ creation_rev,
+ scratch_pool, scratch_pool);
+ }
+ else
+ {
+ deletion_rev = SVN_INVALID_REVNUM;
+ }
+
+ /* Show state / results to the extent we've got them. */
+ if (surviving_copies->nelts)
+ {
+ int k;
+
+ /* There may be thousands of surviving (sub-node) copies.
+ * Restrict the output unless the user asked us to be verbose. */
+ int limit = opt_state->verbose ? INT_MAX : 4;
+
+ /* Reasonably reduce the output. */
+ eliminate_subpaths(surviving_copies);
+ SVN_ERR(svn_cmdline_printf(iterpool,
+ _(" [r%ld, copied or moved] %s\n"),
+ deletion_rev, path));
+ for (k = 0; k < surviving_copies->nelts && k < limit; ++k)
+ {
+ path = APR_ARRAY_IDX(surviving_copies, k, const char *);
+ SVN_ERR(svn_cmdline_printf(iterpool,
+ _(" -> %s\n"),
+ path));
+ }
+
+ if (k < surviving_copies->nelts)
+ SVN_ERR(svn_cmdline_printf(iterpool,
+ _(" (and %d more)\n"),
+ surviving_copies->nelts - k));
+ }
+ else if (SVN_IS_VALID_REVNUM(deletion_rev))
+ SVN_ERR(svn_cmdline_printf(iterpool, _(" [r%ld] %s\n"),
+ deletion_rev, path));
+ else if (log)
+ SVN_ERR(svn_cmdline_printf(iterpool, _(" [potential branch] %s\n"),
+ path));
+ else
+ SVN_ERR(svn_cmdline_printf(iterpool, _(" %s\n"), path));
+ }
+
+ svn_pool_destroy(iterpool);
+
+ return SVN_NO_ERROR;
+}
+
+/* Set the path and url members in BATON to handle the IDX-th target
+ * specified at the command line. Allocate the paths in RESULT_POOL and
+ * use SCRATCH_POOL for temporaries. */
+static svn_error_t *
+add_wc_info(svn_min__cmd_baton_t *baton,
+ int idx,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ svn_min__opt_state_t *opt_state = baton->opt_state;
+ const char *target = APR_ARRAY_IDX(opt_state->targets, idx, const char *);
+ const char *truepath;
+ svn_opt_revision_t peg_revision;
+
+ if (svn_path_is_url(target))
+ return svn_error_createf(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
+ _("'%s' is not a local path"), target);
+
+ SVN_ERR(svn_opt_parse_path(&peg_revision, &truepath, target,
+ scratch_pool));
+ SVN_ERR(svn_dirent_get_absolute(&baton->local_abspath, truepath,
+ result_pool));
+
+ SVN_ERR(svn_client_get_wc_root(&baton->wc_root, baton->local_abspath,
+ baton->ctx, result_pool, scratch_pool));
+ SVN_ERR(svn_client_get_repos_root(&baton->repo_root, NULL,
+ baton->local_abspath, baton->ctx,
+ result_pool, scratch_pool));
+
+ return SVN_NO_ERROR;
+}
+
+/* Set *URL to a URL within CMD_BATON's repository that covers all FS paths
+ * in WC_MERGEINFO. Use SESSION to access the repository. Allocate *URL
+ * in RESULT_POOL and use SCRATCH_POOL for temporary allocations.
+ */
+static svn_error_t *
+get_url(const char **url,
+ apr_array_header_t *wc_mergeinfo,
+ svn_ra_session_t *session,
+ svn_min__cmd_baton_t *cmd_baton,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ /* This is the deepest FS path that we may use. */
+ const char *path = svn_min__common_parent(wc_mergeinfo, scratch_pool,
+ scratch_pool);
+ SVN_ERR_ASSERT(*path == '/');
+ ++path;
+
+ /* While we are not at the repository root, check that PATH actually
+ * exists @HEAD. If it doesn't retry with its parent. */
+ while (strlen(path))
+ {
+ svn_node_kind_t kind;
+ SVN_ERR(svn_ra_check_path(session, path, SVN_INVALID_REVNUM, &kind,
+ scratch_pool));
+ if (kind != svn_node_none)
+ break;
+
+ path = svn_dirent_dirname(path, scratch_pool);
+ }
+
+ /* Construct the result. */
+ *url = svn_path_url_add_component2(cmd_baton->repo_root, path,
+ result_pool);
+
+ return SVN_NO_ERROR;
+}
+
+svn_error_t *
+svn_min__run_normalize(void *baton,
+ apr_pool_t *pool)
+{
+ svn_min__cmd_baton_t *cmd_baton = baton;
+ apr_pool_t *iterpool = svn_pool_create(pool);
+ apr_pool_t *subpool = svn_pool_create(pool);
+ int i;
+
+ for (i = 0; i < cmd_baton->opt_state->targets->nelts; i++)
+ {
+ apr_array_header_t *wc_mergeinfo;
+ svn_min__log_t *log = NULL;
+ svn_ra_session_t *session = NULL;
+ svn_min__branch_lookup_t *lookup = cmd_baton->lookup;
+
+ /* next target */
+ svn_pool_clear(iterpool);
+ SVN_ERR(add_wc_info(baton, i, iterpool, subpool));
+
+ /* scan working copy */
+ svn_pool_clear(subpool);
+ SVN_ERR(svn_min__read_mergeinfo(&wc_mergeinfo, cmd_baton, iterpool,
+ subpool));
+
+ /* Any mergeinfo at all? */
+ if (wc_mergeinfo->nelts == 0)
+ continue;
+
+ /* Open RA session. Even if we don't need it for LOOKUP, checking
+ * the url for the LOG will require the session object. */
+ if ( (!lookup && needs_session(cmd_baton->opt_state))
+ || needs_log(cmd_baton->opt_state))
+ {
+ svn_pool_clear(subpool);
+ SVN_ERR(add_wc_info(baton, i, iterpool, subpool));
+ SVN_ERR(svn_client_open_ra_session2(&session, cmd_baton->repo_root,
+ NULL, cmd_baton->ctx, iterpool,
+ subpool));
+ if (!lookup)
+ lookup = svn_min__branch_lookup_create(session, iterpool);
+ }
+
+ /* fetch log */
+ if (needs_log(cmd_baton->opt_state))
+ {
+ const char *url;
+
+ svn_pool_clear(subpool);
+ SVN_ERR(get_url(&url, wc_mergeinfo, session, cmd_baton, subpool,
+ subpool));
+ SVN_ERR(svn_min__log(&log, url, cmd_baton, iterpool, subpool));
+ }
+
+ /* actual normalization */
+ svn_pool_clear(subpool);
+ if (!cmd_baton->opt_state->quiet)
+ SVN_ERR(svn_cmdline_fputs(processing_title(cmd_baton->opt_state,
+ subpool),
+ stdout, subpool));
+
+ SVN_ERR(normalize(wc_mergeinfo, log, lookup, cmd_baton->opt_state,
+ subpool));
+
+ /* write results to disk */
+ svn_pool_clear(subpool);
+ if (!cmd_baton->opt_state->dry_run)
+ SVN_ERR(svn_min__write_mergeinfo(cmd_baton, wc_mergeinfo, subpool));
+
+ SVN_ERR(svn_min__remove_empty_mergeinfo(wc_mergeinfo));
+
+ /* Show a summary of deleted branches. */
+ SVN_ERR(show_obsoletes_summary(lookup, log, cmd_baton->opt_state,
+ iterpool));
+
+ /* show results */
+ if (!cmd_baton->opt_state->quiet)
+ {
+ SVN_ERR(svn_cmdline_printf(subpool, _("\nRemaining mergeinfo:\n")));
+ SVN_ERR(svn_min__print_mergeinfo_stats(wc_mergeinfo, subpool));
+ }
+ }
+
+ svn_pool_destroy(subpool);
+ svn_pool_destroy(iterpool);
+
+ return SVN_NO_ERROR;
+}
diff --git a/tools/client-side/svn-mergeinfo-normalizer/mergeinfo-normalizer.h b/tools/client-side/svn-mergeinfo-normalizer/mergeinfo-normalizer.h
new file mode 100644
index 0000000..8afff31
--- /dev/null
+++ b/tools/client-side/svn-mergeinfo-normalizer/mergeinfo-normalizer.h
@@ -0,0 +1,398 @@
+/*
+ * mergeinfo-normalizer.h: tool-global functions and structures.
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+/* ==================================================================== */
+
+
+
+#ifndef SVN_MERGEINFO_NORMALIZER_H
+#define SVN_MERGEINFO_NORMALIZER_H
+
+/*** Includes. ***/
+#include <apr_tables.h>
+#include <apr_getopt.h>
+
+#include "svn_client.h"
+#include "svn_opt.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+
+/*** Command dispatch. ***/
+
+/* Hold results of option processing that are shared by multiple
+ commands. */
+typedef struct svn_min__opt_state_t
+{
+ /* After option processing is done, reflects the switch actually
+ given on the command line, or svn_depth_unknown if none. */
+ svn_depth_t depth;
+
+ svn_boolean_t quiet; /* sssh...avoid unnecessary output */
+ svn_boolean_t version; /* print version information */
+ svn_boolean_t verbose; /* be verbose */
+ svn_boolean_t help; /* print usage message */
+ const char *auth_username; /* auth username */
+ const char *auth_password; /* auth password */
+ apr_array_header_t *targets;
+ svn_boolean_t no_auth_cache; /* do not cache authentication information */
+ svn_boolean_t dry_run; /* try operation but make no changes */
+ const char *config_dir; /* over-riding configuration directory */
+ apr_array_header_t *config_options; /* over-riding configuration options */
+ svn_stringbuf_t *filedata; /* contents read from --file argument */
+
+ /* Selected normalization operations. */
+ svn_boolean_t remove_obsoletes;
+ svn_boolean_t combine_ranges;
+ svn_boolean_t remove_redundants;
+ svn_boolean_t remove_redundant_misaligned;
+ svn_boolean_t run_analysis;
+
+ /* trust server SSL certs that would otherwise be rejected as "untrusted" */
+ svn_boolean_t trust_server_cert_unknown_ca;
+ svn_boolean_t trust_server_cert_cn_mismatch;
+ svn_boolean_t trust_server_cert_expired;
+ svn_boolean_t trust_server_cert_not_yet_valid;
+ svn_boolean_t trust_server_cert_other_failure;
+ svn_boolean_t allow_mixed_rev; /* Allow operation on mixed-revision WC */
+ svn_boolean_t non_interactive;
+} svn_min__opt_state_t;
+
+/* Opaque structure allowing to check efficiently whether a given path
+ * exists in the repository @HEAD. */
+typedef struct svn_min__branch_lookup_t svn_min__branch_lookup_t;
+
+/* Type of the baton passed to any of our sub-commands. */
+typedef struct svn_min__cmd_baton_t
+{
+  /* Preprocessed command line options. */
+ svn_min__opt_state_t *opt_state;
+
+ /* Client context. */
+ svn_client_ctx_t *ctx;
+
+ /* Base path of the directory tree currently being processed. */
+ const char *local_abspath;
+
+ /* Working copy root path of LOCAL_ABSPATH. */
+ const char *wc_root;
+
+  /* URL of the root of the corresponding repository. */
+ const char *repo_root;
+
+ /* If the sub-command, e.g. the local lookup only 'remove-branches',
+ * needs a specific repository lookup data structure, set it here.
+   * If this is NULL, the sub-command will use remote lookup to REPO_ROOT. */
+ svn_min__branch_lookup_t *lookup;
+} svn_min__cmd_baton_t;
+
+
+/* Declare all the command procedures */
+svn_opt_subcommand_t
+ svn_min__help,
+ svn_min__normalize,
+ svn_min__analyze,
+ svn_min__remove_branches;
+
+/* See definition in svn.c for documentation. */
+extern const svn_opt_subcommand_desc2_t svn_min__cmd_table[];
+
+/* See definition in svn.c for documentation. */
+extern const int svn_min__global_options[];
+
+/* See definition in svn.c for documentation. */
+extern const apr_getopt_option_t svn_min__options[];
+
+
+/* Our cancellation callback. */
+svn_error_t *
+svn_min__check_cancel(void *baton);
+
+
+/*** Internal API linking the various modules. ***/
+
+/* Scan the working copy sub-tree specified in BATON for mergeinfo and
+ * return them in *RESULT, allocated in RESULT_POOL. The element type is
+ * opaque. Use SCRATCH_POOL for temporary allocations. */
+svn_error_t *
+svn_min__read_mergeinfo(apr_array_header_t **result,
+ svn_min__cmd_baton_t *baton,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool);
+
+/* For the MERGEINFO as returned by svn_min__read_mergeinfo() return the
+ * FS path that is parent to the working copy and all branches mentioned
+ * in the mergeinfo. Allocate the return value in RESULT_POOL and use
+ * SCRATCH_POOL for temporaries. */
+const char *
+svn_min__common_parent(apr_array_header_t *mergeinfo,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool);
+
+/* Return the mergeinfo at index IDX in MERGEINFO.
+ * IDX must be 0 .. MERGEINFO->NELTS-1. */
+svn_mergeinfo_t
+svn_min__get_mergeinfo(apr_array_header_t *mergeinfo,
+ int idx);
+
+/* Return the full info on the mergeinfo at IDX in MERGEINFO. Set *FS_PATH
+ * to the FS path of the respective working copy node, *SUBTREE_RELPATH to
+ * its local absolute path and *PARENT_PATH to the local absolute path of
+ * the working copy node that carries the closest parent mergeinfo.
+ * Set *SUBTREE_MERGEINFO to the parsed mergeinfo at
+ * *SUBTREE_RELPATH and *PARENT_MERGEINFO to the parsed mergeinfo at
+ * *PARENT_PATH. In *SIBLING_MERGEINFO return the list of immediate sub-node
+ * mergeinfo below *PARENT_PATH, including the *SUBTREE_MERGEINFO.
+ *
+ * If there is no parent mergeinfo, *PARENT_PATH will be "" and
+ * *PARENT_MERGEINFO will be NULL. If IDX is not a valid array index,
+ * "" will be returned for all paths and all mergeinfo will be NULL.
+ *
+ * Note that the returned data is shared with MERGEINFO and has the same
+ * lifetime. It is perfectly legal to modify the svn_mergeinfo_t hashes
+ * and store the result using svn_min__write_mergeinfo. */
+void
+svn_min__get_mergeinfo_pair(const char **fs_path,
+ const char **parent_path,
+ const char **subtree_relpath,
+ svn_mergeinfo_t *parent_mergeinfo,
+ svn_mergeinfo_t *subtree_mergeinfo,
+ apr_array_header_t **siblings_mergeinfo,
+ apr_array_header_t *mergeinfo,
+ int idx);
+
+/* Search SIBLING_MERGEINFO for mergeinfo that intersects PARENT_PATH
+ * and RELEVANT_RANGES. Return the FS path to range list hash in
+ * *SIBLING_RANGES, allocated in RESULT_POOL. Use SCRATCH_POOL for
+ * temporary allocations.
+ */
+svn_error_t *
+svn_min__sibling_ranges(apr_hash_t **sibling_ranges,
+ apr_array_header_t *sibling_mergeinfo,
+ const char *parent_path,
+ svn_rangelist_t *relevant_ranges,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool);
+
+/* Store the MERGEINFO in the working copy specified by BATON. Delete
+ * the mergeinfo on those nodes where it is empty but keep the empty data
+ * in MERGEINFO. Use SCRATCH_POOL for temporary allocations. */
+svn_error_t *
+svn_min__write_mergeinfo(svn_min__cmd_baton_t *baton,
+ apr_array_header_t *mergeinfo,
+ apr_pool_t *scratch_pool);
+
+/* Remove entries with empty mergeinfo from MERGEINFO. */
+svn_error_t *
+svn_min__remove_empty_mergeinfo(apr_array_header_t *mergeinfo);
+
+/* Print statistics for WC_MERGEINFO to console. Use SCRATCH_POOL for
+ * temporaries. */
+svn_error_t *
+svn_min__print_mergeinfo_stats(apr_array_header_t *wc_mergeinfo,
+ apr_pool_t *scratch_pool);
+
+/* Opaque data structure containing the log / history downloaded from the
+ * repository. */
+typedef struct svn_min__log_t svn_min__log_t;
+
+/* Data structure describing a copy operation as part of svn_min__log_t. */
+typedef struct svn_min__copy_t
+{
+ /* Copy target FS path. */
+ const char *path;
+
+ /* Copy target revision. */
+ svn_revnum_t revision;
+
+ /* Copy source FS path. */
+ const char *copyfrom_path;
+
+ /* Copy source revision. */
+ svn_revnum_t copyfrom_revision;
+} svn_min__copy_t;
+
+/* Fetch the full *LOG for the given URL using the context in BATON.
+ * Allocate *LOG in RESULT_POOL and use SCRATCH_POOL for temporaries. */
+svn_error_t *
+svn_min__log(svn_min__log_t **log,
+ const char *url,
+ svn_min__cmd_baton_t *baton,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool);
+
+/* Scan LOG and determine what revisions in RANGES actually operate on PATH
+ * or its sub-nodes. Return those revisions, allocated in RESULT_POOL.
+ * Note that parent path changes don't count as operative within PATH. */
+svn_rangelist_t *
+svn_min__operative(svn_min__log_t *log,
+ const char *path,
+ svn_rangelist_t *ranges,
+ apr_pool_t *result_pool);
+
+/* Scan LOG and determine what revisions in RANGES are operative on PATH
+ * but outside SUBTREE (possibly but not exclusively modifying paths within
+ * SUBTREE). Return those revisions, allocated in RESULT_POOL. */
+svn_rangelist_t *
+svn_min__operative_outside_subtree(svn_min__log_t *log,
+ const char *path,
+ const char *subtree,
+ svn_rangelist_t *ranges,
+ apr_pool_t *result_pool);
+
+/* Scan LOG and return those revisions from RANGES that have changes
+ * operative on the PATH subtree and where at least one of these changes
+ * are not covered by any entry in SIBLING_RANGES.
+ *
+ * Allocate the result in RESULT_POOL and use SCRATCH_POOL for temporaries.
+ */
+svn_rangelist_t *
+svn_min__operative_outside_all_subtrees(svn_min__log_t *log,
+ const char *path,
+ svn_rangelist_t *ranges,
+ apr_hash_t *sibling_ranges,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool);
+
+/* Scan LOG from START_REV down to END_REV and find the latest deletion of
+ * PATH or a parent thereof and return the revision that contains the
+ * deletion. Return SVN_INVALID_REVNUM if no such deletion could be found.
+ * Use SCRATCH_POOL for temporaries. */
+svn_revnum_t
+svn_min__find_deletion(svn_min__log_t *log,
+ const char *path,
+ svn_revnum_t start_rev,
+ svn_revnum_t end_rev,
+ apr_pool_t *scratch_pool);
+
+/* Scan LOG for deletions of PATH or any of its parents. Return an array,
+ * allocated in RESULT_POOL, containing all svn_revnum_t that contain such
+ * deletions in ascending order. Use SCRATCH_POOL for temporaries. */
+apr_array_header_t *
+svn_min__find_deletions(svn_min__log_t *log,
+ const char *path,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool);
+
+/* Scan LOG for the latest copy of PATH or any of its parents no later than
+ * START_REV and no earlier than END_REV. Return SVN_INVALID_REVNUM if no
+ * such revision exists. Use SCRATCH_POOL for temporaries. */
+svn_revnum_t
+svn_min__find_copy(svn_min__log_t *log,
+ const char *path,
+ svn_revnum_t start_rev,
+ svn_revnum_t end_rev,
+ apr_pool_t *scratch_pool);
+
+/* Scan LOG for copies of PATH, any of its parents or sub-nodes made from
+ * revisions no later than START_REV and no earlier than END_REV. Return
+ * those copies as svn_min__copy_t* in an array allocated in RESULT_POOL.
+ * The copy objects themselves are shared with LOG. Use SCRATCH_POOL
+ * for temporary allocations. */
+apr_array_header_t *
+svn_min__get_copies(svn_min__log_t *log,
+ const char *path,
+ svn_revnum_t start_rev,
+ svn_revnum_t end_rev,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool);
+
+/* Return the opaque history object for PATH, starting at START_REV and
+ * going back to its initial creation or END_REV - whichever is latest.
+ *
+ * The history is a sequence of segments, each describing that the node
+ * existed at a given path for a given range of revisions. Between two
+ * segments, there must have been a copy operation.
+ *
+ * Allocate the result in RESULT_POOL and use SCRATCH_POOL for temporaries.
+ */
+apr_array_header_t *
+svn_min__get_history(svn_min__log_t *log,
+ const char *path,
+ svn_revnum_t start_rev,
+ svn_revnum_t end_rev,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool);
+
+/* Return the (potentially empty) parts of LHS and RHS where their history
+ * overlaps, i.e. those (partial) history segments where they have the same
+ * path in the same revisions. Allocate the result in RESULT_POOL. */
+apr_array_header_t *
+svn_min__intersect_history(apr_array_header_t *lhs,
+ apr_array_header_t *rhs,
+ apr_pool_t *result_pool);
+
+/* Return the revision ranges that are covered by the segments in HISTORY.
+ * Allocate the result in RESULT_POOL. */
+svn_rangelist_t *
+svn_min__history_ranges(apr_array_header_t *history,
+ apr_pool_t *result_pool);
+
+/* Allocate a new path lookup object in RESULT_POOL and make it use SESSION
+ * for any future repository lookups. */
+svn_min__branch_lookup_t *
+svn_min__branch_lookup_create(svn_ra_session_t *session,
+ apr_pool_t *result_pool);
+
+/* Allocate a new path lookup object in RESULT_POOL and set the list of
+ * missing paths to PATHS. This object will never contact the repository. */
+svn_min__branch_lookup_t *
+svn_min__branch_lookup_from_paths(apr_array_header_t *paths,
+ apr_pool_t *result_pool);
+
+/* Set *DELETED to TRUE, if we can confirm using LOOKUP that BRANCH does
+ * not exist @HEAD. If LOCAL_ONLY is set or if LOOKUP has not been created
+ * with a repository session, base that judgement on information in LOOKUP
+ * alone and report FALSE for unknown paths. Otherwise contact the
+ * repository for unknown paths and store the result in LOOKUP.
+ * Use SCRATCH_POOL for temporary allocations. */
+svn_error_t *
+svn_min__branch_lookup(svn_boolean_t *deleted,
+ svn_min__branch_lookup_t *lookup,
+ const char *branch,
+ svn_boolean_t local_only,
+ apr_pool_t *scratch_pool);
+
+/* Return an array of const char *, allocated in RESULT_POOL, of all deleted
+ * FS paths we encountered using LOOKUP. We only return the respective
+ * top-most missing paths - not any of their sub-nodes. Use SCRATCH_POOL
+ * for temporary allocations. */
+apr_array_header_t *
+svn_min__branch_deleted_list(svn_min__branch_lookup_t *lookup,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool);
+
+/* Run our common processing code shared between all sub-commands.
+ * Take the sub-command behaviour from the flags in BATON. */
+svn_error_t *
+svn_min__run_normalize(void *baton,
+ apr_pool_t *pool);
+
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus */
+
+#endif /* SVN_MERGEINFO_NORMALIZER_H */
diff --git a/tools/client-side/svn-mergeinfo-normalizer/missing-branches.c b/tools/client-side/svn-mergeinfo-normalizer/missing-branches.c
new file mode 100644
index 0000000..30cb429
--- /dev/null
+++ b/tools/client-side/svn-mergeinfo-normalizer/missing-branches.c
@@ -0,0 +1,365 @@
+/*
+ * missing-branches.c -- Efficiently scan for missing branches.
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+/* ==================================================================== */
+
+
+
+/*** Includes. ***/
+
+#include <assert.h>
+
+#include "svn_hash.h"
+#include "svn_pools.h"
+#include "private/svn_sorts_private.h"
+#include "private/svn_subr_private.h"
+
+#include "mergeinfo-normalizer.h"
+
+
+/*** Code. ***/
+
+struct svn_min__branch_lookup_t
+{
+ /* Connection to the repository where we are looking for paths.
+ If this is NULL, then only local lookups may be performed. */
+ svn_ra_session_t *session;
+
+ /* Keyed by const char * FS paths that are known not to exist.
+ It is implied that sub-paths won't and can't exist either. */
+ apr_hash_t *deleted;
+
+ /* Keyed by const char * FS paths that are known to exist. */
+ apr_hash_t *existing;
+};
+
+/* Return the location of the last '/' in PATH before LEN.
+ Return 0 for root and empty paths. PATH must be a canonical FS path. */
+static apr_size_t
+parent_segment(const char *path,
+ apr_size_t len)
+{
+ assert(path[0] == '/');
+
+ if (len <= 1)
+ return 0;
+
+ --len;
+ while (path[len] != '/')
+ --len;
+
+ return len;
+}
+
+/* Look for BRANCH in LOOKUP without connecting to the server. Return
+ * svn_tristate_true, if it is known to exist, svn_tristate_false if it is
+ * known to not exist. Otherwise return svn_tristate_unknown. */
+static svn_tristate_t
+local_lookup(const svn_min__branch_lookup_t *lookup,
+ const char *branch)
+{
+ apr_size_t len;
+
+ /* Non-canonical paths are bad but we let the remote lookup take care of
+ * them. Our hashes simply have no info on them. */
+ if (branch[0] != '/')
+ return svn_tristate_unknown;
+
+ /* Hard-coded: "/" always exists. */
+ if (branch[1] == '\0')
+ return svn_tristate_true;
+
+ /* For every existing path that we encountered, there is an entry in the
+     EXISTING hash. So, we can just use that. */
+ len = strlen(branch);
+ if (apr_hash_get(lookup->existing, branch, len))
+ return svn_tristate_true;
+
+ /* Not known to exist and might be known to not exist. We only record
+ the top level deleted directory for DELETED branches, so we need to
+ walk up the path until we either find that deletion or an existing
+ path. In the latter case, we don't know what happened to the levels
+ below that, including BRANCH. */
+ while (len > 0)
+ {
+ /* Known deleted? Note that we checked BRANCH for existence but not
+ for deletion, yet. */
+ if (apr_hash_get(lookup->deleted, branch, len))
+ return svn_tristate_false;
+
+ /* Parent known to exist?
+ Then, we don't know what happened to the BRANCH. */
+ len = parent_segment(branch, len);
+
+ if (apr_hash_get(lookup->existing, branch, len))
+ return svn_tristate_unknown;
+ }
+
+ /* We don't know. */
+ return svn_tristate_unknown;
+}
+
+/* Set *DELETED to TRUE, if PATH can't be found at HEAD in SESSION.
+ Use SCRATCH_POOL for temporary allocations. */
+static svn_error_t *
+path_deleted(svn_boolean_t *deleted,
+ svn_ra_session_t *session,
+ const char *path,
+ apr_pool_t *scratch_pool)
+{
+ svn_node_kind_t kind;
+
+ SVN_ERR_ASSERT(*path == '/');
+ SVN_ERR(svn_ra_check_path(session, path + 1, SVN_INVALID_REVNUM, &kind,
+ scratch_pool));
+ *deleted = kind == svn_node_none;
+
+ return SVN_NO_ERROR;
+}
+
+/* Chop the last segment off PATH. PATH must be a canonical FS path.
+ No-op for the root path. */
+static void
+to_parent(svn_stringbuf_t *path)
+{
+ path->len = parent_segment(path->data, path->len);
+ if (path->len == 0)
+ path->len = 1;
+
+ path->data[path->len] = '\0';
+}
+
+/* Contact the repository used by LOOKUP and set *DELETED to TRUE, if path
+ BRANCH does not exist at HEAD. Cache the lookup results in LOOKUP and
+ use SCRATCH_POOL for temporary allocations. Call this only if
+ local_lookup returned svn_tristate_unknown. */
+static svn_error_t *
+remote_lookup(svn_boolean_t *deleted,
+ const svn_min__branch_lookup_t *lookup,
+ const char *branch,
+ apr_pool_t *scratch_pool)
+{
+ svn_stringbuf_t *path = svn_stringbuf_create(branch, scratch_pool);
+
+ /* We shall call this function only after the local lookup failed. */
+ assert(local_lookup(lookup, branch) == svn_tristate_unknown);
+
+ /* Actual repository lookup. */
+ SVN_ERR(path_deleted(deleted, lookup->session, branch, scratch_pool));
+
+ /* If the path did not exist, store the furthest non-existent parent. */
+ if (*deleted)
+ {
+ apr_pool_t *iterpool;
+ svn_boolean_t is_deleted;
+ const char *deleted_path;
+ apr_size_t len;
+
+ /* Find the closest parent that exists. Often, that is something like
+ "branches" and the next level already does not exist. So, use that
+ as a heuristics to minimize the number of lookups. */
+
+ /* Set LEN to the length of the last unknown to exist sub-path. */
+ svn_stringbuf_t *temp = svn_stringbuf_dup(path, scratch_pool);
+ do
+ {
+ len = temp->len;
+ to_parent(temp);
+ }
+ while (local_lookup(lookup, temp->data) != svn_tristate_true);
+
+ /* Check whether that path actually does not exist. */
+ if (len == path->len)
+ {
+ /* We already know that the full PATH does not exist.
+ We get here if the immediate parent of PATH is known to exist. */
+ is_deleted = TRUE;
+ }
+ else
+ {
+ temp = svn_stringbuf_ncreate(branch, len, scratch_pool);
+ SVN_ERR(path_deleted(&is_deleted, lookup->session, temp->data,
+ scratch_pool));
+ }
+
+ /* Whether or not that path does not exist, we know now and should
+ store that in LOOKUP. */
+ if (is_deleted)
+ {
+ /* We are almost done here. The existing parent is already in
+ LOOKUP and we only need to add the deleted path. */
+ deleted_path = apr_pstrmemdup(apr_hash_pool_get(lookup->deleted),
+ branch, len);
+ apr_hash_set(lookup->deleted, deleted_path, len, deleted_path);
+
+ return SVN_NO_ERROR;
+ }
+ else
+ {
+ /* We just learned that TEMP does exist. Remember this fact and
+ later continue the search for the deletion boundary. */
+ const char *hash_path
+ = apr_pstrmemdup(apr_hash_pool_get(lookup->existing), temp->data,
+ temp->len);
+
+ /* Only add HASH_PATH. Its parents are already in that hash. */
+ apr_hash_set(lookup->existing, hash_path, temp->len, hash_path);
+ }
+
+ /* Find the closest parent that does exist.
+ "/" exists, hence, this will terminate. */
+ iterpool = svn_pool_create(scratch_pool);
+ do
+ {
+ svn_pool_clear(iterpool);
+
+ len = path->len;
+ to_parent(path);
+
+ /* We often know that "/branches" etc. exist. So, we can skip
+ the final lookup in that case. */
+ if (local_lookup(lookup, path->data) == svn_tristate_true)
+ break;
+
+ /* Get the info from the repository. */
+ SVN_ERR(path_deleted(&is_deleted, lookup->session, path->data,
+ iterpool));
+ }
+ while (is_deleted);
+ svn_pool_destroy(iterpool);
+
+      /* PATH exists, its sub-path of length LEN does not. */
+ deleted_path = apr_pstrmemdup(apr_hash_pool_get(lookup->deleted),
+ branch, len);
+ apr_hash_set(lookup->deleted, deleted_path, len, deleted_path);
+ }
+
+  /* PATH and all its parents exist. Add them to the EXISTING hash.
+ Make sure to allocate only the longest path and then reference
+ sub-sequences of it to keep memory usage in check. */
+ if (!apr_hash_get(lookup->existing, path->data, path->len))
+ {
+ const char *hash_path
+ = apr_pstrmemdup(apr_hash_pool_get(lookup->existing), path->data,
+ path->len);
+
+      /* Note that we don't need to check for existing entries here because
+ the APR hash will reuse existing nodes and we are not allocating
+ anything else here. So, this does not allocate duplicate nodes. */
+ for (; path->len > 1; to_parent(path))
+ apr_hash_set(lookup->existing, hash_path, path->len, hash_path);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+svn_min__branch_lookup_t *
+svn_min__branch_lookup_create(svn_ra_session_t *session,
+ apr_pool_t *result_pool)
+{
+ svn_min__branch_lookup_t *result = apr_pcalloc(result_pool,
+ sizeof(*result));
+ result->session = session;
+ result->deleted = svn_hash__make(result_pool);
+ result->existing = svn_hash__make(result_pool);
+
+ return result;
+}
+
+svn_min__branch_lookup_t *
+svn_min__branch_lookup_from_paths(apr_array_header_t *paths,
+ apr_pool_t *result_pool)
+{
+ svn_min__branch_lookup_t *result
+ = svn_min__branch_lookup_create(NULL, result_pool);
+
+ int i;
+ for (i = 0; i < paths->nelts; ++i)
+ {
+ const char *path = APR_ARRAY_IDX(paths, i, const char *);
+ if (strlen(path) > 0)
+ {
+ path = apr_pstrdup(result_pool, path);
+ svn_hash_sets(result->deleted, path, path);
+ }
+ }
+
+ return result;
+}
+
+svn_error_t *
+svn_min__branch_lookup(svn_boolean_t *deleted,
+ svn_min__branch_lookup_t *lookup,
+ const char *branch,
+ svn_boolean_t local_only,
+ apr_pool_t *scratch_pool)
+{
+ switch (local_lookup(lookup, branch))
+ {
+ case svn_tristate_false:
+ *deleted = TRUE;
+ return SVN_NO_ERROR;
+
+ case svn_tristate_true:
+ *deleted = FALSE;
+ return SVN_NO_ERROR;
+
+ default:
+ /* If the state is unknown and we are only allowed to do a local
+ lookup, default to a possible false negative. Note that not
+ having the session available implies local-only lookup. */
+ if (local_only || !lookup->session)
+ {
+ *deleted = FALSE;
+ return SVN_NO_ERROR;
+ }
+ }
+
+ return svn_error_trace(remote_lookup(deleted, lookup, branch,
+ scratch_pool));
+}
+
+apr_array_header_t *
+svn_min__branch_deleted_list(svn_min__branch_lookup_t *lookup,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ apr_array_header_t *result = apr_array_make(result_pool,
+ apr_hash_count(lookup->deleted),
+ sizeof(const char *));
+ apr_hash_index_t *hi;
+ for (hi = apr_hash_first(scratch_pool, lookup->deleted);
+ hi;
+ hi = apr_hash_next(hi))
+ {
+ const char *path = apr_hash_this_key(hi);
+ apr_size_t len = apr_hash_this_key_len(hi);
+
+ APR_ARRAY_PUSH(result, const char *) = apr_pstrmemdup(result_pool,
+ path, len);
+ }
+
+ svn_sort__array(result, svn_sort_compare_paths);
+
+ return result;
+}
diff --git a/tools/client-side/svn-mergeinfo-normalizer/normalize-cmd.c b/tools/client-side/svn-mergeinfo-normalizer/normalize-cmd.c
new file mode 100644
index 0000000..677d1ca
--- /dev/null
+++ b/tools/client-side/svn-mergeinfo-normalizer/normalize-cmd.c
@@ -0,0 +1,53 @@
+/*
+ * normalize-cmd.c -- Elide mergeinfo from sub-nodes
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+/* ==================================================================== */
+
+
+
+/*** Includes. ***/
+
+#include "mergeinfo-normalizer.h"
+
+
+/*** Code. ***/
+
+/* This implements the `svn_opt_subcommand_t' interface. */
+svn_error_t *
+svn_min__normalize(apr_getopt_t *os,
+ void *baton,
+ apr_pool_t *pool)
+{
+ svn_min__cmd_baton_t *cmd_baton = baton;
+
+ /* If no option is given, default to "remove redundant sub-node m/i". */
+ if ( !cmd_baton->opt_state->remove_redundants
+ && !cmd_baton->opt_state->remove_obsoletes
+ && !cmd_baton->opt_state->combine_ranges
+ && !cmd_baton->opt_state->remove_redundant_misaligned)
+ cmd_baton->opt_state->remove_redundants = TRUE;
+
+ SVN_ERR(svn_min__run_normalize(baton, pool));
+
+ return SVN_NO_ERROR;
+}
diff --git a/tools/client-side/svn-mergeinfo-normalizer/remove-branches-cmd.c b/tools/client-side/svn-mergeinfo-normalizer/remove-branches-cmd.c
new file mode 100644
index 0000000..ccdee84
--- /dev/null
+++ b/tools/client-side/svn-mergeinfo-normalizer/remove-branches-cmd.c
@@ -0,0 +1,59 @@
+/*
+ * remove-branches-cmd.c -- Remove specific branch entries from all mergeinfo
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+/* ==================================================================== */
+
+
+
+/*** Includes. ***/
+
+#include "mergeinfo-normalizer.h"
+
+#include "svn_private_config.h"
+
+
+/*** Code. ***/
+
+/* This implements the `svn_opt_subcommand_t' interface. */
+svn_error_t *
+svn_min__remove_branches(apr_getopt_t *os,
+ void *baton,
+ apr_pool_t *pool)
+{
+ apr_array_header_t *branches;
+ svn_min__cmd_baton_t *cmd_baton = baton;
+
+ if (! cmd_baton->opt_state->filedata)
+ return svn_error_create(SVN_ERR_INCORRECT_PARAMS, NULL,
+ _("Parameter --file not given"));
+
+ branches = svn_cstring_split(cmd_baton->opt_state->filedata->data,
+ "\n\r", FALSE, pool);
+
+ cmd_baton->opt_state->remove_obsoletes = TRUE;
+ cmd_baton->lookup = svn_min__branch_lookup_from_paths(branches, pool);
+
+ SVN_ERR(svn_min__run_normalize(baton, pool));
+
+ return SVN_NO_ERROR;
+}
diff --git a/tools/client-side/svn-mergeinfo-normalizer/svn-mergeinfo-normalizer.c b/tools/client-side/svn-mergeinfo-normalizer/svn-mergeinfo-normalizer.c
new file mode 100644
index 0000000..c0c7fc3
--- /dev/null
+++ b/tools/client-side/svn-mergeinfo-normalizer/svn-mergeinfo-normalizer.c
@@ -0,0 +1,974 @@
+/*
+ * svn-mergeinfo-normalizer.c: MI normalization tool main file.
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+/* ==================================================================== */
+
+
+
+/*** Includes. ***/
+
+#include <string.h>
+#include <assert.h>
+
+#include <apr_strings.h>
+#include <apr_tables.h>
+#include <apr_general.h>
+#include <apr_signal.h>
+
+#include "svn_cmdline.h"
+#include "svn_pools.h"
+#include "svn_wc.h"
+#include "svn_client.h"
+#include "svn_config.h"
+#include "svn_string.h"
+#include "svn_dirent_uri.h"
+#include "svn_path.h"
+#include "svn_delta.h"
+#include "svn_diff.h"
+#include "svn_error.h"
+#include "svn_io.h"
+#include "svn_opt.h"
+#include "svn_utf.h"
+#include "svn_auth.h"
+#include "svn_hash.h"
+#include "svn_version.h"
+#include "mergeinfo-normalizer.h"
+
+#include "private/svn_opt_private.h"
+#include "private/svn_cmdline_private.h"
+#include "private/svn_subr_private.h"
+
+#include "svn_private_config.h"
+
+
+/*** Option Processing ***/
+
+/* Add an identifier here for long options that don't have a short
+ option. Options that have both long and short options should just
+ use the short option letter as identifier. */
+typedef enum svn_min__longopt_t {
+ opt_auth_password = SVN_OPT_FIRST_LONGOPT_ID,
+ opt_auth_password_from_stdin,
+ opt_auth_username,
+ opt_config_dir,
+ opt_config_options,
+ opt_dry_run,
+ opt_no_auth_cache,
+ opt_targets,
+ opt_depth,
+ opt_version,
+ opt_non_interactive,
+ opt_force_interactive,
+ opt_trust_server_cert,
+ opt_trust_server_cert_unknown_ca,
+ opt_trust_server_cert_cn_mismatch,
+ opt_trust_server_cert_expired,
+ opt_trust_server_cert_not_yet_valid,
+ opt_trust_server_cert_other_failure,
+ opt_allow_mixed_revisions,
+ opt_remove_obsoletes,
+ opt_remove_redundant,
+ opt_combine_ranges,
+ opt_remove_redundant_misaligned
+} svn_cl__longopt_t;
+
+
+/* Option codes and descriptions for the command line client.
+ *
+ * The entire list must be terminated with an entry of nulls.
+ */
+const apr_getopt_option_t svn_min__options[] =
+{
+ {"help", 'h', 0, N_("show help on a subcommand")},
+ {NULL, '?', 0, N_("show help on a subcommand")},
+ {"quiet", 'q', 0, N_("print nothing, or only summary information")},
+ {"version", opt_version, 0, N_("show program version information")},
+ {"file", 'F', 1, N_("read list of branches to remove from file ARG.\n"
+ " "
+ "Each branch given on a separate line with no\n"
+ " "
+ "extra whitespace.")},
+ {"verbose", 'v', 0, N_("print extra information")},
+ {"username", opt_auth_username, 1, N_("specify a username ARG")},
+ {"password", opt_auth_password, 1,
+ N_("specify a password ARG (caution: on many operating\n"
+ " "
+ "systems, other users will be able to see this)")},
+ {"password-from-stdin",
+ opt_auth_password_from_stdin, 0,
+ N_("read password from stdin")},
+ {"targets", opt_targets, 1,
+ N_("pass contents of file ARG as additional args")},
+ {"depth", opt_depth, 1,
+ N_("limit operation by depth ARG ('empty', 'files',\n"
+ " "
+ "'immediates', or 'infinity')")},
+ {"no-auth-cache", opt_no_auth_cache, 0,
+ N_("do not cache authentication tokens")},
+ {"trust-server-cert", opt_trust_server_cert, 0,
+ N_("deprecated; same as --trust-unknown-ca")},
+ {"trust-unknown-ca", opt_trust_server_cert_unknown_ca, 0,
+ N_("with --non-interactive, accept SSL server\n"
+ " "
+ "certificates from unknown certificate authorities")},
+ {"trust-cn-mismatch", opt_trust_server_cert_cn_mismatch, 0,
+ N_("with --non-interactive, accept SSL server\n"
+ " "
+ "certificates even if the server hostname does not\n"
+ " "
+ "match the certificate's common name attribute")},
+ {"trust-expired", opt_trust_server_cert_expired, 0,
+ N_("with --non-interactive, accept expired SSL server\n"
+ " "
+ "certificates")},
+ {"trust-not-yet-valid", opt_trust_server_cert_not_yet_valid, 0,
+ N_("with --non-interactive, accept SSL server\n"
+ " "
+ "certificates from the future")},
+ {"trust-other-failure", opt_trust_server_cert_other_failure, 0,
+ N_("with --non-interactive, accept SSL server\n"
+ " "
+ "certificates with failures other than the above")},
+ {"non-interactive", opt_non_interactive, 0,
+ N_("do no interactive prompting (default is to prompt\n"
+ " "
+ "only if standard input is a terminal device)")},
+ {"force-interactive", opt_force_interactive, 0,
+ N_("do interactive prompting even if standard input\n"
+ " "
+ "is not a terminal device")},
+ {"dry-run", opt_dry_run, 0,
+ N_("try operation but make no changes")},
+ {"config-dir", opt_config_dir, 1,
+ N_("read user configuration files from directory ARG")},
+ {"config-option", opt_config_options, 1,
+ N_("set user configuration option in the format:\n"
+ " "
+ " FILE:SECTION:OPTION=[VALUE]\n"
+ " "
+ "For example:\n"
+ " "
+ " servers:global:http-library=serf")},
+ {"allow-mixed-revisions", opt_allow_mixed_revisions, 0,
+ N_("Allow operation on mixed-revision working copy.\n"
+ " "
+ "Use of this option is not recommended!\n"
+ " "
+ "Please run 'svn update' instead.")},
+
+ {"remove-obsoletes", opt_remove_obsoletes, 0,
+ N_("Remove mergeinfo for deleted branches.")},
+ {"remove-redundant", opt_remove_redundant, 0,
+ N_("Remove mergeinfo on sub-nodes if it is\n"
+ " "
+ "redundant with the parent mergeinfo.")},
+ {"remove-redundant-misaligned", opt_remove_redundant_misaligned, 0,
+ N_("Remove mergeinfo of a misaligned branch if it\n"
+ " "
+ "is already covered by a correctly aligned one.\n")},
+ {"combine-ranges", opt_combine_ranges, 0,
+ N_("Try to combine adjacent revision ranges\n"
+ " "
+ "to reduce the size of the mergeinfo.")},
+
+ {0, 0, 0, 0},
+};
+
+
+
+/*** Command dispatch. ***/
+
+/* Our array of available subcommands.
+ *
+ * The entire list must be terminated with an entry of nulls.
+ *
+ * In most of the help text "PATH" is used where a working copy path is
+ * required, "URL" where a repository URL is required and "TARGET" when
+ * either a path or a url can be used. Hmm, should this be part of the
+ * help text?
+ */
+
+/* Options that apply to all commands. (While not every command may
+ currently require authentication or be interactive, allowing every
+ command to take these arguments allows scripts to just pass them
+ willy-nilly to every invocation of 'svn') . */
+const int svn_min__global_options[] =
+{ opt_auth_username, opt_auth_password, opt_auth_password_from_stdin,
+ opt_no_auth_cache, opt_non_interactive, opt_force_interactive,
+ opt_trust_server_cert, opt_trust_server_cert_unknown_ca,
+ opt_trust_server_cert_cn_mismatch, opt_trust_server_cert_expired,
+ opt_trust_server_cert_not_yet_valid, opt_trust_server_cert_other_failure,
+ opt_config_dir, opt_config_options, 0
+};
+
+const svn_opt_subcommand_desc2_t svn_min__cmd_table[] =
+{
+ { "help", svn_min__help, {"?", "h"}, N_
+ ("Describe the usage of this program or its subcommands.\n"
+ "usage: help [SUBCOMMAND...]\n"),
+ {0} },
+
+ /* This command is also invoked if we see option "--help", "-h" or "-?". */
+
+ { "analyze", svn_min__analyze, { "analyse" }, N_
+ ("Generate a report of which part of the sub-tree mergeinfo can be\n"
+ "removed and which part can't.\n"
+ "usage: analyze [WCPATH...]\n"
+ "\n"
+ " If neither --remove-obsoletes, --remove-redundant nor --combine-ranges\n"
+ " option is given, all three will be used implicitly.\n"
+ "\n"
+ " In verbose mode, the command will behave just like 'normalize --dry-run'\n"
+ " but will show an additional summary of all deleted branches that were\n"
+ " encountered plus the revision of their latest deletion (if available).\n"
+ "\n"
+ " In non-verbose mode, the per-node output does not give the parent path,\n"
+ " no successful elisions and branch removals nor the list of remaining\n"
+ " branches.\n"
+ ),
+ {opt_targets, opt_depth, 'v',
+ opt_remove_obsoletes, opt_remove_redundant,
+ opt_remove_redundant_misaligned, opt_combine_ranges} },
+
+ { "normalize", svn_min__normalize, { 0 }, N_
+ ("Normalize / reduce the mergeinfo throughout the working copy sub-tree.\n"
+ "usage: normalize [WCPATH...]\n"
+ "\n"
+ " If neither --remove-obsoletes, --remove-redundant, --combine-ranges\n"
+ " nor --remove-redundant-misaligned option is given, --remove-redundant\n"
+ " will be used implicitly.\n"
+ "\n"
+ " In non-verbose mode, only general progress as well as a summary before\n"
+ " and after the normalization process will be shown. Note that sub-node\n"
+ " mergeinfo which could be removed entirely does not contribute to the\n"
+ " number of removed branch lines. Similarly, the number of revision\n"
+ " ranges combined only refers to the mergeinfo lines still present after\n"
+ " the normalization process. To get total numbers, compare the initial\n"
+ " with the final mergeinfo statistics.\n"
+ "\n"
+ " The detailed operation log in verbose mode replaces the progress display.\n"
+ " For each node with mergeinfo, the nearest parent node with mergeinfo is\n"
+ " given - if there is one and the result of trying to remove the mergeinfo\n"
+ " is shown for each branch. The various outputs are:\n"
+ "\n"
+ " elide redundant branch - Revision ranges are the same as in the parent.\n"
+ " Mergeinfo for this branch can be elided.\n"
+ " elide branch - Not an exact match with the parent but the\n"
+ " differences could be eliminated by ...\n"
+ " revisions implied in parent\n"
+ " ... ignoring these revisions because they are\n"
+ " part of the parent's copy history.\n"
+ " revisions moved to parent\n"
+ " ... adding these revisions to the parent node\n"
+ " because they only affect the current sub-tree.\n"
+ " revisions implied in sub-tree\n"
+ " ... ignoring these revisions because they are\n"
+ " part of the sub-tree's copy history.\n"
+ " revisions inoperative in sub-node\n"
+ " ... removing these revisions from the sub-tree\n"
+ " mergeinfo because they did not change it.\n"
+ " remove deleted branch - The branch no longer exists in the repository.\n"
+ " We will remove its mergeinfo line.\n"
+ " elide misaligned branch- All revisions merged from that misaligned\n"
+ " branch have also been merged from the likely\n"
+ " correctly aligned branch.\n"
+ " CANNOT elide branch - Mergeinfo differs from parent's significantly\n"
+ " and can't be elided because ...\n"
+ " revisions not movable to parent\n"
+ " ... these revisions affect the parent tree\n"
+ " outside the current sub-tree but are only\n"
+ " listed as merged in the current sub-tree.\n"
+ " revisions missing in sub-node\n"
+ " ... these revisions affect current sub-tree\n"
+ " but are only listed as merged for the parent.\n"
+ " keep POTENTIAL branch - The path does not exist @HEAD but may appear\n"
+ " in the future as the result of catch-up merges\n"
+ " from other branches.\n"
+ " has SURVIVING COPIES: - The path does not exist @HEAD but copies of it\n"
+ " or its sub-nodes do. This mergeinfo may be\n"
+ " relevant to them and will be kept.\n"
+ " NON-RECURSIVE RANGE(S) found\n"
+ " - Those revisions had been merged into a sparse\n"
+ " working copy resulting in incomplete merges.\n"
+ " The sub-tree mergeinfo cannot be elided.\n"
+ " MISSING in parent - The branch for the parent node exists in the\n"
+ " repository but is not in its mergeinfo.\n"
+ " The sub-tree mergeinfo will not be elided.\n"
+ " CANNOT elide MISALIGNED branch\n"
+ " The misaligned branch cannot be elide because\n"
+ " the revisions listed ...\n"
+ " revisions not merged from likely correctly aligned branch\n"
+ " ... here have not also been merged from the\n"
+ " likely correctly aligned branch.\n"
+ " MISALIGNED branch - There is no such branch for the parent node.\n"
+ " The sub-tree mergeinfo cannot be elided.\n"
+ " REVERSE RANGE(S) found - The mergeinfo contains illegal reverse ranges.\n"
+ " The sub-tree mergeinfo cannot be elided.\n"
+ "\n"
+ " If all branches have been removed from a nodes' mergeinfo, the whole\n"
+ " svn:mergeinfo property will be removed. Otherwise, only obsolete\n"
+ " branches will be removed. In verbose mode, a list of branches that\n"
+ " could not be removed will be shown per node.\n"),
+ {opt_targets, opt_depth, opt_dry_run, 'q', 'v',
+ opt_remove_obsoletes, opt_remove_redundant,
+ opt_remove_redundant_misaligned, opt_combine_ranges} },
+
+ { "remove-branches", svn_min__remove_branches, { 0 }, N_
+ ("Read a list of branch names from the given file and remove all\n"
+ "mergeinfo referring to these branches from the given targets.\n"
+ "usage: remove-branches [WCPATH...] --file FILE\n"
+ "\n"
+ " The command will behave just like 'normalize --remove-obsoletes' but\n"
+ " will never actually contact the repository. Instead, it assumes any\n"
+ " path given in FILE is a deleted branch.\n"
+ "\n"
+ " Compared to a simple 'normalize --remove-obsoletes' run, this command\n"
+ " allows for selective removal of obsolete branches. It may therefore be\n"
+ " better suited for large deployments with complex branch structures.\n"
+ " You may also use this to remove mergeinfo that refers to still existing\n"
+ " branches.\n"),
+ {opt_targets, opt_depth, opt_dry_run, 'q', 'v', 'F'} },
+
+ { NULL, NULL, {0}, NULL, {0} }
+};
+
+
+/* Version compatibility check */
+static svn_error_t *
+check_lib_versions(void)
+{
+ static const svn_version_checklist_t checklist[] =
+ {
+ { "svn_subr", svn_subr_version },
+ { "svn_client", svn_client_version },
+ { "svn_wc", svn_wc_version },
+ { "svn_ra", svn_ra_version },
+ { "svn_delta", svn_delta_version },
+ { "svn_diff", svn_diff_version },
+ { NULL, NULL }
+ };
+ SVN_VERSION_DEFINE(my_version);
+
+ return svn_ver_check_list2(&my_version, checklist, svn_ver_equal);
+}
+
+
+/* A flag to see if we've been cancelled by the client or not. */
+static volatile sig_atomic_t cancelled = FALSE;
+
+/* A signal handler to support cancellation. */
+static void
+signal_handler(int signum)
+{
+ apr_signal(signum, SIG_IGN);
+ cancelled = TRUE;
+}
+
+/* Our cancellation callback. */
+svn_error_t *
+svn_min__check_cancel(void *baton)
+{
+ /* Cancel baton should always be NULL in the command line client. */
+ SVN_ERR_ASSERT(baton == NULL);
+ if (cancelled)
+ return svn_error_create(SVN_ERR_CANCELLED, NULL, _("Caught signal"));
+ else
+ return SVN_NO_ERROR;
+}
+
+
+/*** Main. ***/
+
+/*
+ * On success, leave *EXIT_CODE untouched and return SVN_NO_ERROR. On error,
+ * either return an error to be displayed, or set *EXIT_CODE to non-zero and
+ * return SVN_NO_ERROR.
+ */
+static svn_error_t *
+sub_main(int *exit_code, int argc, const char *argv[], apr_pool_t *pool)
+{
+ svn_error_t *err;
+ int opt_id;
+ apr_getopt_t *os;
+ svn_min__opt_state_t opt_state = { 0 };
+ svn_client_ctx_t *ctx;
+ apr_array_header_t *received_opts;
+ int i;
+ const svn_opt_subcommand_desc2_t *subcommand = NULL;
+ svn_min__cmd_baton_t command_baton = { 0 };
+ svn_auth_baton_t *ab;
+ svn_config_t *cfg_config;
+ svn_boolean_t interactive_conflicts = FALSE;
+ svn_boolean_t force_interactive = FALSE;
+ apr_hash_t *cfg_hash;
+ svn_boolean_t read_pass_from_stdin = FALSE;
+
+ received_opts = apr_array_make(pool, SVN_OPT_MAX_OPTIONS, sizeof(int));
+
+ /* Check library versions */
+ SVN_ERR(check_lib_versions());
+
+#if defined(WIN32) || defined(__CYGWIN__)
+ /* Set the working copy administrative directory name. */
+ if (getenv("SVN_ASP_DOT_NET_HACK"))
+ {
+ SVN_ERR(svn_wc_set_adm_dir("_svn", pool));
+ }
+#endif
+
+ /* Initialize the RA library. */
+ SVN_ERR(svn_ra_initialize(pool));
+
+ /* Begin processing arguments. */
+ opt_state.depth = svn_depth_unknown;
+
+ /* No args? Show usage. */
+ if (argc <= 1)
+ {
+ SVN_ERR(svn_min__help(NULL, NULL, pool));
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
+ }
+
+ /* Else, parse options. */
+ SVN_ERR(svn_cmdline__getopt_init(&os, argc, argv, pool));
+
+ os->interleave = 1;
+ while (1)
+ {
+ const char *opt_arg;
+ const char *utf8_opt_arg;
+
+ /* Parse the next option. */
+ apr_status_t apr_err = apr_getopt_long(os, svn_min__options, &opt_id,
+ &opt_arg);
+ if (APR_STATUS_IS_EOF(apr_err))
+ break;
+ else if (apr_err)
+ {
+ SVN_ERR(svn_min__help(NULL, NULL, pool));
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
+ }
+
+ /* Stash the option code in an array before parsing it. */
+ APR_ARRAY_PUSH(received_opts, int) = opt_id;
+
+ switch (opt_id) {
+ case 'h':
+ case '?':
+ opt_state.help = TRUE;
+ break;
+ case 'q':
+ opt_state.quiet = TRUE;
+ break;
+ case 'v':
+ opt_state.verbose = TRUE;
+ break;
+ case 'F':
+ /* We read the raw file content here. */
+ SVN_ERR(svn_utf_cstring_to_utf8(&utf8_opt_arg, opt_arg, pool));
+ SVN_ERR(svn_stringbuf_from_file2(&(opt_state.filedata),
+ utf8_opt_arg, pool));
+ break;
+ case opt_targets:
+ {
+ svn_stringbuf_t *buffer, *buffer_utf8;
+
+ SVN_ERR(svn_utf_cstring_to_utf8(&utf8_opt_arg, opt_arg, pool));
+ SVN_ERR(svn_stringbuf_from_file2(&buffer, utf8_opt_arg, pool));
+ SVN_ERR(svn_utf_stringbuf_to_utf8(&buffer_utf8, buffer, pool));
+ opt_state.targets = svn_cstring_split(buffer_utf8->data, "\n\r",
+ TRUE, pool);
+ }
+ break;
+ case opt_depth:
+ err = svn_utf_cstring_to_utf8(&utf8_opt_arg, opt_arg, pool);
+ if (err)
+ return svn_error_createf(SVN_ERR_CL_ARG_PARSING_ERROR, err,
+ _("Error converting depth "
+ "from locale to UTF-8"));
+ opt_state.depth = svn_depth_from_word(utf8_opt_arg);
+ if (opt_state.depth == svn_depth_unknown
+ || opt_state.depth == svn_depth_exclude)
+ {
+ return svn_error_createf(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
+ _("'%s' is not a valid depth; try "
+ "'empty', 'files', 'immediates', "
+ "or 'infinity'"),
+ utf8_opt_arg);
+ }
+ break;
+ case opt_version:
+ opt_state.version = TRUE;
+ break;
+ case opt_dry_run:
+ opt_state.dry_run = TRUE;
+ break;
+ case opt_auth_username:
+ SVN_ERR(svn_utf_cstring_to_utf8(&opt_state.auth_username,
+ opt_arg, pool));
+ break;
+ case opt_auth_password:
+ SVN_ERR(svn_utf_cstring_to_utf8(&opt_state.auth_password,
+ opt_arg, pool));
+ break;
+ case opt_auth_password_from_stdin:
+ read_pass_from_stdin = TRUE;
+ break;
+ case opt_no_auth_cache:
+ opt_state.no_auth_cache = TRUE;
+ break;
+ case opt_non_interactive:
+ opt_state.non_interactive = TRUE;
+ break;
+ case opt_force_interactive:
+ force_interactive = TRUE;
+ break;
+ case opt_trust_server_cert: /* backwards compat to 1.8 */
+ case opt_trust_server_cert_unknown_ca:
+ opt_state.trust_server_cert_unknown_ca = TRUE;
+ break;
+ case opt_trust_server_cert_cn_mismatch:
+ opt_state.trust_server_cert_cn_mismatch = TRUE;
+ break;
+ case opt_trust_server_cert_expired:
+ opt_state.trust_server_cert_expired = TRUE;
+ break;
+ case opt_trust_server_cert_not_yet_valid:
+ opt_state.trust_server_cert_not_yet_valid = TRUE;
+ break;
+ case opt_trust_server_cert_other_failure:
+ opt_state.trust_server_cert_other_failure = TRUE;
+ break;
+ case opt_config_dir:
+ SVN_ERR(svn_utf_cstring_to_utf8(&utf8_opt_arg, opt_arg, pool));
+ opt_state.config_dir = svn_dirent_internal_style(utf8_opt_arg, pool);
+ break;
+ case opt_config_options:
+ if (!opt_state.config_options)
+ opt_state.config_options =
+ apr_array_make(pool, 1,
+ sizeof(svn_cmdline__config_argument_t*));
+
+ SVN_ERR(svn_utf_cstring_to_utf8(&utf8_opt_arg, opt_arg, pool));
+ SVN_ERR(svn_cmdline__parse_config_option(opt_state.config_options,
+ utf8_opt_arg,
+ "svn-mi-normalizer: ",
+ pool));
+ break;
+ case opt_allow_mixed_revisions:
+ opt_state.allow_mixed_rev = TRUE;
+ break;
+
+ case opt_remove_obsoletes:
+ opt_state.remove_obsoletes = TRUE;
+ break;
+ case opt_remove_redundant:
+ opt_state.remove_redundants = TRUE;
+ break;
+ case opt_combine_ranges:
+ opt_state.combine_ranges = TRUE;
+ break;
+ case opt_remove_redundant_misaligned:
+ opt_state.remove_redundant_misaligned = TRUE;
+ break;
+
+ default:
+ /* Hmmm. Perhaps this would be a good place to squirrel away
+ opts that commands like svn diff might need. Hmmm indeed. */
+ break;
+ }
+ }
+
+ /* The --non-interactive and --force-interactive options are mutually
+ * exclusive. */
+ if (opt_state.non_interactive && force_interactive)
+ {
+ return svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
+ _("--non-interactive and --force-interactive "
+ "are mutually exclusive"));
+ }
+ else
+ opt_state.non_interactive = !svn_cmdline__be_interactive(
+ opt_state.non_interactive,
+ force_interactive);
+
+ /* --password-from-stdin can only be used with --non-interactive */
+ if (read_pass_from_stdin && !opt_state.non_interactive)
+ {
+ return svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
+ _("--password-from-stdin requires "
+ "--non-interactive"));
+ }
+
+ /* ### This really belongs in libsvn_client. The trouble is,
+ there's no one place there to run it from, no
+ svn_client_init(). We'd have to add it to all the public
+ functions that a client might call. It's unmaintainable to do
+ initialization from within libsvn_client itself, but it seems
+ burdensome to demand that all clients call svn_client_init()
+ before calling any other libsvn_client function... On the other
+ hand, the alternative is effectively to demand that they call
+ svn_config_ensure() instead, so maybe we should have a generic
+ init function anyway. Thoughts? */
+ SVN_ERR(svn_config_ensure(opt_state.config_dir, pool));
+
+ /* If the user asked for help, then the rest of the arguments are
+ the names of subcommands to get help on (if any), or else they're
+ just typos/mistakes. Whatever the case, the subcommand to
+ actually run is svn_cl__help(). */
+ if (opt_state.help)
+ subcommand = svn_opt_get_canonical_subcommand2(svn_min__cmd_table, "help");
+
+ /* If we're not running the `help' subcommand, then look for a
+ subcommand in the first argument. */
+ if (subcommand == NULL)
+ {
+ if (os->ind >= os->argc)
+ {
+ if (opt_state.version)
+ {
+ /* Use the "help" subcommand to handle the "--version" option. */
+ static const svn_opt_subcommand_desc2_t pseudo_cmd =
+ { "--version", svn_min__help, {0}, "",
+ {opt_version, /* must accept its own option */
+ 'q', /* brief output */
+ 'v', /* verbose output */
+ opt_config_dir /* all commands accept this */
+ } };
+
+ subcommand = &pseudo_cmd;
+ }
+ else
+ {
+ svn_error_clear
+ (svn_cmdline_fprintf(stderr, pool,
+ _("Subcommand argument required\n")));
+ svn_error_clear(svn_min__help(NULL, NULL, pool));
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
+ }
+ }
+ else
+ {
+ const char *first_arg;
+
+ SVN_ERR(svn_utf_cstring_to_utf8(&first_arg, os->argv[os->ind++],
+ pool));
+ subcommand = svn_opt_get_canonical_subcommand2(svn_min__cmd_table,
+ first_arg);
+ if (subcommand == NULL)
+ {
+ svn_error_clear
+ (svn_cmdline_fprintf(stderr, pool,
+ _("Unknown subcommand: '%s'\n"),
+ first_arg));
+ svn_error_clear(svn_min__help(NULL, NULL, pool));
+
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
+ }
+ }
+ }
+
+ /* Check that the subcommand wasn't passed any inappropriate options. */
+ for (i = 0; i < received_opts->nelts; i++)
+ {
+ opt_id = APR_ARRAY_IDX(received_opts, i, int);
+
+ /* All commands implicitly accept --help, so just skip over this
+ when we see it. Note that we don't want to include this option
+ in their "accepted options" list because it would be awfully
+ redundant to display it in every commands' help text. */
+ if (opt_id == 'h' || opt_id == '?')
+ continue;
+
+ if (! svn_opt_subcommand_takes_option3(subcommand, opt_id,
+ svn_min__global_options))
+ {
+ const char *optstr;
+ const apr_getopt_option_t *badopt =
+ svn_opt_get_option_from_code2(opt_id, svn_min__options,
+ subcommand, pool);
+ svn_opt_format_option(&optstr, badopt, FALSE, pool);
+ if (subcommand->name[0] == '-')
+ svn_error_clear(svn_min__help(NULL, NULL, pool));
+ else
+ svn_error_clear
+ (svn_cmdline_fprintf
+ (stderr, pool, _("Subcommand '%s' doesn't accept option '%s'\n"
+ "Type 'svn-mergeinfo-normalizer help %s' for usage.\n"),
+ subcommand->name, optstr, subcommand->name));
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
+ }
+ }
+
+ /* --trust-* options can only be used with --non-interactive */
+ if (!opt_state.non_interactive)
+ {
+ if (opt_state.trust_server_cert_unknown_ca)
+ return svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
+ _("--trust-unknown-ca requires "
+ "--non-interactive"));
+ if (opt_state.trust_server_cert_cn_mismatch)
+ return svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
+ _("--trust-cn-mismatch requires "
+ "--non-interactive"));
+ if (opt_state.trust_server_cert_expired)
+ return svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
+ _("--trust-expired requires "
+ "--non-interactive"));
+ if (opt_state.trust_server_cert_not_yet_valid)
+ return svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
+ _("--trust-not-yet-valid requires "
+ "--non-interactive"));
+ if (opt_state.trust_server_cert_other_failure)
+ return svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
+ _("--trust-other-failure requires "
+ "--non-interactive"));
+ }
+
+ err = svn_config_get_config(&cfg_hash, opt_state.config_dir, pool);
+ if (err)
+ {
+ /* Fallback to default config if the config directory isn't readable
+ or is not a directory. */
+ if (APR_STATUS_IS_EACCES(err->apr_err)
+ || SVN__APR_STATUS_IS_ENOTDIR(err->apr_err))
+ {
+ svn_handle_warning2(stderr, err, "svn: ");
+ svn_error_clear(err);
+
+ SVN_ERR(svn_config__get_default_config(&cfg_hash, pool));
+ }
+ else
+ return err;
+ }
+
+ /* Update the options in the config */
+ if (opt_state.config_options)
+ {
+ svn_error_clear(
+ svn_cmdline__apply_config_options(cfg_hash,
+ opt_state.config_options,
+ "svn: ", "--config-option"));
+ }
+
+ cfg_config = svn_hash_gets(cfg_hash, SVN_CONFIG_CATEGORY_CONFIG);
+#if !defined(SVN_CL_NO_EXCLUSIVE_LOCK)
+ {
+ const char *exclusive_clients_option;
+ apr_array_header_t *exclusive_clients;
+
+ svn_config_get(cfg_config, &exclusive_clients_option,
+ SVN_CONFIG_SECTION_WORKING_COPY,
+ SVN_CONFIG_OPTION_SQLITE_EXCLUSIVE_CLIENTS,
+ NULL);
+ exclusive_clients = svn_cstring_split(exclusive_clients_option,
+ " ,", TRUE, pool);
+ for (i = 0; i < exclusive_clients->nelts; ++i)
+ {
+ const char *exclusive_client = APR_ARRAY_IDX(exclusive_clients, i,
+ const char *);
+
+ /* This blocks other clients from accessing the wc.db so it must
+ be explicitly enabled. */
+ if (!strcmp(exclusive_client, "svn"))
+ svn_config_set(cfg_config,
+ SVN_CONFIG_SECTION_WORKING_COPY,
+ SVN_CONFIG_OPTION_SQLITE_EXCLUSIVE,
+ "true");
+ }
+ }
+#endif
+
+ /* Get password from stdin if necessary */
+ if (read_pass_from_stdin)
+ {
+ SVN_ERR(svn_cmdline__stdin_readline(&opt_state.auth_password, pool, pool));
+ }
+
+ /* Create a client context object. */
+ command_baton.opt_state = &opt_state;
+ SVN_ERR(svn_client_create_context2(&ctx, cfg_hash, pool));
+ command_baton.ctx = ctx;
+
+ /* Set up our cancellation support. */
+ ctx->cancel_func = svn_min__check_cancel;
+ apr_signal(SIGINT, signal_handler);
+#ifdef SIGBREAK
+ /* SIGBREAK is a Win32 specific signal generated by ctrl-break. */
+ apr_signal(SIGBREAK, signal_handler);
+#endif
+#ifdef SIGHUP
+ apr_signal(SIGHUP, signal_handler);
+#endif
+#ifdef SIGTERM
+ apr_signal(SIGTERM, signal_handler);
+#endif
+
+#ifdef SIGPIPE
+ /* Disable SIGPIPE generation for the platforms that have it. */
+ apr_signal(SIGPIPE, SIG_IGN);
+#endif
+
+#ifdef SIGXFSZ
+ /* Disable SIGXFSZ generation for the platforms that have it, otherwise
+ * working with large files when compiled against an APR that doesn't have
+ * large file support will crash the program, which is uncool. */
+ apr_signal(SIGXFSZ, SIG_IGN);
+#endif
+
+ /* Set up Authentication stuff. */
+ SVN_ERR(svn_cmdline_create_auth_baton2(
+ &ab,
+ opt_state.non_interactive,
+ opt_state.auth_username,
+ opt_state.auth_password,
+ opt_state.config_dir,
+ opt_state.no_auth_cache,
+ opt_state.trust_server_cert_unknown_ca,
+ opt_state.trust_server_cert_cn_mismatch,
+ opt_state.trust_server_cert_expired,
+ opt_state.trust_server_cert_not_yet_valid,
+ opt_state.trust_server_cert_other_failure,
+ cfg_config,
+ ctx->cancel_func,
+ ctx->cancel_baton,
+ pool));
+
+ ctx->auth_baton = ab;
+
+ /* Check whether interactive conflict resolution is disabled by
+ * the configuration file. If no --accept option was specified
+ * we postpone all conflicts in this case. */
+ SVN_ERR(svn_config_get_bool(cfg_config, &interactive_conflicts,
+ SVN_CONFIG_SECTION_MISCELLANY,
+ SVN_CONFIG_OPTION_INTERACTIVE_CONFLICTS,
+ TRUE));
+
+ /* Get targets from command line - unless we are running "help".
+ * The help sub-command will do its own parsing. */
+ if (strcmp(subcommand->name, "help"))
+ {
+ SVN_ERR(svn_client_args_to_target_array2(&opt_state.targets,
+ os, opt_state.targets,
+ ctx, FALSE, pool));
+
+ /* Add "." if user passed 0 arguments. */
+ svn_opt_push_implicit_dot_target(opt_state.targets, pool);
+ }
+
+ /* And now we finally run the subcommand. */
+ err = (*subcommand->cmd_func)(os, &command_baton, pool);
+ if (err)
+ {
+ /* For argument-related problems, suggest using the 'help'
+ subcommand. */
+ if (err->apr_err == SVN_ERR_CL_INSUFFICIENT_ARGS
+ || err->apr_err == SVN_ERR_CL_ARG_PARSING_ERROR)
+ {
+ err = svn_error_quick_wrap(
+ err, apr_psprintf(pool,
+ _("Try 'svn help %s' for more information"),
+ subcommand->name));
+ }
+
+ if (err->apr_err == SVN_ERR_AUTHN_FAILED && opt_state.non_interactive)
+ {
+ err = svn_error_quick_wrap(err,
+ _("Authentication failed and interactive"
+ " prompting is disabled; see the"
+ " --force-interactive option"));
+ }
+
+ /* Tell the user about 'svn cleanup' if any error on the stack
+ was about locked working copies. */
+ if (svn_error_find_cause(err, SVN_ERR_WC_LOCKED))
+ {
+ err = svn_error_quick_wrap(
+ err, _("Run 'svn cleanup' to remove locks "
+ "(type 'svn help cleanup' for details)"));
+ }
+
+ if (err->apr_err == SVN_ERR_SQLITE_BUSY)
+ {
+ err = svn_error_quick_wrap(err,
+ _("Another process is blocking the "
+ "working copy database, or the "
+ "underlying filesystem does not "
+ "support file locking; if the working "
+ "copy is on a network filesystem, make "
+ "sure file locking has been enabled "
+ "on the file server"));
+ }
+
+ if (svn_error_find_cause(err, SVN_ERR_RA_CANNOT_CREATE_TUNNEL) &&
+ (opt_state.auth_username || opt_state.auth_password))
+ {
+ err = svn_error_quick_wrap(
+ err, _("When using svn+ssh:// URLs, keep in mind that the "
+ "--username and --password options are ignored "
+ "because authentication is performed by SSH, not "
+ "Subversion"));
+ }
+
+ return err;
+ }
+
+ return SVN_NO_ERROR;
+}
+
+int
+main(int argc, const char *argv[])
+{
+ apr_pool_t *pool;
+ int exit_code = EXIT_SUCCESS;
+ svn_error_t *err;
+
+ /* Initialize the app. */
+ if (svn_cmdline_init("svn", stderr) != EXIT_SUCCESS)
+ return EXIT_FAILURE;
+
+ /* Create our top-level pool. Use a separate mutexless allocator,
+ * given this application is single threaded.
+ */
+ pool = apr_allocator_owner_get(svn_pool_create_allocator(FALSE));
+
+ err = sub_main(&exit_code, argc, argv, pool);
+
+ /* Flush stdout and report if it fails. It would be flushed on exit anyway
+ but this makes sure that output is not silently lost if it fails. */
+ err = svn_error_compose_create(err, svn_cmdline_fflush(stdout));
+
+ if (err)
+ {
+ exit_code = EXIT_FAILURE;
+ svn_cmdline_handle_exit_error(err, NULL, "svn: ");
+ }
+
+ svn_pool_destroy(pool);
+ return exit_code;
+}
diff --git a/tools/client-side/svn-mergeinfo-normalizer/wc_mergeinfo.c b/tools/client-side/svn-mergeinfo-normalizer/wc_mergeinfo.c
new file mode 100644
index 0000000..6d7b841
--- /dev/null
+++ b/tools/client-side/svn-mergeinfo-normalizer/wc_mergeinfo.c
@@ -0,0 +1,491 @@
+/*
+ * wc_mergeinfo.c -- Query and store the mergeinfo.
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+/* ==================================================================== */
+
+
+
+/*** Includes. ***/
+
+#include "svn_cmdline.h"
+#include "svn_pools.h"
+#include "svn_client.h"
+#include "svn_string.h"
+#include "svn_sorts.h"
+#include "svn_dirent_uri.h"
+#include "svn_props.h"
+#include "svn_hash.h"
+
+#include "mergeinfo-normalizer.h"
+
+#include "private/svn_fspath.h"
+#include "private/svn_opt_private.h"
+#include "private/svn_sorts_private.h"
+#include "private/svn_subr_private.h"
+#include "svn_private_config.h"
+
+
+
+/* Our internal mergeinfo structure
+ * It decorates the standard svn_mergeinfo_t with path and parent info. */
+typedef struct mergeinfo_t
+{
+  /* The abspath of the working copy node that has this MERGEINFO. */
+  const char *local_path;
+
+  /* The corresponding FS path. */
+  const char *fs_path;
+
+  /* The full URL of that node in the repository. */
+  const char *url;
+
+  /* Pointer to the closest parent mergeinfo that we found in the working
+   * copy.  May be NULL. */
+  struct mergeinfo_t *parent;
+
+  /* The svn_mergeinfo_t of all nodes whose PARENT points to this one
+   * (see link_parents()).  May be NULL. */
+  apr_array_header_t *children;
+
+  /* The parsed mergeinfo. */
+  svn_mergeinfo_t mergeinfo;
+} mergeinfo_t;
+
+/* Parse the mergeinfo in PROPS as returned by svn_client_propget5,
+ * construct our internal mergeinfo representation, allocated in
+ * RESULT_POOL from it and return it in *RESULT_P.  Use SCRATCH_POOL for
+ * temporary allocations. */
+static svn_error_t *
+parse_mergeinfo(apr_array_header_t **result_p,
+                apr_hash_t *props,
+                apr_pool_t *result_pool,
+                apr_pool_t *scratch_pool)
+{
+  apr_array_header_t *result = apr_array_make(result_pool,
+                                              apr_hash_count(props),
+                                              sizeof(mergeinfo_t *));
+  apr_pool_t *iterpool = svn_pool_create(scratch_pool);
+  apr_hash_index_t *hi;
+
+  for (hi = apr_hash_first(scratch_pool, props); hi; hi = apr_hash_next(hi))
+    {
+      mergeinfo_t *entry = apr_pcalloc(result_pool, sizeof(*entry));
+      svn_mergeinfo_t mergeinfo;
+      svn_string_t *mi_string = apr_hash_this_val(hi);
+
+      svn_pool_clear(iterpool);
+      /* Parse into the short-lived ITERPOOL; only the parsed result is
+       * duplicated into RESULT_POOL below, keeping parser temporaries out
+       * of the long-lived pool. */
+      SVN_ERR(svn_mergeinfo_parse(&mergeinfo, mi_string->data, iterpool));
+
+      entry->local_path = apr_pstrdup(result_pool, apr_hash_this_key(hi));
+      entry->mergeinfo = svn_mergeinfo_dup(mergeinfo, result_pool);
+
+      APR_ARRAY_PUSH(result, mergeinfo_t *) = entry;
+    }
+
+  svn_pool_destroy(iterpool);
+  *result_p = result;
+
+  return SVN_NO_ERROR;
+}
+
+/* Ordering function comparing two mergeinfo_t * by local abspath.
+ * Suitable for svn_sort__array() / qsort-style callers. */
+static int
+compare_mergeinfo(const void *lhs,
+                  const void *rhs)
+{
+  const mergeinfo_t *lhs_mi = *(const mergeinfo_t **)lhs;
+  const mergeinfo_t *rhs_mi = *(const mergeinfo_t **)rhs;
+
+  return strcmp(lhs_mi->local_path, rhs_mi->local_path);
+}
+
+/* Implements svn_client_info_receiver2_t.
+ * Updates the mergeinfo_t * given as BATON with the incoming INFO. */
+static svn_error_t *
+get_urls(void *baton,
+         const char *target,
+         const svn_client_info2_t *info,
+         apr_pool_t *pool)
+{
+  mergeinfo_t *mi = baton;
+  /* Allocate results in the pool backing MI's mergeinfo hash so they
+   * live exactly as long as the entry itself; POOL is only scratch. */
+  apr_pool_t *target_pool = apr_hash_pool_get(mi->mergeinfo);
+  const char *rel_path = svn_uri_skip_ancestor(info->repos_root_URL,
+                                               info->URL, pool);
+
+  mi->url = apr_pstrdup(target_pool, info->URL);
+  mi->fs_path = svn_fspath__canonicalize(rel_path, target_pool);
+
+  return SVN_NO_ERROR;
+}
+
+/* Sort the nodes in MERGEINFO by path (parents before their sub-nodes),
+ * add working copy info to it and link nodes to their respective closest
+ * parents.  BATON provides the client context.  SCRATCH_POOL is used for
+ * temporaries. */
+static svn_error_t *
+link_parents(apr_array_header_t *mergeinfo,
+             svn_min__cmd_baton_t *baton,
+             apr_pool_t *scratch_pool)
+{
+  apr_pool_t *result_pool = mergeinfo->pool;
+  apr_pool_t *iterpool = svn_pool_create(scratch_pool);
+  int i;
+
+  /* Code further down assumes that there is at least one entry. */
+  if (mergeinfo->nelts == 0)
+    return SVN_NO_ERROR;
+
+  /* sort mergeinfo by path */
+  svn_sort__array(mergeinfo, compare_mergeinfo);
+
+  /* add URL info */
+  for (i = 0; i < mergeinfo->nelts; ++i)
+    {
+      mergeinfo_t *entry = APR_ARRAY_IDX(mergeinfo, i, mergeinfo_t *);
+      const svn_opt_revision_t rev_working = { svn_opt_revision_working };
+
+      svn_pool_clear(iterpool);
+      SVN_ERR(svn_client_info4(entry->local_path, &rev_working,
+                               &rev_working, svn_depth_empty, FALSE,
+                               TRUE, FALSE, NULL, get_urls, entry,
+                               baton->ctx, iterpool));
+    }
+
+  /* link all mergeinfo to their parent merge info - if that exists */
+  for (i = 1; i < mergeinfo->nelts; ++i)
+    {
+      mergeinfo_t *entry = APR_ARRAY_IDX(mergeinfo, i, mergeinfo_t *);
+      /* Because the array is sorted by path, the closest parent candidate
+       * is the previous entry; walk up its parent chain until an actual
+       * path ancestor is found (or we run out of candidates). */
+      entry->parent = APR_ARRAY_IDX(mergeinfo, i - 1, mergeinfo_t *);
+
+      while (   entry->parent
+             && !svn_dirent_is_ancestor(entry->parent->local_path,
+                                        entry->local_path))
+        entry->parent = entry->parent->parent;
+
+      /* Reverse pointer.  Note that CHILDREN collects the children's
+       * svn_mergeinfo_t, not the mergeinfo_t wrappers. */
+      if (entry->parent)
+        {
+          if (!entry->parent->children)
+            entry->parent->children
+              = apr_array_make(result_pool, 4, sizeof(svn_mergeinfo_t));
+
+          APR_ARRAY_PUSH(entry->parent->children, svn_mergeinfo_t)
+            = entry->mergeinfo;
+        }
+    }
+
+  /* break links for switched paths */
+  for (i = 1; i < mergeinfo->nelts; ++i)
+    {
+      mergeinfo_t *entry = APR_ARRAY_IDX(mergeinfo, i, mergeinfo_t *);
+      if (entry->parent)
+        {
+          /* A WC path ancestor whose URL is not also a URL ancestor has
+           * been switched; don't treat it as a mergeinfo parent. */
+          if (!svn_uri__is_ancestor(entry->parent->url, entry->url))
+            entry->parent = NULL;
+        }
+    }
+
+  svn_pool_destroy(iterpool);
+
+  return SVN_NO_ERROR;
+}
+
+/* Scan the working copy given by BATON for svn:mergeinfo properties,
+ * parse them into our internal representation allocated in RESULT_POOL
+ * and return them, sorted and parent-linked, in *RESULT.  Use
+ * SCRATCH_POOL for temporary allocations. */
+svn_error_t *
+svn_min__read_mergeinfo(apr_array_header_t **result,
+                        svn_min__cmd_baton_t *baton,
+                        apr_pool_t *result_pool,
+                        apr_pool_t *scratch_pool)
+{
+  svn_min__opt_state_t *opt_state = baton->opt_state;
+  svn_client_ctx_t *ctx = baton->ctx;
+
+  /* Pools for temporary data - to be cleaned up asap as they hold
+   * significant amounts of it. */
+  apr_pool_t *props_pool = svn_pool_create(scratch_pool);
+  apr_pool_t *props_scratch_pool = svn_pool_create(scratch_pool);
+  apr_hash_t *props;
+
+  const svn_opt_revision_t rev_working = { svn_opt_revision_working };
+
+  if (!baton->opt_state->quiet)
+    SVN_ERR(svn_cmdline_printf(scratch_pool,
+                               _("Scanning working copy %s ...\n"),
+                               baton->local_abspath));
+
+  SVN_ERR(svn_client_propget5(&props, NULL, SVN_PROP_MERGEINFO,
+                              baton->local_abspath, &rev_working,
+                              &rev_working, NULL,
+                              opt_state->depth, NULL, ctx,
+                              props_pool, props_scratch_pool));
+  svn_pool_destroy(props_scratch_pool);
+
+  SVN_ERR(parse_mergeinfo(result, props, result_pool, scratch_pool));
+  /* The raw property values have been copied; release them early. */
+  svn_pool_destroy(props_pool);
+
+  SVN_ERR(link_parents(*result, baton, scratch_pool));
+
+  if (!baton->opt_state->quiet)
+    SVN_ERR(svn_min__print_mergeinfo_stats(*result, scratch_pool));
+
+  return SVN_NO_ERROR;
+}
+
+/* Return the longest FS path, allocated in RESULT_POOL, that is an
+ * ancestor of (or equal to) every node's FS path in MERGEINFO and of
+ * every branch path mentioned inside their mergeinfo.  Returns NULL if
+ * MERGEINFO is empty.  Use SCRATCH_POOL for temporaries. */
+const char *
+svn_min__common_parent(apr_array_header_t *mergeinfo,
+                       apr_pool_t *result_pool,
+                       apr_pool_t *scratch_pool)
+{
+  /* NOTE(review): ITERPOOL is cleared each iteration but nothing is ever
+   * allocated in it; it could be removed. */
+  apr_pool_t *iterpool = svn_pool_create(scratch_pool);
+  const char *result = NULL;
+  int i;
+
+  for (i = 0; i < mergeinfo->nelts; ++i)
+    {
+      apr_hash_index_t *hi;
+      mergeinfo_t *entry = APR_ARRAY_IDX(mergeinfo, i, mergeinfo_t *);
+
+      svn_pool_clear(iterpool);
+
+      /* Make common base path cover the wc's FS path. */
+      if (result == NULL)
+        result = apr_pstrdup(result_pool, entry->fs_path);
+      else if (!svn_dirent_is_ancestor(result, entry->fs_path))
+        result = svn_dirent_get_longest_ancestor(result, entry->fs_path,
+                                                 result_pool);
+
+      /* Cover the branch FS paths mentioned in the mergeinfo. */
+      for (hi = apr_hash_first(scratch_pool, entry->mergeinfo);
+           hi;
+           hi = apr_hash_next(hi))
+        {
+          const char * path = apr_hash_this_key(hi);
+          if (!svn_dirent_is_ancestor(result, path))
+            result = svn_dirent_get_longest_ancestor(result, path,
+                                                     result_pool);
+        }
+    }
+
+  svn_pool_destroy(iterpool);
+  return result;
+}
+
+/* Describe the mergeinfo node at index IDX of MERGEINFO.  Set *FS_PATH
+ * to its FS path and *SUBTREE_MERGEINFO to its parsed mergeinfo.  If the
+ * node has a known parent, set *PARENT_PATH to the parent's WC path,
+ * *SUBTREE_RELPATH to the node's relpath below it, *PARENT_MERGEINFO to
+ * the parent's mergeinfo and *SIBLINGS_MERGEINFO to the mergeinfo of all
+ * the parent's children.  Without a parent, *PARENT_PATH is the node's
+ * own WC path and the remaining outputs are ""/NULL.  For an
+ * out-of-range IDX, all outputs are set to "" resp. NULL. */
+void
+svn_min__get_mergeinfo_pair(const char **fs_path,
+                            const char **parent_path,
+                            const char **subtree_relpath,
+                            svn_mergeinfo_t *parent_mergeinfo,
+                            svn_mergeinfo_t *subtree_mergeinfo,
+                            apr_array_header_t **siblings_mergeinfo,
+                            apr_array_header_t *mergeinfo,
+                            int idx)
+{
+  mergeinfo_t *entry;
+  if (idx < 0 || mergeinfo->nelts <= idx)
+    {
+      *fs_path = "";
+      *parent_path = "";
+      *subtree_relpath = "";
+      *parent_mergeinfo = NULL;
+      *subtree_mergeinfo = NULL;
+      *siblings_mergeinfo = NULL;
+
+      return;
+    }
+
+  entry = APR_ARRAY_IDX(mergeinfo, idx, mergeinfo_t *);
+  *fs_path = entry->fs_path;
+  *subtree_mergeinfo = entry->mergeinfo;
+
+  if (!entry->parent)
+    {
+      *parent_path = entry->local_path;
+      *subtree_relpath = "";
+      *parent_mergeinfo = NULL;
+      *siblings_mergeinfo = NULL;
+
+      return;
+    }
+
+  *parent_path = entry->parent->local_path;
+  *subtree_relpath = svn_dirent_skip_ancestor(entry->parent->local_path,
+                                              entry->local_path);
+  *parent_mergeinfo = entry->parent->mergeinfo;
+  *siblings_mergeinfo = entry->parent->children;
+}
+
+/* Return the parsed mergeinfo of the node at index IDX of MERGEINFO.
+ * IDX must be within bounds; violations trigger a malfunction abort. */
+svn_mergeinfo_t
+svn_min__get_mergeinfo(apr_array_header_t *mergeinfo,
+                       int idx)
+{
+  SVN_ERR_ASSERT_NO_RETURN(idx >= 0 && idx < mergeinfo->nelts);
+  return APR_ARRAY_IDX(mergeinfo, idx, mergeinfo_t *)->mergeinfo;
+}
+
+/* For every branch path at or below PARENT_PATH that is mentioned in any
+ * element of SIBLING_MERGEINFO, intersect the recorded ranges with
+ * RELEVANT_RANGES.  Return the non-empty intersections in
+ * *SIBLING_RANGES, keyed by branch path and allocated in RESULT_POOL.
+ * Use SCRATCH_POOL for temporary allocations. */
+svn_error_t *
+svn_min__sibling_ranges(apr_hash_t **sibling_ranges,
+                        apr_array_header_t *sibling_mergeinfo,
+                        const char *parent_path,
+                        svn_rangelist_t *relevant_ranges,
+                        apr_pool_t *result_pool,
+                        apr_pool_t *scratch_pool)
+{
+  int i;
+  apr_hash_t *result = svn_hash__make(result_pool);
+  apr_pool_t *iterpool = svn_pool_create(scratch_pool);
+
+  for (i = 0; i < sibling_mergeinfo->nelts; ++i)
+    {
+      svn_mergeinfo_t mergeinfo;
+      apr_hash_index_t *hi;
+
+      svn_pool_clear(iterpool);
+      mergeinfo = APR_ARRAY_IDX(sibling_mergeinfo, i, svn_mergeinfo_t);
+
+      for (hi = apr_hash_first(iterpool, mergeinfo);
+           hi;
+           hi = apr_hash_next(hi))
+        {
+          const char *path = apr_hash_this_key(hi);
+          if (svn_dirent_is_ancestor(parent_path, path))
+            {
+              svn_rangelist_t *common, *ranges = apr_hash_this_val(hi);
+              SVN_ERR(svn_rangelist_intersect(&common, ranges,
+                                              relevant_ranges, TRUE,
+                                              result_pool));
+
+              /* Only record paths that actually share ranges. */
+              if (common->nelts)
+                {
+                  svn_hash_sets(result, apr_pstrdup(result_pool, path),
+                                common);
+                }
+            }
+        }
+    }
+
+  svn_pool_destroy(iterpool);
+  *sibling_ranges = result;
+
+  return SVN_NO_ERROR;
+}
+
+/* Write the mergeinfo of all nodes in MERGEINFO back to the working copy
+ * as svn:mergeinfo properties; nodes with empty mergeinfo get the
+ * property deleted.  BATON provides the client context.  Use
+ * SCRATCH_POOL for temporary allocations. */
+svn_error_t *
+svn_min__write_mergeinfo(svn_min__cmd_baton_t *baton,
+                         apr_array_header_t *mergeinfo,
+                         apr_pool_t *scratch_pool)
+{
+  svn_client_ctx_t *ctx = baton->ctx;
+
+  apr_pool_t *iterpool = svn_pool_create(scratch_pool);
+  int i;
+
+  for (i = 0; i < mergeinfo->nelts; ++i)
+    {
+      mergeinfo_t *entry = APR_ARRAY_IDX(mergeinfo, i, mergeinfo_t *);
+      svn_string_t *propval = NULL;
+      apr_array_header_t *targets;
+
+      svn_pool_clear(iterpool);
+
+      targets = apr_array_make(iterpool, 1, sizeof(const char *));
+      APR_ARRAY_PUSH(targets, const char *) = entry->local_path;
+
+      /* If the mergeinfo is empty, keep the NULL PROPVAL to actually
+       * delete the property. */
+      if (apr_hash_count(entry->mergeinfo))
+        SVN_ERR(svn_mergeinfo_to_string(&propval, entry->mergeinfo,
+                                        iterpool));
+
+      SVN_ERR(svn_client_propset_local(SVN_PROP_MERGEINFO, propval, targets,
+                                       svn_depth_empty, FALSE, NULL, ctx,
+                                       iterpool));
+    }
+
+  svn_pool_destroy(iterpool);
+
+  return SVN_NO_ERROR;
+}
+
+/* Compact MERGEINFO in place, dropping every node whose parsed mergeinfo
+ * hash is empty.  The relative order of the remaining nodes is kept.
+ * Never returns an error. */
+svn_error_t *
+svn_min__remove_empty_mergeinfo(apr_array_header_t *mergeinfo)
+{
+  int i;
+  int dest;
+
+  /* Classic two-finger compaction: I scans, DEST is the write position. */
+  for (i = 0, dest = 0; i < mergeinfo->nelts; ++i)
+    {
+      mergeinfo_t *entry = APR_ARRAY_IDX(mergeinfo, i, mergeinfo_t *);
+      if (apr_hash_count(entry->mergeinfo))
+        {
+          APR_ARRAY_IDX(mergeinfo, dest, mergeinfo_t *) = entry;
+          ++dest;
+        }
+    }
+
+  mergeinfo->nelts = dest;
+
+  return SVN_NO_ERROR;
+}
+
+/* Print to stdout how many nodes carry mergeinfo in WC_MERGEINFO, how
+ * many branch entries they contain in total and how many revision ranges
+ * those cover.  Use SCRATCH_POOL for temporary allocations. */
+svn_error_t *
+svn_min__print_mergeinfo_stats(apr_array_header_t *wc_mergeinfo,
+                               apr_pool_t *scratch_pool)
+{
+  apr_pool_t *iterpool = svn_pool_create(scratch_pool);
+
+  int branch_count = 0;
+  int range_count = 0;
+
+  /* Aggregate numbers. */
+  int i;
+  for (i = 0; i < wc_mergeinfo->nelts; ++i)
+    {
+      apr_hash_index_t *hi;
+      svn_mergeinfo_t mergeinfo = svn_min__get_mergeinfo(wc_mergeinfo, i);
+
+      svn_pool_clear(iterpool);
+
+      /* One hash entry per branch the node has mergeinfo for. */
+      branch_count += apr_hash_count(mergeinfo);
+
+      for (hi = apr_hash_first(iterpool, mergeinfo);
+           hi;
+           hi = apr_hash_next(hi))
+        {
+          svn_rangelist_t *ranges = apr_hash_this_val(hi);
+          range_count += ranges->nelts;
+        }
+    }
+
+  /* Show them. */
+  SVN_ERR(svn_cmdline_printf(scratch_pool,
+                             _("    Found mergeinfo on %d nodes.\n"),
+                             wc_mergeinfo->nelts));
+  SVN_ERR(svn_cmdline_printf(scratch_pool,
+                             _("    Found %d branch entries.\n"),
+                             branch_count));
+  SVN_ERR(svn_cmdline_printf(scratch_pool,
+                             _("    Found %d merged revision ranges.\n\n"),
+                             range_count));
+
+  svn_pool_destroy(iterpool);
+
+  return SVN_NO_ERROR;
+}
+
diff --git a/tools/client-side/svn-ssl-fingerprints.sh b/tools/client-side/svn-ssl-fingerprints.sh
new file mode 100755
index 0000000..828ea4a
--- /dev/null
+++ b/tools/client-side/svn-ssl-fingerprints.sh
@@ -0,0 +1,33 @@
+#!/bin/sh
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+# $0 --- list the fingerprints of SSL certificates that svn has seen before.
+#
+# SYNOPSIS:
+# $0
+# $0 /path/to/.subversion
+
+# Config dir defaults to ~/.subversion unless given as $1.
+CONFIG_DIR=${1-$HOME/.subversion}
+# Each cached server cert lives in a file named by a 32-hex-digit hash.
+for i in $CONFIG_DIR/auth/svn.ssl.server/????????????????????????????????; do
+  # Show the realm/URL line(s) of this cached entry.
+  grep :// $i
+  # Pick the long base64 certificate line, re-wrap it to 64 columns so
+  # openssl can decode it, then print the certificate's fingerprint.
+  grep '.\{80\}' $i | sed 's/\(.\{64\}\)/\1 /g' | xargs -n1 | openssl base64 -d | openssl x509 -inform der -noout -fingerprint | sed 's/=/ /' | xargs -n1
+  echo
+done
diff --git a/tools/client-side/svn-vendor.py b/tools/client-side/svn-vendor.py
new file mode 100755
index 0000000..d96aec7
--- /dev/null
+++ b/tools/client-side/svn-vendor.py
@@ -0,0 +1,1188 @@
+#!/usr/bin/python3
+# vim: set sw=4 expandtab :
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+#
+##############################################################################
+# svn-vendor.py
+#
+# Overview
+# --------
+# Replacement for svn_load_dirs.pl (included as a 'contributed utility' in
+# Subversion sources). Main difference is some heuristics in detection of
+# the renames. Note that this script does not attempt to automate remote
+# SVN operations (check-out, check-in and tagging), so it is possible to
+# review the state of sources that are about to be checked in. Another
+# difference is an ability to save the detected renames, review/re-apply
+# them.
+#
+# This script requires Python 3.3.x or higher. Sorry, I was too lazy
+# to write shell quoting routines that are already available in recent
+# Python versions.
+#
+# Using this script
+# -----------------
+# First, it is necessary to check out the working copy from the URL that
+# will host the imported sources. E.g., if the versions of FOO are being
+# imported into svn://example.com/vendor/FOO/current:
+#
+# svn co svn://example.com/vendor/FOO/current wc
+#
+# Then, unpack the sources of the version to be imported:
+#
+# tar xzf foo-1.1.tar.gz
+#
+# Examples below assume the command above created a `foo-1.1' directory.
+# After that, there are three different modes of operation:
+#
+# 1. Fully automatic
+#
+# svn-vendor.py --auto wc foo-1.1
+# svn st wc
+# svn ci wc
+#
+# In this mode, the script fully relies on its heuristics in detection of
+# renames. In many cases, it "just works". There can be spurious moves
+# detected in this mode, though. For example, consider a deleted header
+# that consists of 50 lines of GPL text, 1 line of copyright, and
+# 3 lines of declarations, and a similar unrelated header in the imported
+# sources. From the script's point of view, the files are nearly identical
+# (4 lines removed, 4 lines added, 50 lines unchanged).
+#
+# After the script completes, examine the working copy by doing 'svn diff'
+# and/or 'svn status', paying particular attention to renames. If all the
+# moves are detected correctly, check in the changes in the working copy.
+#
+# 2. Semi-automatic
+#
+# svn-vendor.py --detect moves-foo-1.1.txt wc foo-1.1
+# vi moves-foo-1.1.txt
+# svn-vendor.py --apply moves-foo-1.1.txt wc foo-1.1
+# svn ci wc
+#
+# If the fully automatic mode mis-detected some spurious moves, or did not
+# detect some renames you want to be performed, it is still possible to
+# leverage what the script has detected automatically. First command above
+# does the automatic detection, just as it does in fully automatic mode,
+# but stops short of performing any modification of the working copy.
+# The list of detected copies and renames is saved into a text file,
+# `moves-foo-1.1.txt'.
+#
+# That file can be inspected after the script finishes. Spurious moves can
+# be deleted from the file, and new copies/renames can be added. Then the
+# changes can be applied to the working copy.
+#
+# 3. Manual
+#
+# svn-vendor.py wc foo-1.1
+# (svn-vendor) detect
+# (svn-vendor) move x.c y.c
+# (svn-vendor) move include/1.h include/2.h
+# (svn-vendor) copy include/3.h include/3-copy.h
+# (svn-vendor) lsprep
+# (svn-vendor) save /tmp/renames-to-be-applied.txt
+# (svn-vendor) apply
+#
+# If the automatic detection does not help, it is possible to do the renames
+# manually (similarly to svn_load_dirs.pl). Use the 'help' command to get
+# the list of supported commands and their description. Feel free to play
+# around - since the script does not perform any remote SVN operation,
+# there is no chance to commit the changes accidentally.
+#
+# Notes
+# -----
+# I. The time for rename detection is O(Fs*Fd) + O(Ds*Dd), where Fs is
+# the number of files removed from current directory, Fd is number of files
+# added in imported sources, and Ds/Dd is the same for directories. That is,
+# the running time may become an issue if the numbers of added/removed files
+# go into a few thousands (e.g. if updating Linux kernel 2.6.35 to 3.10).
+# As a workaround, import interim releases first so that the number of
+# renames remains sane at each step. That makes reviewing the renames
+# performed by the script much easier.
+#
+# Enjoy!
+#
+##############################################################################
+
+import argparse
+import cmd
+import difflib
+import filecmp
+import os
+import readline
+import shlex
+import shutil
+import subprocess
+import sys
+
+def name_similarity(n1, n2):
+    '''
+    Function to be used as a key for sorting dirs/files by name matching
+    '''
+    # difflib's ratio() is "higher is more similar"; invert it so an
+    # ascending sort puts the best match first.
+    sm = difflib.SequenceMatcher(a=n1, b=n2)
+    return 1.0 - sm.ratio()
+
+
+def filename_sort_key(s):
+    '''
+    Function to sort filenames so that parent directory is always followed
+    by its children. Without it, [ "/a", "/a-b", "/a/b", "/a-b/c" ] would
+    not be sorted correctly.
+    '''
+    # '\001' sorts before any printable character, so "/a/b" sorts
+    # immediately after "/a" even when a sibling like "/a-b" exists.
+    return s.replace('/', '\001')
+
+
+def descendant_or_self(path, ancestor):
+    '''
+    Check if path is somewhere in hierarchy under ancestor.
+    '''
+    return path == ancestor or path.startswith(ancestor + os.sep)
+
+def path_rebase(path, old_base, new_base):
+    '''
+    Return a path name that has the same relative path to new_base as path
+    had to old_base. Assumes path is a descendant of old_base.
+    '''
+    if path == old_base:
+        return new_base
+    return os.path.normpath(os.path.join(new_base,
+                                         os.path.relpath(path, old_base)))
+
+
+def for_all_parents(path, func):
+    '''
+    Invoke func for each parent path.
+    '''
+    # Walk upwards until os.path.dirname() yields "" (top of a relative path).
+    d = os.path.dirname(path)
+    while d != "":
+        func(d)
+        d = os.path.dirname(d)
+
+class InvalidUsageException(Exception):
+    '''
+    Raised if command line arguments are invalid
+    '''
+    def __init__(self, cmd, msg):
+        # cmd is the interpreter command name (or None); onecmd() uses it
+        # to display that command's help after reporting the error.
+        Exception.__init__(self, msg)
+        self.cmd = cmd
+
+
+class NotImplementedException(Exception):
+    '''
+    Raised if some code path is not implemented
+    '''
+    pass
+
+
+# Indexes into FSO.state
+S_WC = 0
+S_IM = 1
+
+class FSO(object):
+    '''
+    File system object (file/dir either in imported dir or in WC)
+    '''
+    def __init__(self):
+        # Original WC path of this object; survives copies/moves so the
+        # origin can still be reported (see orig_reference()).
+        self.wc_path = None
+        self.state = [ "-", "-" ] # '-': absent, 'F': file, 'D': dir, 'L': symlink
+
+    def status(self):
+        # Two-character state display, e.g. "[F-]": present as a file in
+        # the WC, absent from the imported sources.
+        return "[%s%s]" % (self.state[S_WC], self.state[S_IM])
+
+    def orig_reference(self, curpath):
+        # Mention the WC origin only when it differs from the current path.
+        if self.wc_path and self.wc_path != curpath:
+            return " (original: %s)" % shlex.quote(self.wc_path)
+        return ""
+
+
+class FSOCollection(dict):
+    '''
+    Collection of FSOs, keyed by normalized relative path
+    '''
+    def print(self):
+        print(" / Status in working copy (-:absent, F:file, D:dir, L:link)")
+        print(" |/ Status in imported sources (-:absent, F:file, D:dir, L:link)")
+        for k in sorted(self.keys(), key=filename_sort_key):
+            e = self[k]
+            print("%s %s%s" % (e.status(), shlex.quote(k),
+                               e.orig_reference(k)))
+
+    def get(self, path):
+        'Get existing FSO or create a new one'
+        # NOTE(review): overrides dict.get() with an incompatible contract
+        # (no default argument, auto-creates missing entries).  Intended
+        # here, but callers must not expect standard dict.get() behavior.
+        if path in self:
+            return self[path]
+        e = FSO()
+        self[path] = e
+        return e
+
+    def add(self, path, where, kind):
+        'Adding entries during initial scan'
+        # where is S_WC or S_IM; kind is 'F', 'D' or 'L'.
+        path = os.path.normpath(path)
+        e = self.get(path)
+        e.state[where] = kind
+        if where == S_WC:
+            e.wc_path = path
+
+    def wc_copy(self, src, dst):
+        'Handle move in a working copy'
+        # Iterate over a snapshot of the keys: self.get() below may insert
+        # new entries while we are copying.
+        keys = list(self.keys())
+        for k in keys:
+            if descendant_or_self(k, src):
+                esrc = self[k]
+                if esrc.state[S_WC] == "-":
+                    continue
+                kn = path_rebase(k, src, dst)
+                edst = self.get(kn)
+                if edst.state[S_WC] != "-":
+                    # Copying into existing destination.
+                    # Caller should've checked this.
+                    raise NotImplementedException
+                edst.wc_path = esrc.wc_path
+                edst.state[S_WC] = esrc.state[S_WC]
+
+    def wc_remove(self, path):
+        'Handle removal in a working copy'
+        # Mark the whole subtree as absent in the WC view.
+        keys = list(self.keys())
+        for k in keys:
+            if descendant_or_self(k, path):
+                self[k].state[S_WC] = "-"
+
+
+class ConfigOpt(object):
+    'Helper class - single option (string)'
+    def __init__(self, value, helpmsg):
+        self.value = value
+        # helpmsg may span multiple lines; Config.print() shows each line
+        # as a '#' comment.
+        self.helpmsg = helpmsg
+
+    def set(self, new_value):
+        self.value = new_value
+
+    def __str__(self):
+        return "<none>" if self.value is None else "`%s'" % self.value
+
+
+class ConfigOptInt(ConfigOpt):
+    'Helper class - single option (integer)'
+    def set(self, new_value):
+        # Overrides ConfigOpt.set() to validate integer input.
+        try:
+            self.value = int(new_value)
+        except ValueError:
+            raise InvalidUsageException(None, "Value must be integer")
+
+    def __str__(self):
+        return "%d" % self.value
+
+
+class Config(dict):
+    '''
+    Store configuration options.
+    '''
+    def add_option(self, name, cfgopt):
+        self[name] = cfgopt
+
+    def set(self, name, value):
+        if name not in self:
+            raise InvalidUsageException(None,
+                "Unknown config variable '%s'" % name)
+        self[name].set(value)
+
+    def get(self, name):
+        # NOTE(review): overrides dict.get(); unknown names raise instead
+        # of returning a default.
+        if name not in self:
+            raise NotImplementedException()
+        return self[name].value
+
+    def print(self):
+        # Print options sorted by name, each preceded by its help text.
+        for k in sorted(self):
+            o = self[k]
+            for s in o.helpmsg.split('\n'):
+                print("# %s" % s)
+            print("%-20s: %s" % (k, str(o)))
+            print("")
+
+
+class SvnVndImport(cmd.Cmd):
+ '''
+ Main driving class.
+ '''
+ intro = "Welcome to SVN vendor import helper. " + \
+ "Type help or ? to list commands.\n"
+ prompt = "(svn-vendor) "
+ prepare_ops = []
+
+    def __init__(self, wcdir, importdir, svninfo):
+        # wcdir: checked-out working copy; importdir: unpacked new sources;
+        # svninfo: output of 'svn info' for the WC (kept for reference).
+        cmd.Cmd.__init__(self)
+        self.wcdir = wcdir
+        self.importdir = importdir
+        self.svninfo = svninfo
+        self.config = Config()
+        self.config.add_option('symlink-handling',
+            ConfigOpt("as-is", "How symbolic links are handled;\n" +
+                " 'dereference' treats as normal files/dirs (and " +
+                "ignores dangling links);\n" +
+                " 'as-is' imports as symlinks"))
+        self.config.add_option('exec-permission',
+            ConfigOpt("preserve", "How 'executable' permission bits " +
+                "are handled;\n" +
+                " 'preserve' sets svn:executable property as in " +
+                "imported sources;\n" +
+                " 'clear' removes svn:executable on all new files " +
+                "(but keeps it intact on existing files)."))
+        self.config.add_option('save-diff-copied',
+            ConfigOpt(None, "Save 'svn diff' output on the " +
+                "moved/copied files and directories to this " +
+                "file as part of 'apply'"))
+        self.config.add_option('dir-similarity',
+            ConfigOptInt(600, "Similarity between dirs to assume " +
+                "a copy/move [0..1000]"))
+        self.config.add_option('file-similarity',
+            ConfigOptInt(600, "Similarity between files to assume a " +
+                "copy/move [0..1000]"))
+        self.config.add_option('file-min-lines',
+            ConfigOptInt(10, "Minimal number of lines in a file for " +
+                "meaningful comparison"))
+        self.config.add_option('verbose',
+            ConfigOptInt(3, "Verbosity of the output [0..5]"))
+        try:
+            self.termwidth = os.get_terminal_size()[0]
+        except OSError:
+            # Not running in a terminal - probably redirected to file
+            self.termwidth = 150 # arbitrary number
+
+    def info(self, level, msg):
+        'Print message with specified verbosity'
+        # flush=True keeps progress visible even when stdout is redirected.
+        if level <= self.config.get('verbose'):
+            print(msg, flush=True)
+
+    def scan(self):
+        # (Re)build the FSO collection from both trees; symlinks in the
+        # imported tree are dereferenced only when configured so.
+        self.items = FSOCollection()
+        self.info(1, "Scanning working copy directory...")
+        self.get_lists(self.wcdir, S_WC, False)
+        self.info(1, "Scanning imported directory...")
+        self.get_lists(self.importdir, S_IM,
+            self.config.get('symlink-handling') == "dereference")
+
+    def get_lists(self, top, where, deref):
+        # Walk the tree rooted at 'top' and record every file/dir/symlink
+        # into self.items under the 'where' slot (S_WC or S_IM).
+        for d, dn, fn in os.walk(top, followlinks=deref):
+            dr = os.path.relpath(d, top)
+            # If under .svn directory at the top (SVN 1.7+) or has .svn
+            # in the path (older SVN), ignore
+            if descendant_or_self(dr, '.svn') or \
+                    os.path.basename(dr) == '.svn' or \
+                    (os.sep + '.svn' + os.sep) in dr:
+                continue
+            if dr != '.':
+                self.items.add(dr, where, "D")
+            dnn = [] # List where we'll descend
+            for f in fn + dn:
+                fr = os.path.normpath(os.path.join(dr, f))
+                frp = os.path.join(d, f)
+                if os.path.islink(frp):
+                    if deref:
+                        # Dereferencing:
+                        # - check for dangling/absolute/out-of-tree symlinks and skip them
+                        rl = os.readlink(frp)
+                        if not os.path.exists(frp):
+                            self.info(1, "WARN: Ignoring dangling symlink %s -> %s" % (fr, rl))
+                            continue
+                        if os.path.isabs(rl):
+                            self.info(1, "WARN: Ignoring absolute symlink %s -> %s" % (fr, rl))
+                            continue
+                        tgt = os.path.normpath(os.path.join(dr, rl))
+                        if tgt == ".." or tgt.startswith(".." + os.sep):
+                            self.info(1, "WARN: Ignoring out-of-wc symlink %s -> %s" % (fr, rl))
+                            continue
+                    else:
+                        # Importing symlinks as-is, no need to check.
+                        self.items.add(fr, where, "L")
+                        continue
+                # If we get here, treat symlinks to files as regular files, and add directories
+                # to the list of traversed subdirs
+                if os.path.isfile(frp):
+                    self.items.add(fr, where, "F")
+                if os.path.isdir(frp):
+                    dnn.append(f)
+            # Prune os.walk's descent list in place.
+            dn[:] = dnn
+
+    def onecmd(self, str):
+        'Override for checking number of arguments'
+        # NOTE(review): the parameter name 'str' shadows the builtin;
+        # kept as-is since renaming would alter the code.
+        try:
+            return cmd.Cmd.onecmd(self, str)
+        except InvalidUsageException as e:
+            if e.cmd is not None:
+                # The failing command is known: show its help text.
+                print("!!! Invalid usage of `%s' command: %s" % (e.cmd, e))
+                print("")
+                self.onecmd("help " + e.cmd)
+            else:
+                print("!!! %s" % e)
+
+    def parse_args(self, line, nargs, cmd):
+        'Parse arguments for a command'
+        # 'cmd' is the command name for error reporting; it shadows the
+        # imported 'cmd' module inside this method.
+        args = shlex.split(line)
+        if len(args) != nargs:
+            raise InvalidUsageException(cmd, "expect %d arguments" % nargs)
+        return args
+
+    def run_svn(self, args_fixed, args_split=[]):
+        'Run SVN command(s), potentially splitting long argument lists'
+        # Returns (all invocations succeeded, concatenated stdout).
+        # The mutable default for args_split is harmless here: it is only
+        # read, then rebound to a fresh list by the map() below.
+        rv = True
+        pos = 0
+        atatime = 100
+        output = ""
+        # svn treats '@' specially (peg revision); if there's such character in a
+        # file name - append an empty peg revision
+        args_fixed = list(map(lambda x: x + "@" if x.find("@") != -1 else x, args_fixed))
+        args_split = list(map(lambda x: x + "@" if x.find("@") != -1 else x, args_split))
+        while pos < len(args_split) or (pos == 0 and len(args_split) == 0):
+            # At most 'atatime' split arguments per invocation, to stay
+            # clear of OS command line length limits.
+            svnargs = ['svn'] + args_fixed + args_split[pos : pos + atatime]
+            pos += atatime
+            self.info(5, "Running: " + " ".join(map(shlex.quote, svnargs)))
+            p = subprocess.Popen(args=svnargs, stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE, cwd=self.wcdir)
+            so, se = p.communicate()
+            if p.returncode != 0:
+                print("`%s' exited with %d status:" %
+                    (" ".join(map(shlex.quote, svnargs)), p.returncode))
+                print(se.decode())
+                rv = False
+            else:
+                output += so.decode()
+        return rv, output
+
+    def copy_or_move(self, op, src, dst):
+        'Handle copy or move operation'
+        # op is "cp" or "mv"; the operation is only recorded in
+        # self.prepare_ops and mirrored in self.items - no svn is run here.
+        if src not in self.items or self.items[src].state[S_WC] == "-":
+            raise InvalidUsageException(None,
+                "Nothing known about `%s'" % src)
+        if dst in self.items and self.items[dst].state[S_WC] != "-":
+            raise InvalidUsageException(None,
+                "Destination path `%s' already exists" % dst)
+        # Check that we're not creating dst under a file (not a dir)
+        new_dirs = []
+        def check_parent(d):
+            if d not in self.items or self.items[d].state[S_WC] == "-":
+                new_dirs.append(d)
+            elif self.items[d].state[S_WC] == "F":
+                # NOTE(review): '%' binds tighter than '+', so only
+                # "which is a file" is formatted; evaluating this message
+                # raises TypeError.  The concatenation should be
+                # parenthesized before the '%' operator.
+                raise InvalidUsageException(None,
+                    "Destination path `%s' created under `%s' " +
+                    "which is a file" % (dst, d))
+        for_all_parents(dst, check_parent)
+        # All ok, record new directories that may be created
+        for d in new_dirs:
+            self.items.get(d).state[S_WC] = "D"
+        # Record the operation and update the FSO collection
+        self.prepare_ops.append((op, src, dst))
+        self.items.wc_copy(src, dst)
+        if op == "mv":
+            self.items.wc_remove(src)
+
+    def remove(self, path):
+        # Record a pending removal and mark the subtree absent in the WC view.
+        if path not in self.items or self.items[path].state[S_WC] == "-":
+            raise InvalidUsageException(None,
+                "Nothing known about `%s'" % path)
+        self.prepare_ops.append(("rm", path))
+        self.items.wc_remove(path)
+
+    def similarity_file(self, src, dst, threshold, lst_removal):
+        'Compare two files, return similarity ratio on 0..1000 scale'
+        # threshold and lst_removal are unused here; presumably the
+        # signature mirrors similarity_dir() so callers can treat both
+        # comparators uniformly -- TODO confirm against callers.
+        if self.items[src].state[S_WC] != "F":
+            return 0
+        # Source is in working copy
+        fn1 = os.path.join(self.wcdir, self.items[src].wc_path)
+        # Destination is in imported dir
+        fn2 = os.path.join(self.importdir, dst)
+        minlines = self.config.get('file-min-lines')
+        try:
+            # Text mode with the platform default encoding; a decode
+            # failure falls through to the binary comparison below.
+            f1 = open(fn1, 'r')
+            l1 = f1.readlines()
+            f1.close()
+            if len(l1) < minlines:
+                return 0
+            f2 = open(fn2, 'r')
+            l2 = f2.readlines()
+            f2.close()
+            if len(l2) < minlines:
+                return 0
+            sm = difflib.SequenceMatcher(a=l1, b=l2)
+            return int(1000 * sm.quick_ratio())
+        except UnicodeDecodeError:
+            # Oops, file seems to be binary. Fall back to comparing whole
+            # file contents.
+            if filecmp.cmp(fn1, fn2, shallow=False):
+                return 1000
+            return 0
+
+ def _similarity_dir(self, src, dst, get_file_similarity, lst_removal):
+ 'Iterate over FSOs, using callback to compare file entries'
+ common = 0
+ total = 0
+ for xsrc in self.items:
+ if xsrc.startswith(src + os.sep):
+ esrc = self.items[xsrc]
+ if esrc.state[S_WC] == "-":
+ # Source not in WC - ignore for similarity calculation
+ continue
+ skip = False
+ if lst_removal is not None:
+ for i in lst_removal:
+ if descendant_or_self(xsrc, i):
+ skip = True
+ if skip:
+ # Moved to another place, do not consider in score
+ continue
+ total += 1000
+ xdst = path_rebase(xsrc, src, dst)
+ if xdst not in self.items:
+ # Destination not in imported sources - non-similar item
+ continue
+ edst = self.items[xdst]
+ if edst.state[S_IM] == esrc.state[S_WC]:
+ if esrc.state[S_WC] == "D":
+ common += 1000
+ else:
+ common += get_file_similarity(xsrc, xdst)
+ if total == 0:
+ # No files/subdirs in source directory - avoid copying empty dirs
+ return 0
+ return 1000 * common / total
+
+ def similarity_dir(self, src, dst, threshold, lst_removal):
+ '''
+ Compare two dirs recursively, return similarity ratio on
+ 0..1000 scale.
+ '''
+ common = 0
+ total = 0
+ # Quickly estimate upper boundary by comparing file names. Only
+ # concern ourselves with files in source directory. I.e., if
+ # files were added after the move in the destination directory,
+ # it's ok. If most of the files from the source directory were
+ # removed, the directory is not considered similar - instead,
+ # file move detection would move files one by one.
+ upper = self._similarity_dir(src, dst, lambda s, d: 1000, lst_removal)
+ if upper <= threshold:
+ # Even the best estimate is worse than current cut-off
+ return 0
+ # Okay, looks roughly similar. Now redo the above procedure, but also
+ # compare the file content.
+ return self._similarity_dir(src, dst,
+ lambda s, d: self.similarity_file(s, d, 0, lst_removal),
+ lst_removal)
+
+ def similar(self, src, dst, threshold=0, lst_removal=None):
+ 'Compare two FSOs, source in WC and destination in imported dir'
+ if src not in self.items:
+ print("Source `%s' not in the working copy" % src)
+ return
+ xsrc = self.items[src]
+ if xsrc.state[S_WC] == "-":
+ print("Source `%s' not in the working copy" % src)
+ return
+ if dst not in self.items:
+ print("Destination `%s' not in imported sources" % dst)
+ return
+ xdst = self.items[dst]
+ if xdst.state[S_IM] == "-":
+ print("Destination `%s' not in imported sources" % dst)
+ return
+ if xsrc.state[S_WC] != xdst.state[S_IM]:
+ # Different kinds - definitely not the same object
+ return 0
+ if xsrc.state[S_WC] == "L" or xdst.state[S_IM] == "L":
+ # Symlinks are not considered the same object (same target in
+ # different dirs refers to different objects).
+ return 0
+ if xsrc.state[S_WC] == "D":
+ return self.similarity_dir(src, dst, threshold, lst_removal)
+ else:
+ return self.similarity_file(src, dst, threshold, lst_removal)
+
+ def handle_op(self, op_tuple):
+ 'Handle one SVN operation, recorded as a tuple'
+ def x_mv(src, dst):
+ self.info(2, " Move `%s' to `%s'" % (src, dst))
+ self.copy_or_move("mv", src, dst)
+ def x_cp(src, dst):
+ self.info(2, " Copy `%s' to `%s'" % (src, dst))
+ self.copy_or_move("cp", src, dst)
+ def x_rm(path):
+ self.info(2, " Remove `%s'" % path)
+ self.remove(path)
+ known_ops = {
+ # key: (nargs, handler)
+ 'cp' : (3, x_cp),
+ 'mv' : (3, x_mv),
+ 'rm' : (2, x_rm),
+ }
+ if len(op_tuple) == 0:
+ raise InvalidUsageException
+ op = op_tuple[0]
+ if op not in known_ops:
+ return False
+ nargs, func = known_ops[op]
+ if nargs != len(op_tuple):
+ return False
+ func(*op_tuple[1:])
+ return True
+
+ def detect(self, thresholds):
+ 'Helper for finding copy/move destinations'
+ ilst = []
+ wlst = {}
+ ilst_map = {}
+ for p in self.items:
+ e = self.items[p]
+ if e.state[S_WC] != "-" and e.state[S_IM] == "-":
+ wlst[p] = [] # wlst hash stores copy destinations
+ elif e.state[S_WC] == "-" and e.state[S_IM] != "-":
+ # ilst just lists destination paths as tuples with node kind
+ ilst.append((e.state[S_IM], p))
+ iteration = 0
+ # Do not apply operations immediately - we'll need to post-process
+ # them to account for files/dirs moved inside a moved parent dir.
+ ops = []
+ to_be_removed = []
+ def get_renamed_name(path, rename_ops):
+ '''
+ Check if path was renamed/removed in the recorded operations,
+ return new name.
+ '''
+ for op_tuple in rename_ops:
+ # Since copies do not remove the source file, ignore them.
+ # We push no 'rm' ops in this function
+ if op_tuple[0] == "mv":
+ src = op_tuple[1]
+ dst = op_tuple[2]
+ if descendant_or_self(path, src):
+ path = path_rebase(path, src, dst)
+ return path
+
+ while len(wlst):
+ iteration += 1
+ self.info(2, ("Iteration %d: Possible sources: %d, " +
+ "possible destinations: %d") %
+ (iteration, len(wlst), len(ilst)))
+ ndst = len(ilst)
+ for idx, (nk, dst) in enumerate(sorted(ilst,
+ key=lambda s: filename_sort_key(s[1]))):
+ class SkipDestFile(Exception):
+ pass
+ # Check if moved as a part of a parent directory.
+ def check_moved_parent(xdst):
+ if xdst in ilst_map:
+ src = path_rebase(dst, xdst, ilst_map[xdst])
+ # Did it exist in copied directory?
+ if src in self.items and \
+ self.items[src].state[S_WC] == nk:
+ sim = self.similar(src, dst, thresholds[nk],
+ to_be_removed)
+ if sim > thresholds[nk]:
+ self.info(2, (" [%04d/%04d] Skipping `%s' " +
+ "(copied as part of `%s')") %
+ (idx, ndst, dst, xdst))
+ raise SkipDestFile
+ # Copied, not similar - search for other sources
+ raise StopIteration
+ try:
+ for_all_parents(dst, check_moved_parent)
+ except SkipDestFile:
+ continue
+ except StopIteration:
+ pass
+ self.info(2, (" [%04d/%04d] Looking for possible source " +
+ "for `%s'") % (idx, ndst, dst))
+ bestsrc = None
+ # Won't even consider those lower than threshold
+ bestsim = thresholds[nk]
+ for src in sorted(wlst.keys(),
+ key=lambda x: name_similarity(x, dst)):
+ sim = self.similar(src, dst, bestsim, to_be_removed)
+ if sim > bestsim:
+ self.info(3, " [similarity %4d] %s" % (sim, src))
+ bestsim = sim
+ bestsrc = src
+ if bestsim == 1000:
+ # No chance we're finding anything better
+ break
+ if bestsrc is not None:
+ wlst[bestsrc].append(dst)
+ ilst_map[dst] = bestsrc
+
+ # Discovered all copies/moves, now record them.
+ new_wlst = {}
+ for src in sorted(wlst.keys(), key=filename_sort_key):
+ dlist = wlst[src]
+ if len(dlist) == 0:
+ continue
+ if len(dlist) == 1:
+ ops.append(("mv", src, dlist[0]))
+ to_be_removed.append(src)
+ else:
+ # We don't remove the source here, it will be done when
+ # the changes are applied (it will remove all the WC files
+ # not found in imported sources). Avoiding removal here
+ # simplifies operation sorting below, since we would not
+ # be concerned with source file/dir disappearing before
+ # it is copied to its destination.
+ to_be_removed.append(src)
+ for d in dlist:
+ ops.append(("cp", src, d))
+ # If we copied something - recheck parent source directories.
+ # Since some source file/dir was scheduled to be removed,
+ # this may have increased the similarity to some destination.
+ def recheck_parent(x):
+ if x in wlst and len(wlst) == 0:
+ new_wlst[x] = []
+ for_all_parents(src, recheck_parent)
+
+ # At this point, if we're going to have the next iteration, we
+ # are only concerned about directories (by the way new_wlst is
+ # created above). So, filter out all files from ilst as well.
+ wlst = new_wlst
+ ilst = list(filter(lambda t: t[0] == 'D', ilst))
+
+ # Finished collecting the operations - now can post-process and
+ # apply them. First, sort copies/moves by destination (so that
+ # parent directories are created before files/subdirs are
+ # copied/renamed inside)
+ ops = sorted(ops, key=lambda op: filename_sort_key(op[2]))
+ for i, op_tuple in enumerate(ops):
+ # For each operation, go over its precedents to see if the source
+ # has been renamed. If it is, find out new name.
+ op = op_tuple[0]
+ src = get_renamed_name(op_tuple[1], reversed(ops[:i]))
+ if src != op_tuple[2]:
+ # Unless it became the same file after renames
+ try:
+ # Try to remove the destination, if it existed
+ self.remove(op_tuple[2])
+ except InvalidUsageException:
+ # Okay, it didn't exist
+ pass
+ self.handle_op((op, src, op_tuple[2]))
+
+ def do_detect(self, arg):
+ '''
+ detect : auto-detect possible moves (where source/destination name
+ is unique). If not all moves are applicable, save move list,
+ edit and load.
+ '''
+ self.parse_args(arg, 0, "detect")
+ # Configurable for file/dirs; symlinks are never similar.
+ self.detect({ "D": self.config.get('dir-similarity'),
+ "F": self.config.get('file-similarity'),
+ "L": 1001 })
+
+ def do_apply(self, arg):
+ '''
+ apply : Perform copies/renames; then copy imported sources into
+ the working copy. Modifies working copy. Exits after
+ completion.
+ '''
+ self.info(1, "Copying imported sources into working copy...")
+ # Perform the recorded copies/moves/removals
+ self.info(2, " Preparatory operations (copies/renames/removals)")
+ to_be_diffed = []
+ for o in self.prepare_ops:
+ op = o[0]
+ if op == "mv":
+ self.run_svn(["mv", "--parents", o[1], o[2]])
+ to_be_diffed.append(o[2])
+ elif op == "cp":
+ self.run_svn(["cp", "--parents", o[1], o[2]])
+ to_be_diffed.append(o[2])
+ elif op == "rm":
+ # --force, as the removed path is likely created as a result
+ # of previous copy/rename
+ self.run_svn(["rm", "--force", o[1]])
+ dirs_added = []
+ dirs_removed = []
+ files_added = []
+ files_removed = []
+ files_set_exec = []
+ files_clear_exec = []
+
+ self.info(2, " Creating dirs and copying files...")
+ def copyfile_helper(i, nk_wc):
+ '''Helper: copy a file and optionally, transfer permissions.'''
+ f = os.path.join(self.importdir, i)
+ t = os.path.join(self.wcdir, i)
+ shutil.copyfile(f, t)
+ # If exec-permission is 'clear', we don't need to do anything:
+ # shutil.copyfile will create the file as non-executable.
+ if self.config.get('exec-permission') == 'preserve':
+ # If the file is new, just copying the mode is enough:
+ # svn will set the svn:executable upon adding it.
+ if nk_wc == "F":
+ # Existing file, check what the setting shall be
+ if os.access(f, os.X_OK) and not os.access(t, os.X_OK):
+ files_set_exec.append(i)
+ elif not os.access(f, os.X_OK) and os.access(t, os.X_OK):
+ files_clear_exec.append(i)
+ shutil.copymode(f, t)
+
+ for i in sorted(self.items.keys()):
+ e = self.items[i]
+ nk_wc = e.state[S_WC]
+ nk_im = e.state[S_IM]
+ flg = None
+ if nk_wc == "-":
+ # Absent in working copy
+ if nk_im == "D":
+ # Directory added
+ os.mkdir(os.path.join(self.wcdir, i))
+ dirs_added.append(i)
+ flg = "(added dir)"
+ elif nk_im == "F":
+ # New file added
+ copyfile_helper(i, nk_wc);
+ files_added.append(i)
+ flg = "(added file)"
+ elif nk_im == "L":
+ tim = os.readlink(os.path.join(self.importdir, i))
+ os.symlink(tim, os.path.join(self.wcdir, i))
+ files_added.append(i)
+ flg = "(added symlink)"
+ else:
+ # Not in imported sources, not in WC (moved
+ # away/removed) - nothing to do
+ pass
+ elif nk_wc == "L":
+ # Symbolic link in a working copy
+ if nk_im == "L":
+ # Symbolic link in both. If the same target, do nothing. Otherwise,
+ # replace.
+ twc = os.readlink(os.path.join(self.wcdir, i))
+ tim = os.readlink(os.path.join(self.importdir, i))
+ if tim != twc:
+ self.run_svn(["rm", "--force", i])
+ os.symlink(tim, os.path.join(self.wcdir, i))
+ files_added.append(i)
+ flg = "(replaced symlink)"
+ elif nk_im == "D":
+ # Was a symlink, now a directory. Replace.
+ self.run_svn(["rm", "--force", i])
+ os.mkdir(os.path.join(self.wcdir, i))
+ dirs_added.append(i)
+ flg = "(replaced symlink with dir)"
+ elif nk_im == "F":
+ # Symlink replaced with file.
+ self.run_svn(["rm", "--force", i])
+ copyfile_helper(i, nk_wc);
+ files_added.append(i)
+ flg = "(replaced symlink with file)"
+ else:
+ # Was a symlink, removed
+ files_removed.append(i)
+ flg = "(removed symlink)"
+ elif nk_wc == "F":
+ # File in a working copy
+ if nk_im == "D":
+ # File replaced with a directory. See comment above.
+ self.run_svn(["rm", "--force", i])
+ os.mkdir(os.path.join(self.wcdir, i))
+ dirs_added.append(i)
+ flg = "(replaced file with dir)"
+ elif nk_im == "F":
+ # Was a file, is a file - just copy contents
+ copyfile_helper(i, nk_wc);
+ flg = "(copied)"
+ elif nk_im == "L":
+ # Was a file, now a symlink. Replace.
+ self.run_svn(["rm", "--force", i])
+ tim = os.readlink(os.path.join(self.importdir, i))
+ os.symlink(tim, os.path.join(self.wcdir, i))
+ files_added.append(i)
+ flg = "(replaced file with symlink)"
+ else:
+ # Was a file, removed
+ files_removed.append(i)
+ flg = "(removed file)"
+ elif nk_wc == "D":
+ # Directory in a working copy
+ if nk_im == "D":
+ # Was a directory, is a directory - nothing to do
+ pass
+ elif nk_im == "F":
+ # Directory replaced with file. Need to remove dir
+ # immediately, as bulk removals/additions assume new files
+ # and dirs already in place. Also, removing a directory
+ # removes all its descendants - mark them as removed.
+ self.run_svn(["rm", "--force", i])
+ self.items.wc_remove(i)
+ copyfile_helper(i, nk_wc);
+ files_added.append(i)
+ flg = "(replaced dir with file)"
+ elif nk_im == "L":
+ # Was a directory, now a symlink. Replace.
+ self.run_svn(["rm", "--force", i])
+ self.items.wc_remove(i)
+ tim = os.readlink(os.path.join(self.importdir, i))
+ os.symlink(tim, os.path.join(self.wcdir, i))
+ files_added.append(i)
+ flg = "(replaced dir with symlink)"
+ else:
+ # Directory removed
+ dirs_removed.append(i)
+ flg = "(removed dir)"
+ if flg is not None:
+ self.info(4, " %s %s %s" % (e.status(), i, flg))
+ # Filter files/directories removed as a part of parent directory
+ files_removed = list(filter(lambda x: os.path.dirname(x) not in
+ dirs_removed, files_removed))
+ dirs_removed = list(filter(lambda x: os.path.dirname(x) not in
+ dirs_removed, dirs_removed))
+ files_added = list(filter(lambda x: os.path.dirname(x) not in
+ dirs_added, files_added))
+ dirs_added = list(filter(lambda x: os.path.dirname(x) not in
+ dirs_added, dirs_added))
+ self.info(2, " Running SVN add/rm/propset/propdel commands");
+ if len(dirs_added):
+ self.run_svn(["add"], dirs_added)
+ if len(files_added):
+ self.run_svn(["add"], files_added)
+ if len(dirs_removed):
+ self.run_svn(["rm"], dirs_removed)
+ if len(files_removed):
+ self.run_svn(["rm"], files_removed)
+ if len(files_set_exec):
+ self.run_svn(["propset", "svn:executable", "*"], files_set_exec)
+ if len(files_clear_exec):
+ self.run_svn(["propdel", "svn:executable"], files_clear_exec)
+ # Save the diff for the copied/moved items
+ diff_save = self.config.get('save-diff-copied')
+ if diff_save is not None:
+ self.info(2, " Saving 'svn diff' on copied files/dirs to `%s'" %
+ diff_save)
+ to_be_diffed = list(filter(lambda x: os.path.dirname(x) not in
+ to_be_diffed, to_be_diffed))
+ if len(to_be_diffed):
+ try:
+ rv, out = self.run_svn(["diff"], to_be_diffed)
+ except UnicodeDecodeError:
+ # Some binary files not marked with appropriate MIME type,
+ # or broken text files
+ rv, out = (True, "WARNING: diff contained binary files\n")
+ else:
+ rv, out = (True, "")
+ if rv:
+ f = open(diff_save, "w")
+ f.write(out)
+ f.close()
+ # Exiting, as the resulting working copy can no longer be used
+ # for move analysis
+ self.info(1, "Done. Exiting; please examine the working copy " +
+ "and commit.")
+ return True
+
+ def do_similarity(self, arg):
+ '''
+ similarity SRD DST : estimate whether SRC could be potential source
+ for DST (0=no match, 1000=perfect match)
+ '''
+ src, dst = self.parse_args(arg, 2, "similarity")
+ sim = self.similar(src, dst)
+ if sim is not None:
+ print("Similarity between source `%s' and destination `%s': %4d" %
+ (src, dst, sim))
+
+ def do_set(self, arg):
+ '''
+ set : display current settings
+ set CFG VAL : set a config variable
+ '''
+ if arg.strip() == '':
+ self.config.print()
+ else:
+ cfg, val = self.parse_args(arg, 2, "set")
+ self.config.set(cfg, val)
+
+ def do_move(self, arg):
+ '''
+ move SRC DST : Perform a move from source to destination
+ '''
+ src, dst = self.parse_args(arg, 2, "move")
+ self.copy_or_move("mv", src, dst)
+
+ def do_copy(self, arg):
+ '''
+ copy SRC DST : Perform a copy from source to destination
+ '''
+ src, dst = self.parse_args(arg, 2, "copy")
+ self.copy_or_move("cp", src, dst)
+
+ def do_remove(self, arg):
+ '''
+ remove PATH : Remove a path
+ '''
+ path = self.parse_args(arg, 1, "remove")[0]
+ self.copy_or_move("rm", path)
+
    def do_lsprep(self, arg):
        '''
        lsprep : List the currently recorded moves/copies/removals
        '''
        self.parse_args(arg, 0, "lsprep")
        # Two equal columns (source/destination) in the remaining width;
        # 14 columns are taken by the index and op fields.
        colsz = int((self.termwidth - 14) / 2)
        if len(self.prepare_ops):
            print("Currently recorded preparatory operations:")
            print("")
            # Header row, left-aligned to match the data rows below.
            print("%5s %s %-*s %-*s" %
                  ("#", "Op", colsz, "Source", colsz, "Destination"))
            # NOTE(review): 'id' shadows the builtin of the same name.
            for id, o in enumerate(self.prepare_ops):
                if id % 10 == 0:
                    # Re-print a separator every 10 rows for readability.
                    print("%5s %s %*s %*s" %
                          ("-"*5, "--", colsz, "-"*colsz, colsz, "-"*colsz))
                if len(o) == 3:
                    # (op, src, dst) - copy or move
                    print("%5d %s %-*s %-*s" %
                          (id, o[0], colsz, o[1], colsz, o[2]))
                else:
                    # (op, path) - removal
                    print("%5d %s %-*s" % (id, o[0], colsz, o[1]))
            print("")
        else:
            print("No copies/moves/removals recorded")
            print("")
+
+ def do_save(self, arg):
+ '''
+ save FILENAME : Save current preparation operations to a file
+ '''
+ fn = self.parse_args(arg, 1, "save")[0]
+ f = open(fn, 'w')
+ longestname = 0
+ for o in self.prepare_ops:
+ if len(o[1]) > longestname:
+ longestname = len(o[1])
+ if len(o) == 3 and len(o[2]) > longestname:
+ longestname = len(o[2])
+ for o in self.prepare_ops:
+ if len(o) == 2:
+ f.write("svn %s %-*s\n" %
+ (o[0], longestname, shlex.quote(o[1])))
+ else:
+ f.write("svn %s %-*s %-*s\n" %
+ (o[0], longestname, shlex.quote(o[1]),
+ longestname, shlex.quote(o[2])))
+ pass
+ f.close()
+
+ def do_load(self, arg):
+ '''
+ load FILENAME : Load/append preparation operations from a file
+ '''
+ fn = self.parse_args(arg, 1, "load")[0]
+ self.info(1, "Performing operations from `%s'" % fn)
+ f = open(fn, 'r')
+ for l in f.readlines():
+ if l[0] == '#':
+ continue
+ args = shlex.split(l)
+ try:
+ if len(args) < 2 or args[0] != 'svn':
+ raise InvalidUsageException(None, "")
+ self.handle_op(args[1:])
+ except InvalidUsageException as e:
+ # Rethrow
+ raise InvalidUsageException(None,
+ "Invalid line in file: %s(%s)" % (l, e))
+ f.close()
+
+ def do_svninfo(self, arg):
+ '''
+ svninfo : Display SVN info on the working copy (debug)
+ '''
+ self.parse_args(arg, 0, "svninfo")
+ print(str(self.svninfo))
+
    def do_printlst(self, arg):
        '''
        printlst WHAT : Print list of files; WHAT is one of {dir,file} (debug)
        '''
        # NOTE(review): despite the docstring, the WHAT argument is not
        # consumed (parse_args is told to expect 0 args) and the whole
        # collection is printed - confirm whether WHAT was ever honoured.
        self.parse_args(arg, 0, "printlst")
        self.items.print()
+
    def do_help(self, arg):
        '''
        help [COMMAND] : Print the help message
        '''
        # Delegate to cmd.Cmd, which prints the matching docstring(s).
        cmd.Cmd.do_help(self, arg)
+
    def do_EOF(self, arg):
        '''
        Quit the script
        '''
        # Returning True tells cmd.Cmd's command loop to stop.
        return True
+
    def do_quit(self, arg):
        '''
        quit : Quit the script
        '''
        # Returning True tells cmd.Cmd's command loop to stop.
        return True
+
+
if __name__ == '__main__':
    # Command-line driver: parse arguments, verify the working copy,
    # then run the import either automatically or interactively.
    parser = argparse.ArgumentParser(
        description="Prepare a working copy for SVN vendor import.")
    parser.add_argument('wcdir',
        help="Path to working copy (destination of import)")
    parser.add_argument('importdir',
        help="Path to imported sources (source of import)")
    # The three operation modes are mutually exclusive.
    grp = parser.add_mutually_exclusive_group()
    grp.add_argument('--auto', action='store_true',
        help="Automatic mode: detect moves, apply them and copy sources")
    grp.add_argument('--detect', metavar='FILE',
        help="Semi-automatic mode: detect moves and save them to FILE")
    grp.add_argument('--apply', metavar='FILE',
        help="Semi-automatic mode: apply the moves from FILE " +
             "and copy the sources")
    parser.add_argument('--save', metavar='FILE',
        help="Automatic mode: save moves to FILE after detection, " +
             "then proceed to apply the changes")
    parser.add_argument('--config', metavar=('OPT', 'VALUE'), action='append',
        nargs=2, help="Set configuration option OPT to VALUE")
    args = parser.parse_args()
    # Sanity check: wcdir must be an SVN working copy ('svn info' succeeds).
    p = subprocess.Popen(args=['svn', 'info', args.wcdir],
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    so, se = p.communicate()
    if p.returncode != 0:
        print("%s: does not appear to be SVN working copy." % args.wcdir)
        print("`svn info' exited with status %d and returned:" % p.returncode)
        print("")
        print(se.decode())
        sys.exit(1)
    imp = SvnVndImport(args.wcdir, args.importdir, so.decode())
    # Apply any --config OPT VALUE pairs before scanning.
    if args.config:
        try:
            for o, v in args.config:
                imp.config.set(o, v)
        except InvalidUsageException as e:
            parser.error(e)
    imp.scan()
    if args.auto:
        # Fully automatic: detect, optionally save, then apply.
        imp.onecmd("detect")
        if args.save:
            imp.onecmd("save " + shlex.quote(args.save))
        imp.onecmd("apply")
    elif args.detect:
        # Detect and save only; user edits the file and re-runs with --apply.
        imp.onecmd("detect")
        imp.onecmd("save " + shlex.quote(args.detect))
    elif args.apply:
        # Load a previously saved (possibly hand-edited) move list and apply.
        imp.onecmd("load " + shlex.quote(args.apply))
        imp.onecmd("apply")
    else:
        # No mode flag: drop into the interactive command loop.
        imp.cmdloop()
diff --git a/tools/client-side/svn-viewspec.py b/tools/client-side/svn-viewspec.py
new file mode 100755
index 0000000..cdcd495
--- /dev/null
+++ b/tools/client-side/svn-viewspec.py
@@ -0,0 +1,348 @@
+#!/usr/bin/env python
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+
+"""\
+__SCRIPTNAME__: checkout utility for sparse Subversion working copies
+
+Usage: 1. __SCRIPTNAME__ checkout VIEWSPEC-FILE TARGET-DIR
+ 2. __SCRIPTNAME__ examine VIEWSPEC-FILE
+ 3. __SCRIPTNAME__ help
+ 4. __SCRIPTNAME__ help-format
+
+VIEWSPEC-FILE is the path of a file whose contents describe a
+Subversion sparse checkout layout, or '-' if that description should
+be read from stdin. TARGET-DIR is the working copy directory created
+by this script as it checks out the specified layout.
+
+1. Parse VIEWSPEC-FILE and execute the necessary 'svn' command-line
+ operations to build out a working copy tree at TARGET-DIR.
+
+2. Parse VIEWSPEC-FILE and dump out a human-readable representation of
+ the tree described in the specification.
+
+3. Show this usage message.
+
+4. Show information about the file format this program expects.
+
+"""
+
+FORMAT_HELP = """\
+Viewspec File Format
+====================
+
+The viewspec file format used by this tool is a collection of headers
+(using the typical one-per-line name:value syntax), followed by an
+empty line, followed by a set of one-per-line rules.
+
+The headers must contain at least the following:
+
+ Format - version of the viewspec format used throughout the file
+ Url - base URL applied to all rules; tree checkout location
+
+The following headers are optional:
+
+ Revision - version of the tree items to checkout
+
+Following the headers and blank line separator are the path rules.
+The rules are a list of URLs -- relative to the base URL stated in the
+headers -- with optional annotations to specify the desired working
+copy depth of each item:
+
+ PATH/** - checkout PATH and all its children to infinite depth
+ PATH/* - checkout PATH and its immediate children
+ PATH/~ - checkout PATH and its file children
+ PATH - checkout PATH non-recursively
+
+By default, the top-level directory (associated with the base URL) is
+checked out with empty depth. You can override this using the special
+rules '**', '*', and '~' as appropriate.
+
+It is not necessary to explicitly list the parent directories of each
+path associated with a rule. If the parent directory of a given path
+is not "covered" by a previous rule, it will be checked out with empty
+depth.
+
+Examples
+========
+
+Here's a sample viewspec file:
+
+ Format: 1
+ Url: http://svn.apache.org/repos/asf/subversion
+ Revision: 36366
+
+ trunk/**
+ branches/1.5.x/**
+ branches/1.6.x/**
+ README
+ branches/1.4.x/STATUS
+ branches/1.4.x/subversion/tests/cmdline/~
+
+You may wish to version your viewspec files. If so, you can use this
+script in conjunction with 'svn cat' to fetch, parse, and act on a
+versioned viewspec file:
+
+ $ svn cat http://svn.example.com/specs/dev-spec.txt |
+ __SCRIPTNAME__ checkout - /path/to/target/directory
+
+"""
+
+#########################################################################
+### Possible future improvements that could be made:
+###
+### - support for excluded paths (PATH!)
+### - support for static revisions of individual paths (PATH@REV/**)
+###
+
+import sys
+import os
+import urllib
+
# Working-copy depth names, as accepted by 'svn checkout --depth' and
# 'svn update --set-depth' (see the rule annotations in FORMAT_HELP).
DEPTH_EMPTY = 'empty'            # just the directory itself
DEPTH_FILES = 'files'            # the directory plus its file children
DEPTH_IMMEDIATES = 'immediates'  # the directory plus immediate children
DEPTH_INFINITY = 'infinity'      # full recursion
+
+
class TreeNode:
    """A representation of a single node in a Subversion sparse
    checkout tree."""

    def __init__(self, name, depth):
        self.name = name  # the basename of this tree item
        self.depth = depth  # its depth (one of the DEPTH_* values)
        self.children = {}  # its children (basename -> TreeNode)

    def add_child(self, child_node):
        """Attach CHILD_NODE under this node, keyed by its basename."""
        child_name = child_node.name
        # BUG FIX: the original asserted has_key(child_node) -- membership
        # of the node object rather than its name -- so duplicate names
        # were never detected.  Test the name, with 'in' (which, unlike
        # the Python-2-only has_key, also works on Python 3).
        assert child_name not in self.children
        self.children[child_name] = child_node

    def dump(self, recurse=False, indent=0):
        """Write this node (and optionally its subtree) to stderr."""
        sys.stderr.write(" " * indent)
        sys.stderr.write("Path: %s (depth=%s)\n" % (self.name, self.depth))
        if recurse:
            child_names = self.children.keys()
            child_names.sort(svn_path_compare_paths)
            for child_name in child_names:
                self.children[child_name].dump(recurse, indent + 2)
+
class SubversionViewspec:
    """A representation of a Subversion sparse checkout specification."""

    def __init__(self, base_url, revision, tree):
        # base URL of the checkout
        self.base_url = base_url
        # revision of the checkout (-1 == HEAD)
        self.revision = revision
        # the top-most TreeNode item
        self.tree = tree
+
def svn_path_compare_paths(path1, path2):
    """Compare PATH1 and PATH2 as paths, sorting depth-first-ily.

    Returns a negative, zero, or positive integer, cmp()-style.

    NOTE: Stolen unapologetically from Subversion's Python bindings
    module svn.core."""

    path1_len = len(path1)
    path2_len = len(path2)
    min_len = min(path1_len, path2_len)
    i = 0

    # Are the paths exactly the same?
    if path1 == path2:
        return 0

    # Skip past common prefix
    while (i < min_len) and (path1[i] == path2[i]):
        i = i + 1

    # Children of paths are greater than their parents, but less than
    # greater siblings of their parents
    char1 = '\0'
    char2 = '\0'
    if (i < path1_len):
        char1 = path1[i]
    if (i < path2_len):
        char2 = path2[i]

    if (char1 == '/') and (i == path2_len):
        return 1
    if (char2 == '/') and (i == path1_len):
        return -1
    if (i < path1_len) and (char1 == '/'):
        return -1
    if (i < path2_len) and (char2 == '/'):
        return 1

    # Common prefix was skipped above, next character is compared to
    # determine order.  The builtin cmp() no longer exists on Python 3;
    # this expression is the portable equivalent.
    return (char1 > char2) - (char1 < char2)
+
def parse_viewspec_headers(viewspec_fp):
    """Parse the headers from the viewspec file, return them as a
    dictionary mapping header names to values."""
    headers = {}
    while True:
        line = viewspec_fp.readline().strip()
        if not line:
            # A blank line (or EOF) terminates the header section.
            break
        name, value = (part.strip() for part in line.split(':', 1))
        headers[name] = value
    return headers
+
def parse_viewspec(viewspec_fp):
    """Parse the viewspec file, returning a SubversionViewspec object
    that represents the specification."""

    from functools import cmp_to_key  # local: only needed here

    headers = parse_viewspec_headers(viewspec_fp)
    format = headers['Format']
    assert format == '1'
    base_url = headers['Url']
    revision = int(headers.get('Revision', -1))
    root_depth = DEPTH_EMPTY
    rules = {}
    while 1:
        line = viewspec_fp.readline()
        if not line:
            break
        line = line.rstrip()

        # These are special rules for the top-most dir; don't fall thru.
        if line == '**':
            root_depth = DEPTH_INFINITY
            continue
        elif line == '*':
            root_depth = DEPTH_IMMEDIATES
            continue
        elif line == '~':
            root_depth = DEPTH_FILES
            continue

        # These are the regular per-path rules.
        elif line[-3:] == '/**':
            depth = DEPTH_INFINITY
            path = line[:-3]
        elif line[-2:] == '/*':
            depth = DEPTH_IMMEDIATES
            path = line[:-2]
        elif line[-2:] == '/~':
            depth = DEPTH_FILES
            path = line[:-2]
        else:
            depth = DEPTH_EMPTY
            path = line

        # Add our rule to the set thereof.  ('in' replaces the
        # Python-2-only dict.has_key.)
        assert path not in rules
        rules[path] = depth

    tree = TreeNode('', root_depth)
    # sorted() + cmp_to_key replaces the Python-2-only list.sort(cmp) idiom.
    paths = sorted(rules.keys(), key=cmp_to_key(svn_path_compare_paths))
    for path in paths:
        depth = rules[path]
        # List comprehension instead of filter(): on Python 3, filter()
        # returns a lazy object that does not support path_parts[-1] below.
        path_parts = [part for part in path.split('/') if part]
        tree_ptr = tree
        for part in path_parts[:-1]:
            child_node = tree_ptr.children.get(part, None)
            if not child_node:
                # Implicit parent: checked out with empty depth.
                child_node = TreeNode(part, DEPTH_EMPTY)
                tree_ptr.add_child(child_node)
            tree_ptr = child_node
        tree_ptr.add_child(TreeNode(path_parts[-1], depth))
    return SubversionViewspec(base_url, revision, tree)
+
def checkout_tree(base_url, revision, tree_node, target_dir, is_top=True):
    """Checkout from BASE_URL, and into TARGET_DIR, the TREE_NODE
    sparse checkout item. IS_TOP is set iff this node represents the
    root of the checkout tree. REVISION is the revision to checkout,
    or -1 if checking out HEAD."""

    from functools import cmp_to_key  # local: only needed here

    depth = tree_node.depth
    revision_str = ''
    if revision != -1:
        revision_str = "--revision=%d " % (revision)
    # NOTE(review): the URL and path are interpolated into a shell command;
    # double quotes do not guard against all shell metacharacters.  A
    # subprocess list invocation would be safer - left as-is to preserve
    # behaviour exactly.
    if is_top:
        os.system('svn checkout "%s" "%s" --depth=%s %s'
                  % (base_url, target_dir, depth, revision_str))
    else:
        os.system('svn update "%s" --set-depth=%s %s'
                  % (target_dir, depth, revision_str))
    # sorted() + cmp_to_key replaces the Python-2-only list.sort(cmp) idiom.
    child_names = sorted(tree_node.children.keys(),
                         key=cmp_to_key(svn_path_compare_paths))
    for child_name in child_names:
        # NOTE(review): urllib.unquote is Python-2-only; Python 3 would
        # need urllib.parse.unquote.
        checkout_tree(base_url + '/' + child_name,
                      revision,
                      tree_node.children[child_name],
                      os.path.join(target_dir, urllib.unquote(child_name)),
                      False)
+
def checkout_spec(viewspec, target_dir):
    """Checkout the view specification VIEWSPEC into TARGET_DIR."""

    # Unpack the spec and delegate to the recursive tree walker.
    checkout_tree(viewspec.base_url,
                  viewspec.revision,
                  viewspec.tree,
                  target_dir)
+
def usage_and_exit(errmsg=None):
    """Print the usage message (plus ERRMSG, if given) and exit;
    exit status is 1 iff an error message was given."""
    if errmsg:
        stream = sys.stderr
    else:
        stream = sys.stdout
    msg = __doc__.replace("__SCRIPTNAME__", os.path.basename(sys.argv[0]))
    stream.write(msg)
    if errmsg:
        stream.write("ERROR: %s\n" % (errmsg))
    sys.exit(1 if errmsg else 0)
+
def main():
    """Command-line driver: dispatch on the subcommand in sys.argv."""
    argc = len(sys.argv)
    if argc < 2:
        usage_and_exit('Not enough arguments.')
    subcommand = sys.argv[1]
    if subcommand == 'help':
        usage_and_exit()
    elif subcommand == 'help-format':
        msg = FORMAT_HELP.replace("__SCRIPTNAME__",
                                  os.path.basename(sys.argv[0]))
        sys.stdout.write(msg)
    elif subcommand == 'examine':
        if argc < 3:
            usage_and_exit('No viewspec file specified.')
        # Conditional expression replaces the fragile 'cond and a or b'
        # idiom (which silently misbehaves when 'a' is falsy).
        fp = sys.stdin if sys.argv[2] == '-' else open(sys.argv[2], 'r')
        viewspec = parse_viewspec(fp)
        sys.stdout.write("Url: %s\n" % (viewspec.base_url))
        revision = viewspec.revision
        if revision != -1:
            sys.stdout.write("Revision: %s\n" % (revision))
        else:
            sys.stdout.write("Revision: HEAD\n")
        sys.stdout.write("\n")
        viewspec.tree.dump(True)
    elif subcommand == 'checkout':
        if argc < 3:
            usage_and_exit('No viewspec file specified.')
        if argc < 4:
            usage_and_exit('No target directory specified.')
        fp = sys.stdin if sys.argv[2] == '-' else open(sys.argv[2], 'r')
        checkout_spec(parse_viewspec(fp), sys.argv[3])
    else:
        usage_and_exit('Unknown subcommand "%s".' % (subcommand))

if __name__ == "__main__":
    main()
diff --git a/tools/client-side/svnconflict/README b/tools/client-side/svnconflict/README
new file mode 100644
index 0000000..2d387fc
--- /dev/null
+++ b/tools/client-side/svnconflict/README
@@ -0,0 +1,27 @@
+svnconflict provides a non-interactive interface to SVN's conflict resolver.
+It is intended for use by non-interactive merge scripts, which cannot easily
+use interactive tree conflict resolution features provided by 'svn resolve'.
+
+svnconflict operates on a single working copy path only. It is assumed that
+scripts are able to discover conflicted paths in the working copy via other
+means, such as 'svn status'.
+
+The following operations are supported:
+
+List conflicts at PATH: svnconflict list PATH
+Show text-conflict resolution options for PATH: svnconflict options-text PATH
+Show prop-conflict resolution options for PATH: svnconflict options-prop PATH
+Show tree-conflict resolution options for PATH: svnconflict options-tree PATH
+Resolve text conflict PATH: svnconflict resolve-text OPTION_ID PATH
+Resolve prop conflict PATH: svnconflict resolve-prop PROPNAME OPTION_ID PATH
+Resolve tree conflict PATH: svnconflict resolve-tree OPTION_ID PATH
+
+Note that some advanced operations which are offered by 'svn resolve' are not
+supported, such as selecting a move target path from a set of ambiguous move
+target candidates. svnconflict will always use a default choice in such cases.
+
+svnconflict may contact the repository to obtain information about a conflict.
+It will never modify the repository, but only read information from it.
+svnconflict will not prompt for credentials. If read-access to the repository
+requires credentials but no suitable credentials are stored in Subversion's
+authentication cache or provided on the command line, the operation may fail.
diff --git a/tools/client-side/svnconflict/svnconflict.c b/tools/client-side/svnconflict/svnconflict.c
new file mode 100644
index 0000000..3ee149b
--- /dev/null
+++ b/tools/client-side/svnconflict/svnconflict.c
@@ -0,0 +1,981 @@
+/*
+ * svnconflict.c: Non-interactive conflict resolution tool for Subversion.
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+/* ==================================================================== */
+
+
+
+/*** Includes. ***/
+
+#include <string.h>
+#include <assert.h>
+
+#include <apr_strings.h>
+#include <apr_tables.h>
+#include <apr_general.h>
+
+#include "svn_cmdline.h"
+#include "svn_pools.h"
+#include "svn_client.h"
+#include "svn_config.h"
+#include "svn_string.h"
+#include "svn_dirent_uri.h"
+#include "svn_path.h"
+#include "svn_error.h"
+#include "svn_io.h"
+#include "svn_opt.h"
+#include "svn_utf.h"
+#include "svn_auth.h"
+#include "svn_hash.h"
+#include "svn_version.h"
+
+#include "private/svn_opt_private.h"
+#include "private/svn_cmdline_private.h"
+#include "private/svn_subr_private.h"
+
+#include "svn_private_config.h"
+
+typedef struct svnconflict_opt_state_t { /* parsed command-line options */
+ svn_boolean_t version; /* print version information */
+ svn_boolean_t help; /* print usage message */
+ const char *auth_username; /* auth username */
+ const char *auth_password; /* auth password */
+ const char *config_dir; /* over-riding configuration directory */
+ apr_array_header_t *config_options; /* over-riding configuration options */
+} svnconflict_opt_state_t;
+
+typedef struct svnconflict_cmd_baton_t /* state handed to every subcommand */
+{
+ svnconflict_opt_state_t *opt_state;
+ svn_client_ctx_t *ctx;
+} svnconflict_cmd_baton_t;
+
+
+/*** Option Processing ***/
+
+/* Add an identifier here for long options that don't have a short
+ option. Options that have both long and short options should just
+ use the short option letter as identifier. */
+typedef enum svnconflict_longopt_t {
+ opt_auth_password = SVN_OPT_FIRST_LONGOPT_ID,
+ opt_auth_password_from_stdin,
+ opt_auth_username,
+ opt_config_dir,
+ opt_config_options,
+ opt_version,
+} svnconflict_longopt_t;
+
+/* Option codes and descriptions.
+ * The entire list must be terminated with an entry of nulls. */
+static const apr_getopt_option_t svnconflict_options[] =
+{
+ {"help", 'h', 0, N_("show help on a subcommand")},
+ {NULL, '?', 0, N_("show help on a subcommand")},
+ {"version", opt_version, 0, N_("show program version information")},
+ {"username", opt_auth_username, 1, N_("specify a username ARG")},
+ {"password", opt_auth_password, 1,
+ N_("specify a password ARG (caution: on many operating\n"
+ " "
+ "systems, other users will be able to see this)")},
+ {"password-from-stdin",
+ opt_auth_password_from_stdin, 0,
+ N_("read password from stdin")},
+ {"config-dir", opt_config_dir, 1,
+ N_("read user configuration files from directory ARG")},
+ {"config-option", opt_config_options, 1,
+ N_("set user configuration option in the format:\n"
+ " "
+ " FILE:SECTION:OPTION=[VALUE]\n"
+ " "
+ "For example:\n"
+ " "
+ " servers:global:http-library=serf")},
+ {0, 0, 0, 0},
+};
+
+
+
+/*** Command dispatch. ***/
+
+/* Forward declarations. */
+static svn_error_t * svnconflict_help(apr_getopt_t *, void *, apr_pool_t *);
+static svn_error_t * svnconflict_list(apr_getopt_t *, void *, apr_pool_t *);
+static svn_error_t * svnconflict_options_text(apr_getopt_t *, void *,
+ apr_pool_t *);
+static svn_error_t * svnconflict_options_prop(apr_getopt_t *, void *,
+ apr_pool_t *);
+static svn_error_t * svnconflict_options_tree(apr_getopt_t *, void *,
+ apr_pool_t *);
+static svn_error_t * svnconflict_resolve_text(apr_getopt_t *, void *,
+ apr_pool_t *);
+static svn_error_t * svnconflict_resolve_prop(apr_getopt_t *, void *,
+ apr_pool_t *);
+static svn_error_t * svnconflict_resolve_tree(apr_getopt_t *, void *,
+ apr_pool_t *);
+
+/* Our array of available subcommands.
+ *
+ * The entire list must be terminated with an entry of nulls.
+ *
+ * In most of the help text "PATH" is used where a working copy path is
+ * required, "URL" where a repository URL is required and "TARGET" when
+ * either a path or a url can be used. Hmm, should this be part of the
+ * help text?
+ */
+
+/* Options that apply to all commands. */
+static const int svnconflict_global_options[] =
+{ opt_auth_username, opt_auth_password, opt_auth_password_from_stdin,
+ opt_config_dir, opt_config_options, 0 };
+
+static const svn_opt_subcommand_desc2_t svnconflict_cmd_table[] = /* dispatch table */
+{
+ /* This command is also invoked if we see option "--help", "-h" or "-?". */
+ { "help", svnconflict_help, {"?", "h"}, N_
+ ("Describe the usage of this program or its subcommands.\n"
+ "usage: help [SUBCOMMAND...]\n"),
+ {0} },
+
+ { "list", svnconflict_list, {"ls"}, N_
+ ("List conflicts at a conflicted path.\n"
+ "usage: list PATH\n"
+ "\n"
+ " List conflicts at PATH, one per line. Possible conflicts are:\n"
+ " \n"
+ " text-conflict\n"
+ " One or more text merge conflicts are present in a file.\n"
+ " This conflict can be resolved with the resolve-text subcommand.\n"
+ " \n"
+ " prop-conflict: PROPNAME\n"
+ " The property PROPNAME contains a text merge conflict.\n"
+ " This conflict can be resolved with the resolve-prop subcommand.\n"
+ " \n"
+ " tree-conflict: DESCRIPTION\n"
+ " The PATH is a victim of a tree conflict described by DESCRIPTION.\n"
+ " This conflict can be resolved with the resolve-tree subcommand.\n"
+ " If a tree conflict exists, no text or property conflicts exist.\n"
+ " \n"
+ " If PATH is not in conflict, the exit code will be 1, and 0 otherwise.\n"
+ ""),
+ {0}, },
+
+ { "options-text", svnconflict_options_text, {0}, N_
+ ("List options for resolving a text conflict at path.\n"
+ "usage: options-text PATH\n"
+ "\n"
+ " List text conflict resolution options at PATH, one per line.\n"
+ " Each line contains a numeric option ID, a colon, and a description.\n"
+ " If PATH is not in conflict, the exit code will be 1, and 0 otherwise.\n"
+ ""),
+ {0}, },
+
+ { "options-prop", svnconflict_options_prop, {0}, N_
+ ("List options for resolving a property conflict at path.\n"
+ "usage: options-prop PATH\n"
+ "\n"
+ " List property conflict resolution options at PATH, one per line.\n"
+ " Each line contains a numeric option ID, a colon, and a description.\n"
+ " If PATH is not in conflict, the exit code will be 1, and 0 otherwise.\n"
+ ""),
+ {0}, },
+
+ { "options-tree", svnconflict_options_tree, {0}, N_
+ ("List options for resolving a tree conflict at path.\n"
+ "usage: options-tree PATH\n"
+ "\n"
+ " List tree conflict resolution options at PATH, one per line.\n"
+ " Each line contains a numeric option ID, a colon, and a description.\n"
+ " If PATH is not in conflict, the exit code will be 1, and 0 otherwise.\n"
+ ""),
+ {0}, },
+
+ { "resolve-text", svnconflict_resolve_text, {0}, N_
+ ("Resolve the text conflict at path.\n"
+ "usage: resolve-text OPTION_ID PATH\n"
+ "\n"
+ " Resolve the text conflict at PATH with a given resolution option.\n"
+ " If PATH is not in conflict, the exit code will be 1, and 0 otherwise.\n"
+ ""),
+ {0}, },
+
+ { "resolve-prop", svnconflict_resolve_prop, {0}, N_
+ ("Resolve the property conflict at path.\n"
+ "usage: resolve-prop PROPNAME OPTION_ID PATH\n"
+ "\n"
+ " Resolve conflicted property PROPNAME at PATH with a given resolution option.\n"
+ " If PATH is not in conflict, the exit code will be 1, and 0 otherwise.\n"
+ ""),
+ {0}, },
+
+ { "resolve-tree", svnconflict_resolve_tree, {0}, N_
+ ("Resolve the tree conflict at path.\n"
+ "usage: resolve-tree OPTION_ID PATH\n"
+ "\n"
+ " Resolve the tree conflict at PATH with a given resolution option.\n"
+ " If PATH is not in conflict, the exit code will be 1, and 0 otherwise.\n"
+ ""),
+ {0}, },
+
+ { NULL, NULL, {0}, NULL, {0} }
+};
+
+/* Verify that the linked Subversion libraries match this program's version. */
+static svn_error_t *
+check_lib_versions(void)
+{
+ static const svn_version_checklist_t checklist[] =
+ {
+ { "svn_subr", svn_subr_version },
+ { "svn_client", svn_client_version },
+ { "svn_wc", svn_wc_version },
+ { "svn_ra", svn_ra_version },
+ { NULL, NULL }
+ };
+ SVN_VERSION_DEFINE(my_version);
+
+ return svn_ver_check_list2(&my_version, checklist, svn_ver_equal);
+}
+
+
+/*** Subcommands ***/
+
+/* 'help' subcommand; also backs --version. Implements `svn_opt_subcommand_t'. */
+static svn_error_t *
+svnconflict_help(apr_getopt_t *os, void *baton, apr_pool_t *pool)
+{
+ svnconflict_cmd_baton_t *b = baton;
+ svnconflict_opt_state_t *opt_state = b ? b->opt_state : NULL; /* BATON may be NULL */
+ char help_header[] =
+ N_("usage: svnconflict <subcommand> [args]\n"
+ "Type 'svnconflict --version' to see the program version and RA modules,\n"
+ "\n"
+ "svnconflict provides a non-interactive conflict resolution interface.\n"
+ "It is intended for use by non-interactive scripts which cannot make\n"
+ "use of interactive conflict resolution provided by 'svn resolve'.\n"
+ "\n"
+ "svnconflict operates on a single working copy path only. It is assumed that\n"
+ "scripts are able to discover conflicted paths in the working copy via other\n"
+ "means, such as 'svn status'.\n"
+ "Some advanced operations offered by 'svn resolve' are not supported.\n"
+ "\n"
+ "svnconflict may contact the repository to obtain information about a conflict.\n"
+ "It will never modify the repository, but only read information from it.\n"
+ "svnconflict will not prompt for credentials. If read-access to the repository\n"
+ "requires credentials but no suitable credentials are stored in Subversion's\n"
+ "authentication cache or provided on the command line, the operation may fail.\n"
+ "\nAvailable subcommands:\n");
+ char help_footer[] =
+ N_("Subversion is a tool for version control.\n"
+ "For additional information, see http://subversion.apache.org/\n");
+ const char *ra_desc_start
+ = _("The following repository access (RA) modules are available:\n\n");
+ svn_stringbuf_t *version_footer = svn_stringbuf_create_empty(pool);
+
+ if (opt_state && opt_state->version) /* --version: also list RA modules */
+ {
+ svn_stringbuf_appendcstr(version_footer, ra_desc_start);
+ SVN_ERR(svn_ra_print_modules(version_footer, pool));
+ }
+
+ SVN_ERR(svn_opt_print_help4(os,
+ "svnconflict", /* ### erm, derive somehow? */
+ opt_state ? opt_state->version : FALSE,
+ FALSE, /* quiet */
+ FALSE, /* verbose */
+ version_footer->data,
+ _(help_header),
+ svnconflict_cmd_table,
+ svnconflict_options,
+ svnconflict_global_options,
+ _(help_footer),
+ pool));
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+get_conflicts(svn_boolean_t *text_conflicted, /* out args; each may be NULL */
+ apr_array_header_t **props_conflicted,
+ svn_boolean_t *tree_conflicted,
+ svn_client_conflict_t **conflict,
+ const char *local_abspath,
+ svn_client_ctx_t *ctx,
+ apr_pool_t *pool)
+{
+ svn_boolean_t text;
+ apr_array_header_t *props;
+ svn_boolean_t tree;
+
+ SVN_ERR(svn_client_conflict_get(conflict, local_abspath, ctx, pool, pool));
+ SVN_ERR(svn_client_conflict_get_conflicted(&text, &props, &tree,
+ *conflict, pool, pool));
+
+ if (!text && props->nelts == 0 && !tree) /* not conflicted at all */
+ return svn_error_createf(SVN_ERR_WC_PATH_UNEXPECTED_STATUS, NULL,
+ _("The path '%s' is not in conflict"),
+ local_abspath);
+
+ if (text_conflicted)
+ *text_conflicted = text;
+ if (props_conflicted)
+ *props_conflicted = props;
+ if (tree_conflicted)
+ *tree_conflicted = tree;
+
+ return SVN_NO_ERROR;
+}
+
+/* 'list': print text/prop/tree conflicts at PATH. Implements `svn_opt_subcommand_t'. */
+static svn_error_t *
+svnconflict_list(apr_getopt_t *os, void *baton, apr_pool_t *pool)
+{
+ svnconflict_cmd_baton_t *b = baton;
+ svn_client_ctx_t *ctx = b->ctx;
+ apr_array_header_t *args;
+ const char *path;
+ const char *local_abspath;
+ svn_client_conflict_t *conflict;
+ svn_boolean_t text_conflicted;
+ apr_array_header_t *props_conflicted;
+ svn_boolean_t tree_conflicted;
+ int i;
+
+ SVN_ERR(svn_opt_parse_num_args(&args, os, 1, pool));
+ path = APR_ARRAY_IDX(args, 0, const char *);
+ SVN_ERR(svn_dirent_get_absolute(&local_abspath, path, pool));
+
+ SVN_ERR(get_conflicts(&text_conflicted, &props_conflicted, &tree_conflicted,
+ &conflict, local_abspath, ctx, pool));
+
+ if (text_conflicted)
+ svn_cmdline_printf(pool, "text-conflict\n");
+
+ for (i = 0; i < props_conflicted->nelts; i++)
+ {
+ const char *propname = APR_ARRAY_IDX(props_conflicted, i, const char *);
+ svn_cmdline_printf(pool, "prop-conflict: %s\n", propname);
+ }
+
+ if (tree_conflicted)
+ {
+ const char *incoming_change;
+ const char *local_change;
+
+ SVN_ERR(svn_client_conflict_tree_get_description(&incoming_change,
+ &local_change,
+ conflict, ctx,
+ pool, pool));
+ svn_cmdline_printf(pool, "tree-conflict: %s %s\n",
+ incoming_change, local_change);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static void
+print_conflict_options(apr_array_header_t *options, apr_pool_t *pool) /* print "ID: label" per option */
+{
+ int i;
+
+ for (i = 0; i < options->nelts; i++)
+ {
+ svn_client_conflict_option_t *option;
+ svn_client_conflict_option_id_t id;
+ const char *label;
+
+ option = APR_ARRAY_IDX(options, i, svn_client_conflict_option_t *);
+ id = svn_client_conflict_option_get_id(option);
+ label = svn_client_conflict_option_get_label(option, pool);
+ svn_cmdline_printf(pool, "%d: %s\n", id, label);
+ }
+}
+
+/* 'options-text': list resolution options for a text conflict. Implements `svn_opt_subcommand_t'. */
+static svn_error_t *
+svnconflict_options_text(apr_getopt_t *os, void *baton, apr_pool_t *pool)
+{
+ svnconflict_cmd_baton_t *b = baton;
+ svn_client_ctx_t *ctx = b->ctx;
+ apr_array_header_t *args;
+ const char *path;
+ const char *local_abspath;
+ svn_client_conflict_t *conflict;
+ svn_boolean_t text_conflicted;
+ apr_array_header_t *options;
+
+ SVN_ERR(svn_opt_parse_num_args(&args, os, 1, pool));
+ path = APR_ARRAY_IDX(args, 0, const char *);
+ SVN_ERR(svn_dirent_get_absolute(&local_abspath, path, pool));
+
+ SVN_ERR(get_conflicts(&text_conflicted, NULL, NULL,
+ &conflict, local_abspath, ctx, pool));
+
+ if (!text_conflicted)
+ return svn_error_createf(SVN_ERR_WC_PATH_UNEXPECTED_STATUS, NULL,
+ _("The path '%s' has no text conflict"),
+ local_abspath);
+
+ SVN_ERR(svn_client_conflict_text_get_resolution_options(&options,
+ conflict, ctx,
+ pool, pool));
+ print_conflict_options(options, pool);
+
+ return SVN_NO_ERROR;
+}
+
+/* 'options-prop': list resolution options for a property conflict. Implements `svn_opt_subcommand_t'. */
+static svn_error_t *
+svnconflict_options_prop(apr_getopt_t *os, void *baton, apr_pool_t *pool)
+{
+ svnconflict_cmd_baton_t *b = baton;
+ svn_client_ctx_t *ctx = b->ctx;
+ apr_array_header_t *args;
+ const char *path;
+ const char *local_abspath;
+ svn_client_conflict_t *conflict;
+ apr_array_header_t *props_conflicted;
+ apr_array_header_t *options;
+
+ SVN_ERR(svn_opt_parse_num_args(&args, os, 1, pool));
+ path = APR_ARRAY_IDX(args, 0, const char *);
+ SVN_ERR(svn_dirent_get_absolute(&local_abspath, path, pool));
+
+ SVN_ERR(get_conflicts(NULL, &props_conflicted, NULL,
+ &conflict, local_abspath, ctx, pool));
+
+ if (props_conflicted->nelts == 0)
+ return svn_error_createf(SVN_ERR_WC_PATH_UNEXPECTED_STATUS, NULL,
+ _("The path '%s' has no property conflict"),
+ local_abspath);
+
+ SVN_ERR(svn_client_conflict_prop_get_resolution_options(&options,
+ conflict, ctx,
+ pool, pool));
+ print_conflict_options(options, pool);
+
+ return SVN_NO_ERROR;
+}
+
+/* 'options-tree': list resolution options for a tree conflict. Implements `svn_opt_subcommand_t'. */
+static svn_error_t *
+svnconflict_options_tree(apr_getopt_t *os, void *baton, apr_pool_t *pool)
+{
+ svnconflict_cmd_baton_t *b = baton;
+ svn_client_ctx_t *ctx = b->ctx;
+ apr_array_header_t *args;
+ const char *path;
+ const char *local_abspath;
+ svn_client_conflict_t *conflict;
+ svn_boolean_t tree_conflicted;
+ apr_array_header_t *options;
+
+ SVN_ERR(svn_opt_parse_num_args(&args, os, 1, pool));
+ path = APR_ARRAY_IDX(args, 0, const char *);
+ SVN_ERR(svn_dirent_get_absolute(&local_abspath, path, pool));
+
+ SVN_ERR(get_conflicts(NULL, NULL, &tree_conflicted,
+ &conflict, local_abspath, ctx, pool));
+
+ if (!tree_conflicted)
+ return svn_error_createf(SVN_ERR_WC_PATH_UNEXPECTED_STATUS, NULL,
+ _("The path '%s' is not a tree conflict victim"),
+ local_abspath);
+
+ SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, pool));
+ SVN_ERR(svn_client_conflict_tree_get_resolution_options(&options,
+ conflict, ctx,
+ pool, pool));
+ print_conflict_options(options, pool);
+
+ return SVN_NO_ERROR;
+}
+
+/* 'resolve-text OPTION_ID PATH': resolve a text conflict. Implements `svn_opt_subcommand_t'. */
+static svn_error_t *
+svnconflict_resolve_text(apr_getopt_t *os, void *baton, apr_pool_t *pool)
+{
+ svnconflict_cmd_baton_t *b = baton;
+ svn_client_ctx_t *ctx = b->ctx;
+ apr_array_header_t *args;
+ const char *option_id_str;
+ int optid;
+ svn_client_conflict_option_id_t option_id;
+ const char *path;
+ const char *local_abspath;
+ svn_client_conflict_t *conflict;
+ svn_boolean_t text_conflicted;
+
+ SVN_ERR(svn_opt_parse_num_args(&args, os, 2, pool));
+ option_id_str = APR_ARRAY_IDX(args, 0, const char *);
+ path = APR_ARRAY_IDX(args, 1, const char *);
+ SVN_ERR(svn_cstring_atoi(&optid, option_id_str));
+ option_id = (svn_client_conflict_option_id_t)optid;
+ SVN_ERR(svn_dirent_get_absolute(&local_abspath, path, pool));
+
+ SVN_ERR(get_conflicts(&text_conflicted, NULL, NULL,
+ &conflict, local_abspath, ctx, pool));
+
+ if (!text_conflicted)
+ return svn_error_createf(SVN_ERR_WC_PATH_UNEXPECTED_STATUS, NULL,
+ _("The path '%s' has no text conflict"),
+ local_abspath);
+
+ SVN_ERR(svn_client_conflict_text_resolve_by_id(conflict, option_id, ctx,
+ pool));
+
+ return SVN_NO_ERROR;
+}
+
+/* 'resolve-prop PROPNAME OPTION_ID PATH': resolve a property conflict. Implements `svn_opt_subcommand_t'. */
+static svn_error_t *
+svnconflict_resolve_prop(apr_getopt_t *os, void *baton, apr_pool_t *pool)
+{
+ svnconflict_cmd_baton_t *b = baton;
+ svn_client_ctx_t *ctx = b->ctx;
+ apr_array_header_t *args;
+ const char *option_id_str;
+ int optid;
+ svn_client_conflict_option_id_t option_id;
+ const char *path;
+ const char *propname;
+ const char *local_abspath;
+ svn_client_conflict_t *conflict;
+ apr_array_header_t *props_conflicted;
+
+ SVN_ERR(svn_opt_parse_num_args(&args, os, 3, pool));
+ propname = APR_ARRAY_IDX(args, 0, const char *);
+ option_id_str = APR_ARRAY_IDX(args, 1, const char *);
+ path = APR_ARRAY_IDX(args, 2, const char *);
+ SVN_ERR(svn_cstring_atoi(&optid, option_id_str));
+ option_id = (svn_client_conflict_option_id_t)optid;
+ SVN_ERR(svn_dirent_get_absolute(&local_abspath, path, pool));
+
+ SVN_ERR(get_conflicts(NULL, &props_conflicted, NULL,
+ &conflict, local_abspath, ctx, pool));
+
+ if (props_conflicted->nelts == 0)
+ return svn_error_createf(SVN_ERR_WC_PATH_UNEXPECTED_STATUS, NULL,
+ _("The path '%s' has no property conflict"),
+ local_abspath);
+
+ SVN_ERR(svn_client_conflict_prop_resolve_by_id(conflict, propname,
+ option_id, ctx, pool));
+
+ return SVN_NO_ERROR;
+}
+
+/* 'resolve-tree OPTION_ID PATH': resolve a tree conflict. Implements `svn_opt_subcommand_t'. */
+static svn_error_t *
+svnconflict_resolve_tree(apr_getopt_t *os, void *baton, apr_pool_t *pool)
+{
+ svnconflict_cmd_baton_t *b = baton;
+ svn_client_ctx_t *ctx = b->ctx;
+ apr_array_header_t *args;
+ const char *option_id_str;
+ int optid;
+ svn_client_conflict_option_id_t option_id;
+ const char *path;
+ const char *local_abspath;
+ svn_client_conflict_t *conflict;
+ svn_boolean_t tree_conflicted;
+
+ SVN_ERR(svn_opt_parse_num_args(&args, os, 2, pool));
+ option_id_str = APR_ARRAY_IDX(args, 0, const char *);
+ path = APR_ARRAY_IDX(args, 1, const char *);
+ SVN_ERR(svn_cstring_atoi(&optid, option_id_str));
+ option_id = (svn_client_conflict_option_id_t)optid;
+ SVN_ERR(svn_dirent_get_absolute(&local_abspath, path, pool));
+
+ SVN_ERR(get_conflicts(NULL, NULL, &tree_conflicted,
+ &conflict, local_abspath, ctx, pool));
+
+ if (!tree_conflicted)
+ return svn_error_createf(SVN_ERR_WC_PATH_UNEXPECTED_STATUS, NULL,
+ _("The path '%s' is not a tree conflict victim"),
+ local_abspath);
+
+ SVN_ERR(svn_client_conflict_tree_get_details(conflict, ctx, pool));
+ SVN_ERR(svn_client_conflict_tree_resolve_by_id(conflict, option_id, ctx,
+ pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+/*** Main. ***/
+
+/*
+ * On success, leave *EXIT_CODE untouched and return SVN_NO_ERROR. On error,
+ * either return an error to be displayed, or set *EXIT_CODE to non-zero and
+ * return SVN_NO_ERROR.
+ */
+static svn_error_t *
+sub_main(int *exit_code, int argc, const char *argv[], apr_pool_t *pool)
+{
+ svn_error_t *err;
+ int opt_id;
+ apr_getopt_t *os;
+ svnconflict_opt_state_t opt_state = { 0 };
+ svn_client_ctx_t *ctx;
+ apr_array_header_t *received_opts;
+ svnconflict_cmd_baton_t command_baton;
+ int i;
+ const svn_opt_subcommand_desc2_t *subcommand = NULL;
+ svn_auth_baton_t *ab;
+ svn_config_t *cfg_config;
+ apr_hash_t *cfg_hash;
+ svn_boolean_t read_pass_from_stdin = FALSE;
+
+ received_opts = apr_array_make(pool, SVN_OPT_MAX_OPTIONS, sizeof(int));
+
+ /* Check library versions */
+ SVN_ERR(check_lib_versions());
+
+#if defined(WIN32) || defined(__CYGWIN__)
+ /* Set the working copy administrative directory name. */
+ if (getenv("SVN_ASP_DOT_NET_HACK"))
+ {
+ SVN_ERR(svn_wc_set_adm_dir("_svn", pool));
+ }
+#endif
+
+ /* Initialize the RA library. */
+ SVN_ERR(svn_ra_initialize(pool));
+
+ /* No args? Show usage. */
+ if (argc <= 1)
+ {
+ SVN_ERR(svnconflict_help(NULL, NULL, pool));
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
+ }
+
+ /* Else, parse options. */
+ SVN_ERR(svn_cmdline__getopt_init(&os, argc, argv, pool));
+
+ os->interleave = 1; /* permit options to appear after arguments */
+ while (1)
+ {
+ const char *opt_arg;
+ const char *utf8_opt_arg;
+
+ /* Parse the next option. */
+ apr_status_t apr_err = apr_getopt_long(os, svnconflict_options, &opt_id,
+ &opt_arg);
+ if (APR_STATUS_IS_EOF(apr_err))
+ break;
+ else if (apr_err)
+ {
+ SVN_ERR(svnconflict_help(NULL, NULL, pool));
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
+ }
+
+ /* Stash the option code in an array before parsing it. */
+ APR_ARRAY_PUSH(received_opts, int) = opt_id;
+
+ switch (opt_id) {
+ case 'h':
+ case '?':
+ opt_state.help = TRUE;
+ break;
+ case opt_version:
+ opt_state.version = TRUE;
+ break;
+ case opt_auth_username:
+ SVN_ERR(svn_utf_cstring_to_utf8(&opt_state.auth_username,
+ opt_arg, pool));
+ break;
+ case opt_auth_password:
+ SVN_ERR(svn_utf_cstring_to_utf8(&opt_state.auth_password,
+ opt_arg, pool));
+ break;
+ case opt_auth_password_from_stdin:
+ read_pass_from_stdin = TRUE;
+ break;
+ case opt_config_dir:
+ SVN_ERR(svn_utf_cstring_to_utf8(&utf8_opt_arg, opt_arg, pool));
+ opt_state.config_dir = svn_dirent_internal_style(utf8_opt_arg, pool);
+ break;
+ case opt_config_options:
+ if (!opt_state.config_options)
+ opt_state.config_options =
+ apr_array_make(pool, 1,
+ sizeof(svn_cmdline__config_argument_t*));
+
+ SVN_ERR(svn_utf_cstring_to_utf8(&utf8_opt_arg, opt_arg, pool));
+ SVN_ERR(svn_cmdline__parse_config_option(opt_state.config_options,
+ utf8_opt_arg, "svnconflict: ",
+ pool));
+ break;
+ default:
+ break;
+ }
+ }
+
+ /* ### This really belongs in libsvn_client. */
+ SVN_ERR(svn_config_ensure(opt_state.config_dir, pool));
+
+ /* If the user asked for help, then the rest of the arguments are
+ the names of subcommands to get help on (if any), or else they're
+ just typos/mistakes. Whatever the case, the subcommand to
+ actually run is svnconflict_help(). */
+ if (opt_state.help)
+ subcommand = svn_opt_get_canonical_subcommand2(svnconflict_cmd_table,
+ "help");
+
+ /* If we're not running the `help' subcommand, then look for a
+ subcommand in the first argument. */
+ if (subcommand == NULL)
+ {
+ if (os->ind >= os->argc) /* no subcommand argument remains */
+ {
+ if (opt_state.version)
+ {
+ /* Use the "help" subcommand to handle the "--version" option. */
+ static const svn_opt_subcommand_desc2_t pseudo_cmd =
+ { "--version", svnconflict_help, {0}, "",
+ {opt_version, /* must accept its own option */
+ opt_config_dir /* all commands accept this */
+ } };
+
+ subcommand = &pseudo_cmd;
+ }
+ else
+ {
+ svn_error_clear
+ (svn_cmdline_fprintf(stderr, pool,
+ _("Subcommand argument required\n")));
+ svn_error_clear(svnconflict_help(NULL, NULL, pool));
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
+ }
+ }
+ else
+ {
+ const char *first_arg;
+
+ SVN_ERR(svn_utf_cstring_to_utf8(&first_arg, os->argv[os->ind++],
+ pool));
+ subcommand = svn_opt_get_canonical_subcommand2(svnconflict_cmd_table,
+ first_arg);
+ if (subcommand == NULL)
+ {
+ svn_error_clear
+ (svn_cmdline_fprintf(stderr, pool,
+ _("Unknown subcommand: '%s'\n"),
+ first_arg));
+ svn_error_clear(svnconflict_help(NULL, NULL, pool));
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
+ }
+ }
+ }
+
+ /* Check that the subcommand wasn't passed any inappropriate options. */
+ for (i = 0; i < received_opts->nelts; i++)
+ {
+ opt_id = APR_ARRAY_IDX(received_opts, i, int);
+
+ /* All commands implicitly accept --help, so just skip over this
+ when we see it. Note that we don't want to include this option
+ in their "accepted options" list because it would be awfully
+ redundant to display it in every commands' help text. */
+ if (opt_id == 'h' || opt_id == '?')
+ continue;
+
+ if (! svn_opt_subcommand_takes_option3(subcommand, opt_id,
+ svnconflict_global_options))
+ {
+ const char *optstr;
+ const apr_getopt_option_t *badopt =
+ svn_opt_get_option_from_code2(opt_id, svnconflict_options,
+ subcommand, pool);
+ svn_opt_format_option(&optstr, badopt, FALSE, pool);
+ if (subcommand->name[0] == '-')
+ svn_error_clear(svnconflict_help(NULL, NULL, pool));
+ else
+ svn_error_clear
+ (svn_cmdline_fprintf
+ (stderr, pool, _("Subcommand '%s' doesn't accept option '%s'\n"
+ "Type 'svnconflict help %s' for usage.\n"),
+ subcommand->name, optstr, subcommand->name));
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
+ }
+ }
+
+ err = svn_config_get_config(&cfg_hash, opt_state.config_dir, pool);
+ if (err)
+ {
+ /* Fallback to default config if the config directory isn't readable
+ or is not a directory. */
+ if (APR_STATUS_IS_EACCES(err->apr_err)
+ || SVN__APR_STATUS_IS_ENOTDIR(err->apr_err))
+ {
+ svn_handle_warning2(stderr, err, "svnconflict: ");
+ svn_error_clear(err);
+
+ SVN_ERR(svn_config__get_default_config(&cfg_hash, pool));
+ }
+ else
+ return err;
+ }
+
+ /* Update the options in the config */
+ if (opt_state.config_options)
+ {
+ svn_error_clear(
+ svn_cmdline__apply_config_options(cfg_hash,
+ opt_state.config_options,
+ "svnconflict: ",
+ "--config-option"));
+ }
+
+ cfg_config = svn_hash_gets(cfg_hash, SVN_CONFIG_CATEGORY_CONFIG);
+
+ /* Get password from stdin if necessary */
+ if (read_pass_from_stdin)
+ {
+ SVN_ERR(svn_cmdline__stdin_readline(&opt_state.auth_password, pool, pool));
+ }
+
+
+ /* Create a client context object. */
+ command_baton.opt_state = &opt_state;
+ SVN_ERR(svn_client_create_context2(&ctx, cfg_hash, pool));
+ command_baton.ctx = ctx;
+
+ /* Set up Authentication stuff. */
+ SVN_ERR(svn_cmdline_create_auth_baton2(
+ &ab,
+ TRUE, /* non-interactive */
+ opt_state.auth_username,
+ opt_state.auth_password,
+ opt_state.config_dir,
+ TRUE, /* no auth cache */
+ FALSE, FALSE, FALSE, FALSE, FALSE, /* reject invalid SSL certs */
+ cfg_config,
+ NULL, NULL,
+ pool));
+
+ ctx->auth_baton = ab;
+
+ /* We don't use legacy libsvn_wc conflict handlers. */
+ {
+ ctx->conflict_func = NULL;
+ ctx->conflict_baton = NULL;
+ ctx->conflict_func2 = NULL;
+ ctx->conflict_baton2 = NULL;
+ }
+
+ /* And now we finally run the subcommand. */
+ err = (*subcommand->cmd_func)(os, &command_baton, pool);
+ if (err)
+ {
+ /* For argument-related problems, suggest using the 'help'
+ subcommand. */
+ if (err->apr_err == SVN_ERR_CL_INSUFFICIENT_ARGS
+ || err->apr_err == SVN_ERR_CL_ARG_PARSING_ERROR)
+ {
+ err = svn_error_quick_wrapf(
+ err, _("Try 'svnconflict help %s' for more information"),
+ subcommand->name);
+ }
+ if (err->apr_err == SVN_ERR_WC_UPGRADE_REQUIRED)
+ {
+ err = svn_error_quick_wrap(err,
+ _("Please see the 'svn upgrade' command"));
+ }
+
+ /* Tell the user about 'svn cleanup' if any error on the stack
+ was about locked working copies. */
+ if (svn_error_find_cause(err, SVN_ERR_WC_LOCKED))
+ {
+ err = svn_error_quick_wrap(
+ err, _("Run 'svn cleanup' to remove locks "
+ "(type 'svn help cleanup' for details)"));
+ }
+
+ if (err->apr_err == SVN_ERR_SQLITE_BUSY)
+ {
+ err = svn_error_quick_wrap(err,
+ _("Another process is blocking the "
+ "working copy database, or the "
+ "underlying filesystem does not "
+ "support file locking; if the working "
+ "copy is on a network filesystem, make "
+ "sure file locking has been enabled "
+ "on the file server"));
+ }
+
+ if (svn_error_find_cause(err, SVN_ERR_RA_CANNOT_CREATE_TUNNEL) &&
+ (opt_state.auth_username || opt_state.auth_password))
+ {
+ err = svn_error_quick_wrap(
+ err, _("When using svn+ssh:// URLs, keep in mind that the "
+ "--username and --password options are ignored "
+ "because authentication is performed by SSH, not "
+ "Subversion"));
+ }
+
+ return err;
+ }
+
+ return SVN_NO_ERROR;
+}
+
+int
+main(int argc, const char *argv[])
+{
+ apr_pool_t *pool;
+ int exit_code = EXIT_SUCCESS;
+ svn_error_t *err;
+
+ /* Initialize the app. */
+ if (svn_cmdline_init("svn", stderr) != EXIT_SUCCESS) /* NOTE(review): app name is "svn", not "svnconflict" -- confirm intended */
+ return EXIT_FAILURE;
+
+ /* Create our top-level pool. Use a separate mutexless allocator,
+ * given this application is single threaded.
+ */
+ pool = apr_allocator_owner_get(svn_pool_create_allocator(FALSE));
+
+ err = sub_main(&exit_code, argc, argv, pool);
+
+ /* Flush stdout and report if it fails. It would be flushed on exit anyway
+ but this makes sure that output is not silently lost if it fails. */
+ err = svn_error_compose_create(err, svn_cmdline_fflush(stdout));
+
+ if (err)
+ {
+ exit_code = EXIT_FAILURE;
+ svn_cmdline_handle_exit_error(err, NULL, "svnconflict: ");
+ }
+
+ svn_pool_destroy(pool);
+
+ svn_cmdline__cancellation_exit();
+
+ return exit_code;
+}
diff --git a/tools/client-side/wcfind b/tools/client-side/wcfind
new file mode 100755
index 0000000..2aca83c
--- /dev/null
+++ b/tools/client-side/wcfind
@@ -0,0 +1,81 @@
+#!/bin/sh
+
+# ------------------------------------------------------------
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ------------------------------------------------------------
+
+# Copyright 2007 Max Bowsher
+# Licensed under the terms Subversion ships under
+
+# Runs the 'find' program, with arguments munged such that '.svn' or 'CVS'
+# working copy administrative directories and their contents are ignored.
+
# State accumulated while classifying the find(1) arguments:
optspaths=      # leading options (-H/-L/-P) and the starting-point paths
expropts=       # "expression options" (-maxdepth etc.) plus their arguments
exproptarg=     # "yes" while the next word is the argument of an option above
exprmain=       # the user's actual expression (tests and actions)
depth=          # "yes" once -depth/-d was seen (disables -prune)
somethingseen=  # set once the expression contains a real test/action
phase=optspaths # which variable the current word belongs to
print=-print    # implicit -print appended unless the user gave an action
for option in "$@"; do
    if [ "$phase" = "optspaths" ]; then
        case $option in
            # Real find options: stay in the options/paths phase.
            -H|-L|-P) ;;
            # First expression token switches to expression parsing.
            -*|"("*|")"*|,*|!*) phase=exprmain ;;
            # Anything else is a starting-point path.
            *) ;;
        esac
    fi
    if [ "$exproptarg" = "yes" ]; then
        # Previous word was e.g. -maxdepth; this word is its argument.
        exproptarg=
        phase=expropts
    fi
    if [ "$phase" = "exprmain" ]; then
        # Any explicit action suppresses our implicit -print.
        # NOTE(review): -exec and -execdir are listed twice; harmless.
        case $option in
            -depth|-d) depth=yes ;;
            -delete|-exec|-exec|-execdir|-execdir|-fls|-fprint|-fprint0 \
            |-fprintf|-ok|-print|-okdir|-print0|-printf|-quit|-ls) print= ;;
        esac
        # Route global options (which find wants before the expression)
        # into expropts; note which ones take an argument.
        case $option in
            -depth|-d|-noleaf|-mount|-xdev|-warn|-nowarn \
            |-ignore_readdir_race|-noignore_readdir_race) phase=expropts ;;
            -maxdepth|-mindepth|-regextype) phase=expropts; exproptarg=yes ;;
            -follow|-daystart) ;;
            *) somethingseen=yes
        esac
    fi
    # Append the current (re-quoted) word to the variable named by $phase.
    eval "$phase=\"\$$phase \\\"$option\\\"\""
    if [ "$phase" = "expropts" ]; then
        phase=exprmain
    fi
done

# No test/action given: default to printing whatever matches.
if [ -z "$somethingseen" ]; then
    exprmain="$exprmain -print"
    print=
fi

# With -depth, -prune does not work, so exclude .svn/CVS via -regex instead.
if [ "$depth" = "yes" ]; then
    eval find $optspaths $expropts \
        -regex \''.*/\.svn'\' -o -regex \''.*/\.svn/.*'\' \
        -o -regex \''.*/CVS'\' -o -regex \''.*/CVS/.*'\' \
        -o '\(' $exprmain '\)' $print
else
    eval find $optspaths $expropts '\(' -name .svn -o -name CVS '\)' -prune \
        -o '\(' $exprmain '\)' $print
fi
diff --git a/tools/dev/analyze-svnlogs.py b/tools/dev/analyze-svnlogs.py
new file mode 100755
index 0000000..883b413
--- /dev/null
+++ b/tools/dev/analyze-svnlogs.py
@@ -0,0 +1,193 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# Generate a report of each area each committer has touched over all time.
+#
+# $ svn log -v ^/ > svnlogdata
+# $ ./analyze-svnlogs.py < svnlogdata > report.txt
+#
+# NOTE: ./logdata.py is written with a cached version of the data extracted
+# from 'svnlogdata'. That data can be analyzed in many ways, beyond
+# what this script is reporting.
+#
+
+import sys
+import re
+
+
# Matches an 'svn log' entry header: "r123 | author | date | N lines".
# Raw strings: '\|' in a plain string is an invalid escape sequence
# (SyntaxWarning on modern Python).
RE_LOG_HEADER = re.compile(r'^(r[0-9]+) '
                           r'\| ([^|]+) '
                           r'\| ([^|]+) '
                           r'\| ([0-9]+) line')
# Matches one changed-path line " M /path (from /other:R)"; group 1 is the path.
RE_PATH = re.compile(r' [MARD] (.*?)( \(from .*\))?$')
# Line of dashes separating log entries.
SEPARATOR = '-' * 72
+
+
def parse_one_commit(logfile):
    """Parse one entry of 'svn log -v' output from LOGFILE.

    Returns (author, paths) where paths is a set of changed paths, or
    (None, None) at end of input.  Raises ParseError when the stream does
    not look like verbose svn log output.
    """
    # Every entry starts with a line of dashes.
    line = logfile.readline().strip()
    if line != SEPARATOR:
        raise ParseError('missing separator: %s' % line)

    line = logfile.readline()
    if not line:
        # end of file!
        return None, None

    # "rNNN | author | date | N lines" header.
    m = RE_LOG_HEADER.match(line)
    if not m:
        raise ParseError('could not match log header')
    revision = m.group(1)
    author = m.group(2)
    num_lines = int(m.group(4))
    paths = set()

    # skip "Changed paths:"
    line = logfile.readline().strip()
    if not line:
        # there were no paths. just a blank before the log message. continue on.
        sys.stderr.write('Funny revision: %s\n' % revision)
    else:
        if not line.startswith('Changed'):
            raise ParseError('log not run with -v. paths missing in %s' % revision)

        # gather all the affected paths
        while 1:
            line = logfile.readline().rstrip()
            if not line:
                # just hit end of the changed paths
                break
            m = RE_PATH.match(line)
            if not m:
                raise ParseError('bad path in %s: %s' % (revision, line))
            paths.add(m.group(1))

    # suck up the log message (its length came from the header line)
    for i in range(num_lines):
        logfile.readline()

    return author, paths
+
+
def parse_file(logfile):
    """Consume an entire 'svn log -v' stream; return {author: set of paths}."""
    authors = { }
    while True:
        author, paths = parse_one_commit(logfile)
        if author is None:
            # End of input reached.
            break
        # Merge this commit's paths into the author's accumulated set.
        authors.setdefault(author, set()).update(paths)
    return authors
+
+
def write_logdata(authors):
    """Cache AUTHORS (author -> set of paths) as ./logdata.py.

    The generated file assigns the mapping to a module-level 'authors'
    variable so that run() can simply 'import logdata' on later runs.
    """
    # Context manager guarantees the file is flushed and closed even on
    # error (the original leaked the file handle).
    with open('logdata.py', 'w') as out:
        out.write('authors = {\n')
        for author, paths in authors.items():
            out.write(" '%s': set([\n" % author)
            for path in paths:
                out.write(' %s,\n' % repr(path))
            out.write(' ]),\n')
        out.write('}\n')
+
+
def get_key(sectionroots, path):
    """Return the report key (tuple of path elements) for PATH.

    If PATH lies under one of the SECTIONROOTS prefixes, the key is the
    section's elements plus one further element, giving extra detail inside
    that section.  Otherwise the key is the first two path elements.
    PATH is expected to start with '/'.
    """
    key = None
    for section in sectionroots:
        if path.startswith(section):
            # add one path element below top section to the key.
            elmts = len(section.split('/')) + 1
            # strip first element (always empty because path starts with '/')
            key = tuple(path.split('/', elmts)[1:elmts])
            break
    if key is None:  # identity test for None, not '=='
        # strip first element (always empty because path starts with '/')
        key = tuple(path.split('/', 3)[1:3])
    return key
+
+
def print_report(authors, sectionroots=None):
    """Print, per author, how many paths were touched under each key area.

    AUTHORS maps author name -> set of repository paths.  SECTIONROOTS is an
    optional list of path prefixes that are reported at one extra level of
    detail (see get_key()).  Tags and branches are summarized by name
    instead of item counts.
    """
    # None sentinel instead of a mutable default argument.
    if sectionroots is None:
        sectionroots = [ ]

    for author, paths in sorted(authors.items()):
        # Count how many of the author's paths fall under each key.
        topdirs = { }
        for path in paths:
            key = get_key(sectionroots, path)
            if key in topdirs:
                topdirs[key] += 1
            else:
                topdirs[key] = 1

        print(author)
        tags = [ ]
        branches = [ ]
        for topdir in sorted(topdirs):
            if len(topdir) == 1:
                # A path directly in the repository root; can only occur once.
                assert topdirs[topdir] == 1
                print(' %s (ROOT)' % topdir[0])
            else:
                if topdir[0] == 'tags':
                    # Collect tag names; summarized after the loop.
                    if not topdir[1] in tags:
                        tags.append(topdir[1])
                elif topdir[0] == 'branches':
                    if not topdir[1] in branches:
                        branches.append(topdir[1])
                else:
                    print(' %s (%d items)' % ('/'.join(topdir), topdirs[topdir]))
        if tags:
            print(' TAGS: %s' % ', '.join(tags))
        if branches:
            print(' BRANCHES: %s' % ', '.join(branches))

        print('')
+
+
def run(logfile):
    """Produce the report: use cached data when present, else parse LOGFILE.

    A successful parse is cached to ./logdata.py; delete that file to force
    a fresh parse on the next run.
    """
    try:
        # Reuse the cache written by a previous run, if any.
        import logdata
        authors = logdata.authors
    except ImportError:
        authors = parse_file(logfile)
        write_logdata(authors)

    # Subtrees to report at one extra level of detail (see get_key()).
    sectionroots = [
        '/trunk/subversion/include/private',
        '/trunk/subversion/include',
        '/trunk/subversion/tests',
        '/trunk/subversion',
        '/trunk/tools',
        '/trunk/contrib',
        '/trunk/doc',
    ];
    print_report(authors, sectionroots)
+
+
class ParseError(Exception):
    """Raised when the input does not look like 'svn log -v' output."""
    pass
+
+
if __name__ == '__main__':
    # Read the log from a file given on the command line, or from stdin.
    source = open(sys.argv[1]) if len(sys.argv) > 1 else sys.stdin
    run(source)
diff --git a/tools/dev/aprerr.txt b/tools/dev/aprerr.txt
new file mode 100644
index 0000000..281c424
--- /dev/null
+++ b/tools/dev/aprerr.txt
@@ -0,0 +1,139 @@
+# This file is used by which-error.py and gen_base.py:write_errno_table()
+APR_SUCCESS = 0
+SOCBASEERR = 10000
+SOCEPERM = 10001
+SOCESRCH = 10003
+SOCEINTR = 10004
+SOCENXIO = 10006
+SOCEBADF = 10009
+SOCEACCES = 10013
+SOCEFAULT = 10014
+SOCEINVAL = 10022
+SOCEMFILE = 10024
+SOCEPIPE = 10032
+SOCEWOULDBLOCK = 10035
+SOCEINPROGRESS = 10036
+SOCEALREADY = 10037
+SOCENOTSOCK = 10038
+SOCEDESTADDRREQ = 10039
+SOCEMSGSIZE = 10040
+SOCEPROTOTYPE = 10041
+SOCENOPROTOOPT = 10042
+SOCEPROTONOSUPPORT = 10043
+SOCESOCKTNOSUPPORT = 10044
+SOCEOPNOTSUPP = 10045
+SOCEPFNOSUPPORT = 10046
+SOCEAFNOSUPPORT = 10047
+SOCEADDRINUSE = 10048
+SOCEADDRNOTAVAIL = 10049
+SOCENETDOWN = 10050
+SOCENETUNREACH = 10051
+SOCENETRESET = 10052
+SOCECONNABORTED = 10053
+SOCECONNRESET = 10054
+SOCENOBUFS = 10055
+SOCEISCONN = 10056
+SOCENOTCONN = 10057
+SOCESHUTDOWN = 10058
+SOCETOOMANYREFS = 10059
+SOCETIMEDOUT = 10060
+SOCECONNREFUSED = 10061
+SOCELOOP = 10062
+SOCENAMETOOLONG = 10063
+SOCEHOSTDOWN = 10064
+SOCEHOSTUNREACH = 10065
+SOCENOTEMPTY = 10066
+APR_UTIL_ERRSPACE_SIZE = 20000
+APR_OS_START_ERROR = 20000
+APR_ENOSTAT = 20001
+APR_ENOPOOL = 20002
+APR_EBADDATE = 20004
+APR_EINVALSOCK = 20005
+APR_ENOPROC = 20006
+APR_ENOTIME = 20007
+APR_ENODIR = 20008
+APR_ENOLOCK = 20009
+APR_ENOPOLL = 20010
+APR_ENOSOCKET = 20011
+APR_ENOTHREAD = 20012
+APR_ENOTHDKEY = 20013
+APR_EGENERAL = 20014
+APR_ENOSHMAVAIL = 20015
+APR_EBADIP = 20016
+APR_EBADMASK = 20017
+APR_EDSOOPEN = 20019
+APR_EABSOLUTE = 20020
+APR_ERELATIVE = 20021
+APR_EINCOMPLETE = 20022
+APR_EABOVEROOT = 20023
+APR_EBADPATH = 20024
+APR_EPATHWILD = 20025
+APR_ESYMNOTFOUND = 20026
+APR_EPROC_UNKNOWN = 20027
+APR_ENOTENOUGHENTROPY = 20028
+APR_OS_ERRSPACE_SIZE = 50000
+APR_OS_START_STATUS = 70000
+APR_INCHILD = 70001
+APR_INPARENT = 70002
+APR_DETACH = 70003
+APR_NOTDETACH = 70004
+APR_CHILD_DONE = 70005
+APR_CHILD_NOTDONE = 70006
+APR_TIMEUP = 70007
+APR_INCOMPLETE = 70008
+APR_BADCH = 70012
+APR_BADARG = 70013
+APR_EOF = 70014
+APR_NOTFOUND = 70015
+APR_ANONYMOUS = 70019
+APR_FILEBASED = 70020
+APR_KEYBASED = 70021
+APR_EINIT = 70022
+APR_ENOTIMPL = 70023
+APR_EMISMATCH = 70024
+APR_EBUSY = 70025
+APR_UTIL_START_STATUS = 100000
+APR_ENOKEY = 100001
+APR_ENOIV = 100002
+APR_EKEYTYPE = 100003
+APR_ENOSPACE = 100004
+APR_ECRYPT = 100005
+APR_EPADDING = 100006
+APR_EKEYLENGTH = 100007
+APR_ENOCIPHER = 100008
+APR_ENODIGEST = 100009
+APR_ENOENGINE = 100010
+APR_EINITENGINE = 100011
+APR_EREINIT = 100012
+APR_OS_START_USEERR = 120000
+APR_OS_START_USERERR = 120000
+APR_OS_START_CANONERR = 620000
+APR_EACCES = 620001
+APR_EEXIST = 620002
+APR_ENAMETOOLONG = 620003
+APR_ENOENT = 620004
+APR_ENOTDIR = 620005
+APR_ENOSPC = 620006
+APR_ENOMEM = 620007
+APR_EMFILE = 620008
+APR_ENFILE = 620009
+APR_EBADF = 620010
+APR_EINVAL = 620011
+APR_ESPIPE = 620012
+APR_EAGAIN = 620013
+APR_EINTR = 620014
+APR_ENOTSOCK = 620015
+APR_ECONNREFUSED = 620016
+APR_EINPROGRESS = 620017
+APR_ECONNABORTED = 620018
+APR_ECONNRESET = 620019
+APR_ETIMEDOUT = 620020
+APR_EHOSTUNREACH = 620021
+APR_ENETUNREACH = 620022
+APR_EFTYPE = 620023
+APR_EPIPE = 620024
+APR_EXDEV = 620025
+APR_ENOTEMPTY = 620026
+APR_EAFNOSUPPORT = 620027
+APR_OS_START_EAIERR = 670000
+APR_OS_START_SYSERR = 720000
diff --git a/tools/dev/benchmarks/RepoPerf/ClearMemory.cpp b/tools/dev/benchmarks/RepoPerf/ClearMemory.cpp
new file mode 100644
index 0000000..06ef6f5
--- /dev/null
+++ b/tools/dev/benchmarks/RepoPerf/ClearMemory.cpp
@@ -0,0 +1,55 @@
+/* ClearMemory.cpp --- A simple Windows memory cleaning tool
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include "targetver.h"
+
+#include <Windows.h>
+
+#include <stdio.h>
+#include <tchar.h>
+
+int _tmain(int argc, _TCHAR* argv[])
+{
+ // Get the current memory usage stats
+ MEMORYSTATUSEX statex;
+ statex.dwLength = sizeof (statex);
+ GlobalMemoryStatusEx(&statex);
+
+ // (Clean) cache memory will be listed under "available".
+ // So, allocate all available RAM, touch it and release it again.
+ unsigned char *memory = new unsigned char[statex.ullAvailPhys];
+ if (memory)
+ {
+ // Make every page dirty.
+ for (DWORDLONG i = 0; i < statex.ullAvailPhys; i += 4096)
+ memory[i]++;
+
+ // Give everything back to the OS.
+ // The in-RAM file read cache is empty now. There may still be bits in
+ // the swap file as well as dirty write buffers. But we don't care
+ // much about these here ...
+ delete memory;
+ }
+
+ return 0;
+}
+
diff --git a/tools/dev/benchmarks/RepoPerf/TimeWin.cpp b/tools/dev/benchmarks/RepoPerf/TimeWin.cpp
new file mode 100644
index 0000000..4acab99
--- /dev/null
+++ b/tools/dev/benchmarks/RepoPerf/TimeWin.cpp
@@ -0,0 +1,118 @@
+/* TimeWin.cpp --- A simple Windows tool inspired by Unix' "time".
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include "targetver.h"
+
+#include <Windows.h>
+
+#include <stdio.h>
+#include <tchar.h>
+
/* Print a short command-line help text to stdout. */
void usage()
{
  _tprintf(_T("Execute a command, redirect its stdout to NUL and print\n"));
  _tprintf(_T("execution times ELAPSED\\tUSER\\tKERNEL in seconds.\n"));
  _tprintf(_T("\n"));
  _tprintf(_T("Usage: TimeWin.EXE COMMAND [PARAMETERS]\n"));
}
+
/* Return a pointer into TARGV just past the first space-separated token
   and any run of spaces following it, i.e. the command line without our
   own program name.  Returns NULL when TARGV contains no space.
   NOTE(review): a quoted argv[0] containing spaces would be cut at its
   first space -- acceptable for this simple benchmark helper, but confirm. */
LPCTSTR skip_first_arg(LPCTSTR targv)
{
  LPCTSTR s = _tcschr(targv, ' ');
  while (s && *s == ' ')
    ++s;

  return s;
}
+
+double as_seconds(FILETIME time)
+{
+ return (double)*reinterpret_cast<LONGLONG *>(&time) / 10000000.0;
+}
+
+int _tmain(int argc, LPTSTR argv[])
+{
+ // Minimal CL help support
+ if (argc < 2 || _tcscmp(argv[1], _T("/?")) == 0)
+ {
+ usage();
+ return 0;
+ }
+
+ // Get a file handle for NUL.
+ SECURITY_ATTRIBUTES sa;
+ sa.nLength = sizeof(sa);
+ sa.lpSecurityDescriptor = NULL;
+ sa.bInheritHandle = TRUE;
+
+ HANDLE nul = CreateFile(_T("nul"), FILE_APPEND_DATA, FILE_SHARE_WRITE,
+ &sa, OPEN_ALWAYS, FILE_ATTRIBUTE_NORMAL, NULL);
+
+ // Construct a process startup info that uses the same handles as this
+ // one but redirects stdout to NUL.
+ STARTUPINFO startup_info;
+ GetStartupInfo(&startup_info);
+ startup_info.dwFlags |= STARTF_USESTDHANDLES;
+ startup_info.hStdOutput = nul;
+
+ // Execute the command line.
+ PROCESS_INFORMATION process_info;
+ CreateProcess(NULL, _tscdup(skip_first_arg(GetCommandLine())), NULL, NULL,
+ TRUE, NORMAL_PRIORITY_CLASS, NULL, NULL, &startup_info,
+ &process_info);
+
+ // Get a handle with the needed access rights to the child process.
+ HANDLE child = INVALID_HANDLE_VALUE;
+ DuplicateHandle(GetCurrentProcess(), process_info.hProcess,
+ GetCurrentProcess(), &child,
+ PROCESS_QUERY_INFORMATION | SYNCHRONIZE, FALSE, 0);
+
+ // Wait for the child to finish.
+ // If there was problem earlier (application not found etc.), this will fail.
+ bool success = false;
+ if (WaitForSingleObject(child, INFINITE) == WAIT_OBJECT_0)
+ {
+ // Finally, query the timers and show the result
+ FILETIME start_time, end_time, user_time, kernel_time;
+ if (GetProcessTimes(child, &start_time, &end_time, &kernel_time,
+ &user_time))
+ {
+ _tprintf(_T("%1.3f\t%1.3f\t%1.3f\n"),
+ as_seconds(end_time) - as_seconds(start_time),
+ as_seconds(user_time), as_seconds(kernel_time));
+ success = true;
+ }
+ }
+
+ // In case of failure, give some indication that something went wrong.
+ if (!success)
+ _tprintf(_T("?.???\t?.???f\t?.???\n"),
+
+ // Be good citizens and clean up our mess
+ CloseHandle(child);
+ CloseHandle(process_info.hThread);
+ CloseHandle(process_info.hProcess);
+
+ CloseHandle(nul);
+
+ return 0;
+}
diff --git a/tools/dev/benchmarks/RepoPerf/copy_repo.py b/tools/dev/benchmarks/RepoPerf/copy_repo.py
new file mode 100644
index 0000000..6f40c88
--- /dev/null
+++ b/tools/dev/benchmarks/RepoPerf/copy_repo.py
@@ -0,0 +1,313 @@
+#!/usr/bin/env python
+#
+# copy_repo.py: create multiple, interleaved copies of a set of repositories.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import os
+import random
+import shutil
+import sys
+
+class Separators:
+ """ This class is a container for dummy / filler files.
+ It will be used to create spaces between repository
+ versions on disk, i.e. to simulate some aspect of
+ real-world FS fragmentation.
+
+ It gets initialized with some parent path as well as
+ the desired average file size and will create a new
+ such file with each call to write(). Automatic
+ sharding keeps FS specific overhead at bay. Call
+ cleanup() to eventually delete all dummy files. """
+
+ buffer = "A" * 4096
+ """ Write this non-NULL contents into the dummy files. """
+
+ def __init__(self, path, average_size):
+ """ Initialize and store all dummy files in a '__tmp'
+ sub-folder of PATH. The size of each dummy file
+ is a random value and will be slightly AVERAGE_SIZE
+ kBytes on average. A value of 0 will effectively
+ disable dummy file creation. """
+
+ self.path = os.path.join(path, '__tmp')
+ self.size = average_size
+ self.count = 0
+
+ if os.path.exists(self.path):
+ shutil.rmtree(self.path)
+
+ os.mkdir(self.path)
+
+ def write(self):
+ """ Add a new dummy file """
+
+ # Throw dice of a file size.
+ # Factor 1024 for kBytes, factor 2 for being an average.
+ size = (int)(float(self.size) * random.random() * 2 * 1024.0)
+
+ # Don't create empty files. This also implements the
+ # "average = 0 means no files" rule.
+ if size > 0:
+ self.count += 1
+
+ # Create a new shard for every 1000 files
+ subfolder = os.path.join(self.path, str(self.count / 1000))
+ if not os.path.exists(subfolder):
+ os.mkdir(subfolder)
+
+ # Create and write the file in 4k chunks.
+ # Writing full chunks will result in average file sizes
+ # being slightly above the SELF.SIZE. That's good enough
+ # for our purposes.
+ f = open(os.path.join(subfolder, str(self.count)), "wb")
+ while size > 0:
+ f.write(self.buffer)
+ size -= len(self.buffer)
+
+ f.close()
+
+ def cleanup(self):
+ """ Get rid of all the files (and folders) that we created. """
+
+ shutil.rmtree(self.path)
+
class Repository:
    """ Encapsulates key information of a repository.  It is being
        used for copy sources only and contains information about
        its NAME, PATH, SHARD_SIZE, HEAD revision and MIN_UNPACKED_REV. """

    def _read_config(self, filename):
        """ Read and return all lines from FILENAME.
            This will be used to read 'format', 'current' etc. . """

        f = open(os.path.join(self.path, 'db', filename), "rb")
        lines = f.readlines()
        f.close()

        return lines

    def __init__(self, parent, name):
        """ Constructor collecting everything we need to know about
            the repository NAME within PARENT folder. """

        self.name = name
        self.path = os.path.join(parent, name)

        # Shard size: third token of the second line of db/format
        # (assumes a sharded repository layout -- TODO confirm).
        self.shard_size = int(self._read_config('format')[1].split(' ')[2])
        # Oldest revision not yet in a pack file, and the newest revision.
        self.min_unpacked_rev = int(self._read_config('min-unpacked-rev')[0])
        self.head = int(self._read_config('current')[0])

    def needs_copy(self, revision):
        """ Return True if REVISION is a revision in this repository
            and is "directly copyable", i.e. is either non-packed or
            the first rev in a packed shard.  Everything else is either
            not a valid rev or already gets / got copied as part of
            some packed shard. """

        if revision > self.head:
            return False
        if revision < self.min_unpacked_rev:
            # Packed range: only the shard's first revision triggers a copy.
            return revision % self.shard_size == 0

        return True

    @classmethod
    def is_repository(cls, path):
        """ Quick check that PATH is (probably) a repository.
            This is mainly to filter out aux files put next to
            (not inside) the repositories to copy. """

        format_path = os.path.join(path, 'db', 'format')
        return os.path.isfile(format_path)
+
class Multicopy:
    """ Helper class doing the actual copying.  It copies individual
        revisions and packed shards from the one source repository
        to multiple copies of it.  The copies have the same name
        as the source repo but with numbers 0 .. N-1 appended to it.

        The copy process is being initiated by the constructor
        (copies the repo skeleton w/o revision contents).  Revision
        contents is then copied by successive calls to the copy()
        method.

        NOTE(review): revision arithmetic uses '/' on ints, i.e. this
        code expects Python 2 integer division (consistent with the
        xrange use elsewhere in this script). """

    def _init_copy(self, number):
        """ Called from the constructor, this creates copy NUMBER of
            SELF.SOURCE_REPO below SELF.DEST_BASE but omits everything
            below db/revs and db/revprops. """

        src = self.source_repo.path
        dst = self.dest_base + str(number)

        # Copy the repo skeleton w/o revs and revprops
        shutil.copytree(src, dst, ignore=shutil.ignore_patterns('revs', 'revprops'))

        # Add empty revs and revprops folders; revision data is copied
        # into them later by copy().
        self.dst_revs.append(os.path.join(dst, 'db', 'revs'))
        self.dst_revprops.append(os.path.join(dst, 'db', 'revprops'))

        os.mkdir(self.dst_revs[number])
        os.mkdir(self.dst_revprops[number])

    def _copy_packed_shard(self, shard, number):
        """ Copy packed shard number SHARD from SELF.SOURCE_REPO to
            the copy NUMBER below SELF.DEST_BASE. """

        # Shards are simple subtrees
        src_revs = os.path.join(self.src_revs, str(shard) + '.pack')
        dst_revs = os.path.join(self.dst_revs[number], str(shard) + '.pack')
        src_revprops = os.path.join(self.src_revprops, str(shard) + '.pack')
        dst_revprops = os.path.join(self.dst_revprops[number], str(shard) + '.pack')

        shutil.copytree(src_revs, dst_revs)
        shutil.copytree(src_revprops, dst_revprops)

        # Special case: revprops of rev 0 are never packed => extra copy
        if shard == 0:
            src_revprops = os.path.join(self.src_revprops, '0')
            dest_revprops = os.path.join(self.dst_revprops[number], '0')

            shutil.copytree(src_revprops, dest_revprops)

    def _copy_single_revision(self, revision, number):
        """ Copy non-packed REVISION from SELF.SOURCE_REPO to the copy
            NUMBER below SELF.DEST_BASE. """

        # Shard folder name (Python 2 integer division).
        shard = str(revision / self.source_repo.shard_size)

        # Auto-create shard folder on the shard's first revision.
        if revision % self.source_repo.shard_size == 0:
            os.mkdir(os.path.join(self.dst_revs[number], shard))
            os.mkdir(os.path.join(self.dst_revprops[number], shard))

        # Copy the rev file and the revprop file
        src_rev = os.path.join(self.src_revs, shard, str(revision))
        dest_rev = os.path.join(self.dst_revs[number], shard, str(revision))
        src_revprop = os.path.join(self.src_revprops, shard, str(revision))
        dest_revprop = os.path.join(self.dst_revprops[number], shard, str(revision))

        shutil.copyfile(src_rev, dest_rev)
        shutil.copyfile(src_revprop, dest_revprop)

    def __init__(self, source, target_parent, count):
        """ Initiate the copy process for the SOURCE repository to
            be copied COUNT times into the TARGET_PARENT directory. """

        self.source_repo = source
        self.dest_base = os.path.join(target_parent, source.name)

        self.src_revs = os.path.join(source.path, 'db', 'revs')
        self.src_revprops = os.path.join(source.path, 'db', 'revprops')

        # Per-copy destination folders, filled in by _init_copy().
        self.dst_revs = []
        self.dst_revprops = []
        for i in range(0, count):
            self._init_copy(i)

    def copy(self, revision, number):
        """ Copy (packed or non-packed) REVISION from SELF.SOURCE_REPO
            to the copy NUMBER below SELF.DEST_BASE.

            SELF.SOURCE_REPO.needs_copy(REVISION) must be True. """

        # Revisions below min-unpacked-rev only exist inside packed shards.
        if revision < self.source_repo.min_unpacked_rev:
            self._copy_packed_shard(revision / self.source_repo.shard_size, number)
        else:
            self._copy_single_revision(revision, number)
+
def copy_repos(src, dst, count, separator_size):
    """ Under DST, create COUNT copies of all repositories immediately
        below SRC.

        All copies will "interleaved" such that we copy each individual
        revision / packed shard to all target repos first before
        continuing with the next revision / packed shard.  After each
        round (revision / packed shard) insert a temporary file of
        SEPARATOR_SIZE kBytes on average to add more spacing between
        revisions.  The temp files get automatically removed at the end.

        Please note that this function will clear DST before copying
        anything into it. """

    # Remove any remnants from the target folder, if there are any.
    # (DST gets auto-created by the first repo copy.)
    # Fixed: an unconditional rmtree raised when DST did not exist yet.
    if os.path.exists(dst):
        shutil.rmtree(dst)

    # Repositories to copy and the respective copy utilities
    repositories = []
    copies = []

    # Find repositories, initiate copies and determine the range of
    # revisions to copy in total
    max_revision = 0
    for name in os.listdir(src):
        if Repository.is_repository(os.path.join(src, name)):
            repository = Repository(src, name)
            repositories.append(repository)
            copies.append(Multicopy(repository, dst, count))

            if repository.head > max_revision:
                max_revision = repository.head

    # Temp file collection (spacers)
    separators = Separators(dst, separator_size)

    # Copy all repos in revision,number-major order
    for revision in xrange(0, max_revision + 1):
        for number in xrange(0, count):

            any_copy = False
            for i in xrange(0, len(repositories)):
                if repositories[i].needs_copy(revision):
                    any_copy = True
                    copies[i].copy(revision, number)

            # Don't add spacers when nothing got copied (REVISION is
            # packed in all repositories).
            if any_copy:
                separators.write()

    # Now that all data is in position, remove the spacers
    separators.cleanup()
+
def show_usage():
    """ Print a short command-line usage description. """

    usage_lines = (
        "Copies and duplicates repositories in a way that mimics larger deployments.",
        "",
        "Usage:",
        "copy_repo.py SRC DST COUNT SEPARATOR_SIZE",
        "",
        "SRC Immediate parent folder of all the repositories to copy.",
        "DST Folder to copy into; current contents will be lost.",
        "COUNT Number of copies to create of each source repository.",
        "SEPARATOR_SIZE Additional spacing, in kBytes, between revisions.",
    )
    for text in usage_lines:
        print(text)
+
# main function: expect script name + 4 arguments, otherwise show the help.
if __name__ == '__main__':
    # Fixed: the original tested len(argv), a NameError -- it must be
    # sys.argv (so the help text was never reachable and valid invocations
    # crashed immediately).
    if len(sys.argv) == 5:
        copy_repos(sys.argv[1], sys.argv[2], int(sys.argv[3]), int(sys.argv[4]))
    else:
        show_usage()
diff --git a/tools/dev/benchmarks/RepoPerf/win_repo_bench.py b/tools/dev/benchmarks/RepoPerf/win_repo_bench.py
new file mode 100644
index 0000000..b2493d3
--- /dev/null
+++ b/tools/dev/benchmarks/RepoPerf/win_repo_bench.py
@@ -0,0 +1,268 @@
+#!/usr/bin/env python
+#
+# win_repo_bench.py: run repository / server performance tests on Windows.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import os
+import shutil
+import sys
+import subprocess
+import time
+
+from win32com.shell import shell, shellcon
+
# Adapt these paths to your needs

# Contains all the REPOSITORIES
repo_parent = "C:\\repos"

# Where to create working copies
wc_path = "C:\\wc"
# Folder containing the Subversion release binaries (svn.exe, svnserve.exe, ...)
exe_path = "C:\\develop\\Subversion\\trunk\\Release"
# Apache installation root (provides bin\httpd.exe and conf\extra\)
apache_path = "C:\\develop\\Subversion"

# Test these repositories and in this order.
# Actual repository names have numbers 0 .. REPETITIONS-1 appended to them
repositories = ["ruby-f6-nonpacked", "ruby-f7-nonpacked",
                "ruby-f6-packed", "ruby-f7-packed",
                "bsd-f6-nonpacked", "bsd-f7-nonpacked",
                "bsd-f6-packed", "bsd-f7-packed"]

# Basically lists the RA backends to test but as long as all repositories
# can be accessed using any of them, arbitrary URLs are possible.
prefixes = ["svn://localhost/", "http://localhost/svn/", "file:///C:/repos/"]

# Number of times to repeat the tests.  For each iteration, there must be
# a separate copy of all repositories.
repetitions = 3

# Server configurations to test
configurations = ['slow', 'medium', 'fast']
# Extra svnserve options per configuration (cache sizes, block read etc.)
svnserve_params = {
  'slow':"",
  'medium':"-M 256" ,
  'fast':"-M 1024 -c 0 --cache-revprops yes --block-read yes --client-speed 1000"
}
+
+
def clear_memory():
    """ Flush the in-RAM portion of the file / disk cache by delegating
        to the external ClearMemory.exe utility. """
    cmd = ["ClearMemory.exe"]
    subprocess.call(cmd)
+
def start_server(prefix, config):
    """ Depending on the url PREFIX, start the corresponding server with the
        given CONFIGuration.  file: and http: access will actually have been
        configured by set_config(). """

    if prefix.startswith("svn:"):
        # svnserve is launched detached via "cmd /c start".
        server_exe = os.path.join(exe_path, "svnserve.exe")
        command = ("cmd.exe /c start " + server_exe + " -dr " + repo_parent
                   + " " + svnserve_params[config])
        subprocess.call(command)
        time.sleep(2)
    elif prefix.startswith("http:"):
        server_exe = os.path.join(apache_path, 'bin', 'httpd.exe')
        subprocess.call(server_exe + " -k start")
        time.sleep(2)
+
def stop_server(prefix):
    """ Depending on the url PREFIX, stop / kill the corresponding server. """

    if prefix.startswith("svn:"):
        # svnserve runs detached; kill it by image name.
        subprocess.call("cmd.exe /c taskkill /im svnserve.exe /f > nul 2>&1")
        time.sleep(1)
    elif prefix.startswith("http:"):
        httpd = os.path.join(apache_path, 'bin', 'httpd.exe')
        subprocess.call(httpd + " -k stop")
        time.sleep(1)
+
def run_cs_command(state, config, repository, prefix, args):
    """ Run the client-side command given in ARGS.  Log the STATE of the
        caches, the CONFIG we are using, the REPOSITORY, the url PREFIX
        and finally the execution times. """

    # Make sure we can create a new working copy if we want to.
    if os.path.exists(wc_path):
        shutil.rmtree(wc_path)

    # Select the client to use: the stripped-down benchmark client for the
    # "null" operations, the full svn client otherwise.
    if ('null-export' in args) or ('null-log' in args):
        exe = os.path.join(exe_path, "svn-bench.exe")
    else:
        exe = os.path.join(exe_path, "svn.exe")

    # Display the operation.  sys.stdout.write() accepts exactly one string
    # argument, so the fields must be joined explicitly (passing them as
    # separate arguments raises a TypeError).
    repo_title = repository.replace('nonpacked', 'nopack')
    sys.stdout.write("\t".join((state, repo_title, prefix, config)) + "\t ")
    sys.stdout.flush()

    # Execute the command and show the execution times
    subprocess.call(["TimeWin.exe", exe] + args)
+
+
def run_test_cs_sequence(config, repository, run, prefix, command, args):
    """ Run the client-side COMMAND with the given ARGS in various stages
        of cache heat-up.  Execute the test with server CONFIG on REPOSITORY
        with the given url PREFIX. """

    # Build the full URL to use.  Exports operate on the main dev line only.
    url = prefix + repository + str(run)
    if command in ('export', 'null-export'):
        url += '/head' if repository.startswith('bsd') else '/trunk'

    # Full set of command arguments
    full_args = [command, url] + args

    # Free up caches best we can.
    clear_memory()

    # The first two measurements restart the server each time, i.e. run
    # with cold SVN caches while the OS caches warm up progressively.
    for state in ("Cold", "WarmOS"):
        start_server(prefix, config)
        run_cs_command(state, config, repository, prefix, full_args)
        stop_server(prefix)

    # OS caches may be even hotter now; SVN caches still start cold.
    start_server(prefix, config)
    run_cs_command("HotOS", config, repository, prefix, full_args)

    # Keep the server process and thus the warmed up SVN caches.
    # Run operation from hot OS and SVN caches.
    run_cs_command("WrmSVN", config, repository, prefix, full_args)
    run_cs_command("HotSVN", config, repository, prefix, full_args)
    stop_server(prefix)
+
+
def set_config(config):
    """ Switch configuration files to CONFIG.  This overwrites the client
        config file with config.$CONFIG and the server config file with
        subversion.$CONFIG.conf. """

    # Client side: replace %APPDATA%\Subversion\config with the template.
    appdata = shell.SHGetFolderPath(0, shellcon.CSIDL_APPDATA, None, 0)
    client_config = os.path.join(appdata, 'Subversion', 'config')
    shutil.copyfile(client_config + '.' + config, client_config)

    # Server side: replace Apache's subversion.conf with the template.
    conf_dir = os.path.join(apache_path, 'conf', 'extra')
    server_config = os.path.join(conf_dir, 'subversion.conf')
    server_template = os.path.join(conf_dir, 'subversion.' + config + '.conf')
    shutil.copyfile(server_template, server_config)
+
+
def run_test_cs_configurations(command, args):
    """ Run client COMMAND with basic arguments ARGS in all configurations
        repeatedly with all servers on all repositories. """

    # A bare 'print' expression is a no-op in Python 3; print an explicit
    # blank line instead (matches run_test_admin_configurations below).
    print("")
    print(command)
    print("")

    for config in configurations:
        set_config(config)
        for prefix in prefixes:
            # These two must be the innermost loops and must be in that order.
            # It gives us the coldest caches and the least temporal favoritism.
            for run in range(0, repetitions):
                for repository in repositories:
                    run_test_cs_sequence(config, repository, run, prefix, command, args)
+
def run_admin_command(state, config, repository, args):
    """ Run the svnadmin command given in ARGS.  Log the STATE of the
        caches, the CONFIG we are using, the REPOSITORY and finally
        the execution times. """

    exe = os.path.join(exe_path, "svnadmin.exe")

    # Map the server configuration onto the equivalent svnadmin cache size.
    if config == 'medium':
        extra = ['-M', '256']
    elif config == 'fast':
        extra = ['-M', '1024']
    else:
        extra = []

    # sys.stdout.write() accepts exactly one string argument, so join the
    # fields explicitly (separate arguments would raise a TypeError).
    sys.stdout.write("\t".join((state, repository, config)) + "\t ")
    sys.stdout.flush()
    subprocess.call(["TimeWin.exe", exe] + args + extra)
+
def run_test_admin_sequence(config, repository, run, command, args):
    """ Run the svnadmin COMMAND with the given ARGS in various stages
        of cache heat-up.  Execute the test with server CONFIG on
        REPOSITORY. """

    # Full set of command arguments
    repo_path = os.path.join(repo_parent, repository + str(run))
    full_args = [command, repo_path] + args

    # Free up caches best we can.
    clear_memory()

    # svnadmin runs can be quite costly and are usually CPU-bound.
    # Test with "cold" and "hot" CPU caches only.
    for state in ("Cold", "Hot"):
        run_admin_command(state, config, repository, full_args)
+
+
def run_test_admin_configurations(command, args):
    """ Run svnadmin COMMAND with basic arguments ARGS in all configurations
        repeatedly on all repositories. """

    print("")
    print(command)
    print("")

    for config in configurations:
        # These two must be the innermost loops and must be in that order.
        # It gives us the coldest caches and the least temporal favoritism.
        for iteration in range(repetitions):
            for repository in repositories:
                run_test_admin_sequence(config, repository, iteration, command, args)
+
+
def bench():
    """ Run all performance tests.

    Client-side 'log' and 'export' run against every server / URL scheme,
    followed by the svn-bench 'null' variants (same traffic, received data
    discarded), and finally a server-side 'svnadmin dump' of each repo.
    """

    run_test_cs_configurations('log', ['-v', '--limit', '50000'])
    run_test_cs_configurations('export', [wc_path, '-q'])

    run_test_cs_configurations('null-log', ['-v', '--limit', '50000', '-q'])
    run_test_cs_configurations('null-export', ['-q'])

    run_test_admin_configurations('dump', ['-q'])
+
# main function -- guarded so that importing this module for reuse does not
# immediately kick off a multi-hour benchmark run.
if __name__ == '__main__':
    bench()
diff --git a/tools/dev/benchmarks/large_dirs/create_bigdir.sh b/tools/dev/benchmarks/large_dirs/create_bigdir.sh
new file mode 100755
index 0000000..c2830c8
--- /dev/null
+++ b/tools/dev/benchmarks/large_dirs/create_bigdir.sh
@@ -0,0 +1,232 @@
+#!/bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# usage: run this script from the root of your working copy
+# and / or adjust the path settings below as needed
+
+# set SVNPATH to the 'subversion' folder of your SVN source code w/c
+
+SVNPATH="$('pwd')/subversion"
+
+# if using the installed svn, you may need to adapt the following.
+# Uncomment the VALGRIND line to use that tool instead of "time".
+# Comment the SVNSERVE line to use file:// instead of svn://.
+
+SVN=${SVNPATH}/svn/svn
+SVNADMIN=${SVNPATH}/svnadmin/svnadmin
+SVNSERVE=${SVNPATH}/svnserve/svnserve
+# VALGRIND="valgrind --tool=callgrind"
+
+# set your data paths here
+
+WC=/dev/shm/wc
+REPOROOT=/dev/shm
+
+# number of items per folder on the first run. It will be doubled
+# after every iteration. The test will stop if MAXCOUNT has been
+# reached or exceeded (and will not be executed for MAXCOUNT).
+
+FILECOUNT=1
+MAXCOUNT=20000
+
+# only 1.7 supports server-side caching and uncompressed data transfer
+
+SERVEROPTS="-c 0 -M 400"
+
+# from here on, we should be good
+
+TIMEFORMAT='%3R %3U %3S'
+REPONAME=dirs
+PORT=54321
+if [ "${SVNSERVE}" != "" ] ; then
+ URL=svn://localhost:$PORT/$REPONAME
+else
+ URL=file://${REPOROOT}/$REPONAME
+fi
+
+# create repository
+
+rm -rf $WC $REPOROOT/$REPONAME
+mkdir $REPOROOT/$REPONAME
+${SVNADMIN} create $REPOROOT/$REPONAME
+echo "[general]
+anon-access = write" > $REPOROOT/$REPONAME/conf/svnserve.conf
+
+# fire up svnserve
+
+if [ "${SVNSERVE}" != "" ] ; then
+ VERSION=$( ${SVNSERVE} --version | grep " version" | sed 's/.*\ 1\.\([0-9]\).*/\1/' )
+ if [ "$VERSION" -lt "7" ]; then
+ SERVEROPTS=""
+ fi
+
+ ${SVNSERVE} -Tdr ${REPOROOT} ${SERVEROPTS} --listen-port ${PORT} --foreground &
+ PID=$!
+ sleep 1
+fi
+
+# construct valgrind parameters
+
+if [ "${VALGRIND}" != "" ] ; then
+ VG_TOOL=$( echo ${VALGRIND} | sed 's/.*\ --tool=\([a-z]*\).*/\1/' )
+ VG_OUTFILE="--${VG_TOOL}-out-file"
+fi
+
+# print header
+
+printf "using "
+${SVN} --version | grep " version"
+echo
+
+# init working copy
+
+rm -rf $WC
+${SVN} co $URL $WC > /dev/null
+
+# helpers
+
get_sequence() {
  # Print the integers $1..$2, using whichever generator exists on this
  # platform: BSD jot, GNU seq, or python as a portable fallback.
  (jot - "$1" "$2" "1" 2>/dev/null || seq -s ' ' "$1" "$2" 2>/dev/null || python -c "for i in range($1,$2+1): print(i)")
}
+
+# functions that execute an SVN command
+
run_svn() {
  # Time (or, under valgrind, profile) "svn $1" on $WC/$2 with optional
  # extra argument $3.
  if [ "${VALGRIND}" = "" ] ; then
    time ${SVN} $1 $WC/$2 $3 > /dev/null
  else
    ${VALGRIND} ${VG_OUTFILE}="${VG_TOOL}.out.$1.$2" ${SVN} $1 $WC/$2 $3 > /dev/null
  fi
}
+
run_svn_del() {
  # Schedule the single file $2 inside the copied folder ${1}_c for deletion.
  if [ "${VALGRIND}" = "" ] ; then
    time ${SVN} del $WC/${1}_c/$2 -q > /dev/null
  else
    ${VALGRIND} ${VG_OUTFILE}="${VG_TOOL}.out.del.$1" ${SVN} del $WC/${1}_c/$2 -q > /dev/null
  fi
}
+
run_svn_del_many() {
  # Delete files 2..$1 of the copied folder with a single svn invocation,
  # passing the target list through a file.
  # NOTE(review): the first printf leaves a leading blank line in files.lst,
  # and the paths are used as printf FORMAT strings -- harmless here since
  # the generated names contain no '%' sequences, but fragile.
  printf "\n" > files.lst
  sequence=`get_sequence 2 ${1}`
  for i in $sequence; do
    printf "$WC/${1}_c/$i\n" >> files.lst
  done

  if [ "${VALGRIND}" = "" ] ; then
    time ${SVN} del -q --targets files.lst > /dev/null
  else
    ${VALGRIND} ${VG_OUTFILE}="${VG_TOOL}.out.del_many.$1" ${SVN} del -q --targets files.lst > /dev/null
  fi
}
+
run_svn_ci() {
  # Commit $WC/$1 with an empty log message; $2 only labels the valgrind
  # output file (add / copy / del).
  if [ "${VALGRIND}" = "" ] ; then
    time ${SVN} ci $WC/$1 -m "" -q > /dev/null
  else
    ${VALGRIND} ${VG_OUTFILE}="${VG_TOOL}.out.ci_$2.$1" ${SVN} ci $WC/$1 -m "" -q > /dev/null
  fi
}
+
run_svn_cp() {
  # Working-copy copy of $WC/$1 to $WC/$2.
  if [ "${VALGRIND}" = "" ] ; then
    time ${SVN} cp $WC/$1 $WC/$2 > /dev/null
  else
    ${VALGRIND} ${VG_OUTFILE}="${VG_TOOL}.out.cp.$1" ${SVN} cp $WC/$1 $WC/$2 > /dev/null
  fi
}
+
run_svn_get() {
  # Run a URL-based command ($1 = export | co) from $URL into $WC;
  # $2 only labels the valgrind output file.
  if [ "${VALGRIND}" = "" ] ; then
    time ${SVN} $1 $URL $WC -q > /dev/null
  else
    ${VALGRIND} ${VG_OUTFILE}="${VG_TOOL}.out.$1.$2" ${SVN} $1 $URL $WC -q > /dev/null
  fi
}
+
# main loop
#
# Each iteration benchmarks the svn operations on a folder holding
# $FILECOUNT files, then doubles the count for the next round.

while [ $FILECOUNT -lt $MAXCOUNT ]; do
  echo "Processing $FILECOUNT files in the same folder"

  # Create the unversioned files locally.
  sequence=`get_sequence 2 $FILECOUNT`
  printf "\tCreating files ... \t real user sys\n"
  mkdir $WC/$FILECOUNT
  for i in 1 $sequence; do
    echo "File number $i" > $WC/$FILECOUNT/$i
  done

  printf "\tAdding files ... \t"
  run_svn add $FILECOUNT -q

  printf "\tRunning status ... \t"
  run_svn st $FILECOUNT -q

  printf "\tCommit files ... \t"
  run_svn_ci $FILECOUNT add

  printf "\tListing files ... \t"
  run_svn ls $FILECOUNT

  printf "\tUpdating files ... \t"
  run_svn up $FILECOUNT -q

  printf "\tLocal copy ... \t"
  run_svn_cp $FILECOUNT ${FILECOUNT}_c

  printf "\tCommit copy ... \t"
  run_svn_ci ${FILECOUNT}_c copy

  printf "\tDelete 1 file ... \t"
  run_svn_del ${FILECOUNT} 1

  printf "\tDeleting files ... \t"
  # Use "=" rather than "==": this script runs under /bin/sh and the POSIX
  # test(1) utility only defines the single-equals string comparison.
  if [ "$FILECOUNT" = "1" ] ; then
    printf " skipped (0 files to delete)\n"
  else
    run_svn_del_many ${FILECOUNT}
  fi

  printf "\tCommit deletions ..\t"
  run_svn_ci ${FILECOUNT}_c del

  # Remove the working copy so export / checkout start from scratch.
  rm -rf $WC

  printf "\tExport all ... \t"
  run_svn_get export $FILECOUNT

  rm -rf $WC
  mkdir $WC

  printf "\tCheck out all ... \t"
  run_svn_get co $FILECOUNT

  # Double the file count for the next iteration.
  FILECOUNT=`echo 2 \* $FILECOUNT | bc`
  echo ""
done
+
# tear down

# Stop the background svnserve instance started above, if any.
if [ "${SVNSERVE}" != "" ] ; then
  echo "killing svnserve ... "
  kill $PID
fi
+
diff --git a/tools/dev/benchmarks/suite1/benchmark.py b/tools/dev/benchmarks/suite1/benchmark.py
new file mode 100755
index 0000000..250d1d5
--- /dev/null
+++ b/tools/dev/benchmarks/suite1/benchmark.py
@@ -0,0 +1,1309 @@
+#!/usr/bin/env python
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""Usage: benchmark.py run|list|compare|show|chart <selection> ...
+
+SELECTING TIMINGS -- B@R,LxS
+
+In the subcommands below, a timings selection consists of a string with up to
+four elements:
+ <branch>@<revision>,<levels>x<spread>
+abbreviated as:
+ B@R,LxS
+
+<branch> is a label of an svn branch, e.g. "1.7.x".
+<revision> is the last-changed-revision of above branch.
+<levels> is the number of directory levels created in the benchmark.
+<spread> is the number of child trees spreading off each dir level.
+
+<branch_name> and <revision> are simply used for labeling. Upon the actual
+test runs, you should enter labels matching the selected --svn-bin-dir.
+Later, you can select runs individually by using these labels.
+
+For <revision>, you can provide special keywords:
+- 'each' has the same effect as entering each available revision number that
+ is on record in the db in a separate timings selection.
+- 'last' is the same as 'each', but shows only the last 10 revisions. 'last'
+ can be combined with a number, e.g. 'last12'.
+
+For all subcommands except 'run', you can omit some or all of the elements of
+a timings selection to combine all available timings sets. Try that out with
+the 'list' subcommand.
+
+Examples:
+ benchmark.py run 1.7.x@12345,5x5
+ benchmark.py show trunk@12345
+ benchmark.py compare 1.7.0,1x100 trunk@each,1x100
+ benchmark.py chart compare 1.7.0,5x5 trunk@last12,5x5
+
+
+RUN BENCHMARKS
+
+ benchmark.py run B@R,LxS [N] [options]
+
+Test data is added to an sqlite database created automatically, by default
+'benchmark.db' in the current working directory. To specify a different path,
+use option -f <path_to_db>.
+
+If <N> is provided, the run is repeated N times.
+
+<levels> and <spread> control the way the tested working copy is structured:
+ <levels>: number of directory levels to create.
+ <spread>: number of files and subdirectories created in each dir.
+
+
+LIST WHAT IS ON RECORD
+
+ benchmark.py list [B@R,LxS]
+
+Find entries in the database for the given constraints. Any arguments can
+be omitted. (To select only a rev, start with a '@', like '@123'; to select
+only spread, start with an 'x', like "x100".)
+
+Call without arguments to get a listing of all available constraints.
+
+
+COMPARE TIMINGS
+
+ benchmark.py compare B@R,LxS B@R,LxS [B@R,LxS [...]]
+
+Compare any number of timings sets to the first provided set (in text mode).
+For example:
+ benchmark.py compare 1.7.0 trunk@1349903
+ Compare the total timings of all combined '1.7.0' branch runs to
+ all combined runs of 'trunk'-at-revision-1349903.
+ benchmark.py compare 1.7.0,5x5 trunk@1349903,5x5
+ Same as above, but only compare the working copy types with 5 levels
+ and a spread of 5.
+
+Use the -c option to limit comparison to specific command names.
+
+
+SHOW TIMINGS
+
+ benchmark.py show B@R,LxS [B@R,LxS [...]]
+
+Print out a summary of the timings selected from the given constraints.
+
+
+GENERATE CHARTS
+
+ benchmark.py chart compare B@R,LxS B@R,LxS [ B@R,LxS ... ]
+
+Produce a bar chart that compares any number of sets of timings. Like with
+the plain 'compare' command, the first set is taken as a reference point for
+100% and +-0 seconds. Each following dataset produces a set of labeled bar
+charts, grouped by svn command names. At least two timings sets must be
+provided.
+
+Use the -c option to limit comparison to specific command names.
+
+
+EXAMPLES
+
+# Run 3 benchmarks on svn 1.7.0 with 5 dir levels and 5 files and subdirs for
+# each level (spread). Timings are saved in ./benchmark.db.
+# Provide label '1.7.0' and its Last-Changed-Rev for later reference.
+./benchmark.py run --svn-bin-dir ~/svn-prefix/1.7.0/bin 1.7.0@1181106,5x5 3
+
+# Record 3 benchmark runs on trunk, again naming its Last-Changed-Rev.
+# (You may also set your $PATH instead of using --svn-bin-dir.)
+./benchmark.py run --svn-bin-dir ~/svn-prefix/trunk/bin trunk@1352725,5x5 3
+
+# Work with the results of above two runs
+./benchmark.py list
+./benchmark.py compare 1.7.0 trunk
+./benchmark.py show 1.7.0 trunk
+./benchmark.py chart compare 1.7.0 trunk
+./benchmark.py chart compare 1.7.0 trunk -c "update,commit,TOTAL RUN"
+
+# Rebuild r1352598, run it and chart improvements since 1.7.0.
+svn up -r1352598 ~/src/trunk
+make -C ~/src/trunk dist-clean install
+export PATH="$HOME/svn-prefix/trunk/bin:$PATH"
+which svn
+./benchmark.py run trunk@1352598,5x5 3
+./benchmark.py chart compare 1.7.0 trunk@1352598 trunk@1352725 -o chart.svg
+
+
+GLOBAL OPTIONS"""
+
+import os
+import time
+import datetime
+import sqlite3
+import optparse
+import tempfile
+import subprocess
+import random
+import shutil
+import stat
+import string
+from copy import copy
+
# Arguments that must not be timed (version probes and the like).
IGNORE_COMMANDS = ('--version', )
# Pseudo command name; selectable with -c alongside real command names
# (see the "update,commit,TOTAL RUN" example in the module docstring).
TOTAL_RUN = 'TOTAL RUN'

# Shorthand: paths are joined constantly in this file.
j = os.path.join
+
def bail(msg=None):
    """Terminate the program with exit status 1, printing MSG when given."""
    if msg:
        print(msg)
    exit(1)
+
def time_str():
    """Current local time formatted as 'YYYY-MM-DD HH:MM:SS' (sqlite-friendly)."""
    return time.strftime('%Y-%m-%d %H:%M:%S')
+
def timedelta_to_seconds(td):
    """Return the total duration of timedelta TD as a float of seconds.

    Uses timedelta.total_seconds() (available since Python 2.7), which is
    exactly the days/seconds/microseconds sum the original computed by hand.
    """
    return td.total_seconds()
+
def run_cmd(cmd, stdin=None, shell=False, verbose=False):
    """Run CMD in a subprocess and return its (stdout, stderr).

    CMD is an argument list unless SHELL is true, in which case it is a
    shell command string.  STDIN, when given, is piped to the child.
    VERBOSE echoes the captured output after the run completes.
    """
    # NOTE(review): the command line itself is echoed based on the global
    # options.verbose, not the VERBOSE parameter -- confirm intentional.
    if options.verbose:
        if shell:
            printable_cmd = cmd
        else:
            printable_cmd = ' '.join(cmd)
        print('CMD:', printable_cmd)

    # Only open a stdin pipe when there is data to feed it.
    if stdin:
        stdin_arg = subprocess.PIPE
    else:
        stdin_arg = None

    p = subprocess.Popen(cmd,
                         stdin=stdin_arg,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         shell=shell)
    stdout,stderr = p.communicate(input=stdin)

    if verbose:
        if (stdout):
            print("STDOUT: [[[\n%s]]]" % ''.join(stdout))
        if (stderr):
            print("STDERR: [[[\n%s]]]" % ''.join(stderr))

    return stdout, stderr
+
+
# Process-wide counter backing next_unique_basename().
_next_unique_basename_count = 0

def next_unique_basename(prefix):
    """Return PREFIX extended with a process-wide unique numeric suffix."""
    global _next_unique_basename_count
    _next_unique_basename_count += 1
    return '%s_%d' % (prefix, _next_unique_basename_count)
+
+
si_units = [
  (1000 ** 5, 'P'),
  (1000 ** 4, 'T'),
  (1000 ** 3, 'G'),
  (1000 ** 2, 'M'),
  (1000 ** 1, 'K'),
  (1000 ** 0, ''),
  ]
def n_label(n):
    """Format N with an SI magnitude suffix. (stolen from hurry.filesize)"""
    # Default to the smallest unit; it also covers n < 1.
    factor, suffix = si_units[-1]
    for unit_factor, unit_suffix in si_units:
        if n >= unit_factor:
            factor, suffix = unit_factor, unit_suffix
            break
    amount = int(n/factor)
    if isinstance(suffix, tuple):
        singular, multiple = suffix
        suffix = singular if amount == 1 else multiple
    return str(amount) + suffix
+
+
def split_arg_once(l_r, sep):
    """Split L_R at SEP into a (left, right) pair.

    Missing or empty halves come back as None; a None/empty input yields
    (None, None).  More than one SEP raises ValueError, as before.
    """
    if not l_r:
        return (None, None)
    if sep in l_r:
        left, right = l_r.split(sep)
    else:
        left, right = l_r, None
    # Normalize empty strings to None.
    return (left or None, right or None)
+
RUN_KIND_SEPARATORS=('@', ',', 'x')

class RunKind:
    """Parsed form of a 'branch@revision,levelsxspread' selection string."""

    def __init__(self, b_r_l_s):
        sep_rev, sep_wc, sep_spread = RUN_KIND_SEPARATORS
        b_r, l_s = split_arg_once(b_r_l_s, sep_wc)
        self.branch, self.revision = split_arg_once(b_r, sep_rev)
        self.levels, self.spread = split_arg_once(l_s, sep_spread)
        if self.levels: self.levels = int(self.levels)
        if self.spread: self.spread = int(self.spread)

    def label(self):
        """Reassemble the canonical selection string from the parts set."""
        parts = []
        if self.branch:
            parts.append(self.branch)
        if self.revision:
            parts.extend((RUN_KIND_SEPARATORS[0], self.revision))
        if self.levels or self.spread:
            parts.append(RUN_KIND_SEPARATORS[1])
            if self.levels:
                parts.append(str(self.levels))
            if self.spread:
                parts.extend((RUN_KIND_SEPARATORS[2], str(self.spread)))
        return ''.join(parts)

    def args(self):
        """Return (branch, revision, levels, spread) for DB parameter binding."""
        return (self.branch, self.revision, self.levels, self.spread)
+
+
def parse_timings_selections(db, *args):
    """Expand each selection string in ARGS into a list of RunKind objects.

    The special revisions 'each' and 'last[N]' expand into one RunKind per
    matching revision found in DB ('last' alone means the last 10).
    """
    run_kinds = []

    for arg in args:
        run_kind = RunKind(arg)
        rev = run_kind.revision

        if rev == 'each':
            # One RunKind per revision on record.
            run_kind.revision = None
            for revision in TimingQuery(db, run_kind).get_sorted_revisions():
                expanded = copy(run_kind)
                expanded.revision = revision
                run_kinds.append(expanded)
        elif rev and rev.startswith('last'):
            # 'last' == last 10 revisions; 'lastN' == last N.
            suffix = rev[4:]
            count = int(suffix) if suffix else 10
            run_kind.revision = None
            for revision in TimingQuery(db, run_kind).get_sorted_revisions()[-count:]:
                expanded = copy(run_kind)
                expanded.revision = revision
                run_kinds.append(expanded)
        else:
            run_kinds.append(run_kind)

    return run_kinds
+
def parse_one_timing_selection(db, *args):
    """Parse ARGS into exactly one RunKind and return it; bail() otherwise.

    Used by subcommands that accept a single timings identifier.
    """
    run_kinds = parse_timings_selections(db, *args)
    if len(run_kinds) != 1:
        # ' '.join(args), not ' '.join(*args): the latter unpacks ARGS into
        # separate join() arguments, raising TypeError for more than one
        # argument and space-separating the characters of a single one.
        bail("I need exactly one timings identifier, not '%s'"
             % (' '.join(args)))
    return run_kinds[0]
+
+
+
+
# Characters allowed in generated file names: word chars plus -_.,@
PATHNAME_VALID_CHARS = "-_.,@%s%s" % (string.ascii_letters, string.digits)
def filesystem_safe_string(s):
    """Return S with every character not safe for a file name removed."""
    kept = [ch for ch in s if ch in PATHNAME_VALID_CHARS]
    return ''.join(kept)
+
def do_div(ref, val):
    """Return VAL/REF as a float ratio; 0.0 when the reference is zero/empty."""
    return float(val) / float(ref) if ref else 0.0
+
def do_diff(ref, val):
    """Return the difference VAL - REF as a float."""
    delta = float(val) - float(ref)
    return delta
+
+
+# ------------------------- database -------------------------
+
class TimingsDb:
    """Connection to the sqlite timings database; creates the schema on demand."""

    def __init__(self, db_path):
        self.db_path = db_path
        self.conn = sqlite3.connect(db_path)
        self.ensure_tables_created()

    def ensure_tables_created(self):
        """Create the batch/run_kind/run/timings tables unless already present."""
        cursor = self.conn.cursor()

        # The 'batch' table doubles as the schema-exists marker.
        cursor.execute("""SELECT name FROM sqlite_master WHERE type='table' AND
                          name='batch'""")
        if cursor.fetchone():
            # exists
            return

        print('Creating database tables.')
        cursor.executescript('''
            CREATE TABLE batch (
              batch_id INTEGER PRIMARY KEY AUTOINCREMENT,
              started TEXT,
              ended TEXT
            );

            CREATE TABLE run_kind (
              run_kind_id INTEGER PRIMARY KEY AUTOINCREMENT,
              branch TEXT NOT NULL,
              revision TEXT NOT NULL,
              wc_levels INTEGER,
              wc_spread INTEGER,
              UNIQUE(branch, revision, wc_levels, wc_spread)
            );

            CREATE TABLE run (
              run_id INTEGER PRIMARY KEY AUTOINCREMENT,
              batch_id INTEGER NOT NULL REFERENCES batch(batch_id),
              run_kind_id INTEGER NOT NULL REFERENCES run_kind(run_kind_id),
              started TEXT,
              ended TEXT,
              aborted INTEGER
            );

            CREATE TABLE timings (
              run_id INTEGER NOT NULL REFERENCES run(run_id),
              command TEXT NOT NULL,
              sequence INTEGER,
              timing REAL
            );'''
            )
        self.conn.commit()
        cursor.close()
+
+
class Batch:
    """One benchmarking session; groups any number of runs in the DB."""

    def __init__(self, db):
        self.db = db
        self.started = time_str()
        cursor = db.conn.cursor()
        cursor.execute("INSERT INTO batch (started) values (?)", (self.started,))
        db.conn.commit()
        # The AUTOINCREMENT id of the freshly inserted row.
        self.id = cursor.lastrowid
        cursor.close()

    def done(self):
        """Stamp this batch's end time in the DB."""
        connection = self.db.conn
        cursor = connection.cursor()
        cursor.execute("""
            UPDATE batch
            SET ended = ?
            WHERE batch_id = ?""",
            (time_str(), self.id))
        connection.commit()
        cursor.close()
+
class Run:
    """A single benchmark run: DB bookkeeping plus per-command timing.

    tic()/toc() bracket individual commands; samples are collected in
    memory and written out in one batch by submit_timings().
    """

    def __init__(self, batch, run_kind):
        self.batch = batch
        conn = self.batch.db.conn
        c = conn.cursor()

        # Look up the run_kind row for (branch, revision, levels, spread),
        # inserting it on first use.
        c.execute("""
            SELECT run_kind_id FROM run_kind
            WHERE branch = ?
              AND revision = ?
              AND wc_levels = ?
              AND wc_spread = ?""",
            run_kind.args())
        kind_ids = c.fetchone()
        if kind_ids:
            kind_id = kind_ids[0]
        else:
            c.execute("""
                INSERT INTO run_kind (branch, revision, wc_levels, wc_spread)
                VALUES (?, ?, ?, ?)""",
                run_kind.args())
            conn.commit()
            kind_id = c.lastrowid

        self.started = time_str()

        # Register the run itself; 'ended'/'aborted' are filled in by done().
        c.execute("""
            INSERT INTO run
              (batch_id, run_kind_id, started)
            VALUES
              (?, ?, ?)""",
            (self.batch.id, kind_id, self.started))
        conn.commit()
        self.id = c.lastrowid
        c.close();
        # In-memory timing state for the currently measured command.
        self.tic_at = None
        self.current_command = None
        self.timings = []

    def tic(self, command):
        """Start timing COMMAND (implicitly finishing any running measurement)."""
        if command in IGNORE_COMMANDS:
            return
        self.toc()
        self.current_command = command
        self.tic_at = datetime.datetime.now()

    def toc(self):
        """Finish the current measurement, if any, and record its duration."""
        if self.current_command and self.tic_at:
            toc_at = datetime.datetime.now()
            self.remember_timing(self.current_command,
                                 timedelta_to_seconds(toc_at - self.tic_at))
            self.current_command = None
            self.tic_at = None

    def remember_timing(self, command, seconds):
        """Queue one (command, seconds) sample for submit_timings()."""
        self.timings.append((command, seconds))

    def submit_timings(self):
        """Write all collected samples to the timings table in one executemany()."""
        conn = self.batch.db.conn
        c = conn.cursor()
        print('submitting...')

        # 'sequence' preserves the order in which the samples were taken.
        c.executemany("""
            INSERT INTO timings
              (run_id, command, sequence, timing)
            VALUES
              (?, ?, ?, ?)""",
            [(self.id, t[0], (i + 1), t[1]) for i,t in enumerate(self.timings)])

        conn.commit()
        c.close()

    def done(self, aborted=False):
        """Stamp this run's end time and whether it was aborted."""
        conn = self.batch.db.conn
        c = conn.cursor()
        c.execute("""
            UPDATE run
            SET ended = ?, aborted = ?
            WHERE run_id = ?""",
            (time_str(), aborted, self.id))
        conn.commit()
        c.close()
+
+
class TimingQuery:
    """Builds and runs SELECTs over the timings schema for one RunKind.

    Filter conditions (branch, revision, wc levels/spread) are collected as
    'AND col = ?' fragments in self.constraints with their bound values in
    self.values, and appended to every query assembled here.
    """

    def __init__(self, db, run_kind):
        # Keep the database reference so the query methods below do not
        # depend on a module-global 'db' (the original read the global,
        # which made the class unusable in isolation).
        self.db = db
        self.cursor = db.conn.cursor()
        self.constraints = []
        self.values = []
        self.timings = None
        self.FROM_WHERE = """
         FROM batch AS b,
              timings AS t,
              run AS r,
              run_kind as k
         WHERE
              t.run_id = r.run_id
          AND k.run_kind_id = r.run_kind_id
          AND b.batch_id = r.batch_id
          AND r.aborted = 0
         """
        self.append_constraint('k.branch', run_kind.branch)
        self.each_revision = False
        if run_kind.revision == 'each':
            self.each_revision = True
        else:
            self.append_constraint('k.revision', run_kind.revision)
        self.append_constraint('k.wc_levels', run_kind.levels)
        self.append_constraint('k.wc_spread', run_kind.spread)
        self.label = run_kind.label()

    def append_constraint(self, column_name, val):
        """Add 'AND column_name = ?' plus its bound value; no-op if VAL is falsy."""
        if val:
            self.constraints.append('AND %s = ?' % column_name)
            self.values.append(val)

    def remove_last_constraint(self):
        """Undo the most recent append_constraint()."""
        del self.constraints[-1]
        del self.values[-1]

    def get_sorted_X(self, x, n=1):
        """Return DISTINCT, sorted values of SQL expression X (N result columns)."""
        query = ['SELECT DISTINCT %s' % x,
                 self.FROM_WHERE ]
        query.extend(self.constraints)
        query.append('ORDER BY %s' % x)
        c = self.db.conn.cursor()
        try:
            c.execute(' '.join(query), self.values)
            if n == 1:
                return [tpl[0] for tpl in c.fetchall()]
            else:
                return c.fetchall()
        finally:
            c.close()

    def get_sorted_command_names(self):
        return self.get_sorted_X('t.command')

    def get_sorted_branches(self):
        return self.get_sorted_X('k.branch')

    def get_sorted_revisions(self):
        return self.get_sorted_X('k.revision')

    def get_sorted_levels_spread(self):
        return self.get_sorted_X('k.wc_levels,k.wc_spread', n = 2)

    def count_runs_batches(self):
        """Return (#matching runs, #matching batches)."""
        query = ["""SELECT
                       count(DISTINCT r.run_id),
                       count(DISTINCT b.batch_id)""",
                 self.FROM_WHERE ]
        query.extend(self.constraints)
        c = self.db.conn.cursor()
        try:
            c.execute(' '.join(query), self.values)
            return c.fetchone()
        finally:
            c.close()

    def get_command_timings(self, command):
        """Return (count, min, max, avg) of the timings recorded for COMMAND."""
        query = ["""SELECT
                       count(t.timing),
                       min(t.timing),
                       max(t.timing),
                       avg(t.timing)""",
                 self.FROM_WHERE ]
        # Temporarily narrow the constraints to this command only.
        self.append_constraint('t.command', command)
        try:
            query.extend(self.constraints)
            c = self.db.conn.cursor()
            try:
                c.execute(' '.join(query), self.values)
                return c.fetchone()
            finally:
                c.close()
        finally:
            self.remove_last_constraint()

    def get_timings(self):
        """Return {command: (count, min, max, avg)}, computed once and cached."""
        if self.timings:
            return self.timings
        self.timings = {}
        for command_name in self.get_sorted_command_names():
            self.timings[command_name] = self.get_command_timings(command_name)
        return self.timings
+
+
+# ------------------------------------------------------------ run tests
+
+
+def perform_run(batch, run_kind,
+ svn_bin, svnadmin_bin, verbose):
+ """Perform one benchmark run: create a repository and working copies in
+ a temp dir, drive a fixed pseudo-random sequence of svn operations
+ (seeded with 0, so every run is identical) while timing each command,
+ then record and submit the timings. Returns True if the run aborted."""
+
+ run = Run(batch, run_kind)
+
+ def create_tree(in_dir, _levels, _spread):
+ # Best-effort mkdir: the directory may already exist.
+ try:
+ os.mkdir(in_dir)
+ except:
+ pass
+
+ for i in range(_spread):
+ # files
+ fn = j(in_dir, next_unique_basename('file'))
+ f = open(fn, 'w')
+ f.write('This is %s\n' % fn)
+ f.close()
+
+ # dirs
+ if (_levels > 1):
+ dn = j(in_dir, next_unique_basename('dir'))
+ create_tree(dn, _levels - 1, _spread)
+
+ def svn(*args):
+ # Run one svn subcommand, timing it under the subcommand's name.
+ name = args[0]
+
+ cmd = [ svn_bin ]
+ cmd.extend( list(args) )
+ if verbose:
+ print('svn cmd:', ' '.join(cmd))
+
+ # NOTE(review): stdin is always None here, so stdin_arg is always None
+ # and communicate() sends no input -- this branch is dead code.
+ stdin = None
+ if stdin:
+ stdin_arg = subprocess.PIPE
+ else:
+ stdin_arg = None
+
+ # Time only the subprocess itself; a failure to launch still stops
+ # the clock via the finally.
+ run.tic(name)
+ try:
+ p = subprocess.Popen(cmd,
+ stdin=stdin_arg,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ shell=False)
+ stdout,stderr = p.communicate(input=stdin)
+ except OSError:
+ stdout = stderr = None
+ finally:
+ run.toc()
+
+ if verbose:
+ if (stdout):
+ print("STDOUT: [[[\n%s]]]" % ''.join(stdout))
+ if (stderr):
+ print("STDERR: [[[\n%s]]]" % ''.join(stderr))
+
+ return stdout,stderr
+
+
+ # Thin shorthands for the svn subcommands used below.
+ def add(*args):
+ return svn('add', *args)
+
+ def ci(*args):
+ return svn('commit', '-mm', *args)
+
+ def up(*args):
+ return svn('update', *args)
+
+ def st(*args):
+ return svn('status', *args)
+
+ def info(*args):
+ return svn('info', *args)
+
+ _chars = [chr(x) for x in range(ord('a'), ord('z') +1)]
+
+ def randstr(len=8):
+ # NOTE(review): parameter shadows the builtin len(); harmless here.
+ return ''.join( [random.choice(_chars) for i in range(len)] )
+
+ def _copy(path):
+ dest = next_unique_basename(path + '_copied')
+ svn('copy', path, dest)
+
+ def _move(path):
+ dest = path + '_moved'
+ svn('move', path, dest)
+
+ def _propmod(path):
+ # Modify and/or delete one of the node's existing properties.
+ so, se = svn('proplist', path)
+ propnames = [line.strip() for line in so.strip().split('\n')[1:]]
+
+ # modify?
+ # NOTE(review): len(propnames) / 2 is float division under Python 3
+ # and would raise TypeError as a list index; it is only valid under
+ # Python 2. `//` would be correct for both -- confirm the intended
+ # interpreter.
+ if len(propnames):
+ svn('ps', propnames[len(propnames) / 2], randstr(), path)
+
+ # del?
+ if len(propnames) > 1:
+ svn('propdel', propnames[len(propnames) / 2], path)
+
+ def _propadd(path):
+ # set a new one.
+ svn('propset', randstr(), randstr(), path)
+
+ def _mod(path):
+ # Directories get a property modification instead of a text change.
+ if os.path.isdir(path):
+ _propmod(path)
+ return
+
+ f = open(path, 'a')
+ f.write('\n%s\n' % randstr())
+ f.close()
+
+ def _add(path):
+ if os.path.isfile(path):
+ return _mod(path)
+
+ if random.choice((True, False)):
+ # create a dir
+ svn('mkdir', j(path, next_unique_basename('new_dir')))
+ else:
+ # create a file
+ new_path = j(path, next_unique_basename('new_file'))
+ f = open(new_path, 'w')
+ f.write(randstr())
+ f.close()
+ svn('add', new_path)
+
+ def _del(path):
+ svn('delete', path)
+
+ # Pool of modification actions picked at random per tree node.
+ _mod_funcs = (_mod, _add, _propmod, _propadd, )#_copy,) # _move, _del)
+
+ def modify_tree(in_dir, fraction):
+ # First pass: modify roughly `fraction` of the direct children.
+ child_names = os.listdir(in_dir)
+ for child_name in child_names:
+ if child_name[0] == '.':
+ continue
+ if random.random() < fraction:
+ path = j(in_dir, child_name)
+ random.choice(_mod_funcs)(path)
+
+ # Second pass: recurse into subdirectories.
+ for child_name in child_names:
+ if child_name[0] == '.': continue
+ path = j(in_dir, child_name)
+ if os.path.isdir(path):
+ modify_tree(path, fraction)
+
+ def propadd_tree(in_dir, fraction):
+ # Add a new property to roughly `fraction` of the nodes, recursively.
+ for child_name in os.listdir(in_dir):
+ if child_name[0] == '.': continue
+ path = j(in_dir, child_name)
+ if random.random() < fraction:
+ _propadd(path)
+ if os.path.isdir(path):
+ propadd_tree(path, fraction)
+
+
+ def rmtree_onerror(func, path, exc_info):
+ """Error handler for ``shutil.rmtree``.
+
+ If the error is due to an access error (read only file)
+ it attempts to add write permission and then retries.
+
+ If the error is for another reason it re-raises the error.
+
+ Usage : ``shutil.rmtree(path, onerror=onerror)``
+ """
+ if not os.access(path, os.W_OK):
+ # Is the error an access error ?
+ os.chmod(path, stat.S_IWUSR)
+ func(path)
+ else:
+ # Bare raise: re-raises the exception rmtree is currently handling.
+ raise
+
+ base = tempfile.mkdtemp()
+
+ # ensure identical modifications for every run
+ random.seed(0)
+
+ aborted = True
+
+ try:
+ repos = j(base, 'repos')
+ repos = repos.replace('\\', '/')
+ wc = j(base, 'wc')
+ wc2 = j(base, 'wc2')
+
+ # Build a file:// URL; non-absolute (Windows) paths need the extra slash.
+ if repos.startswith('/'):
+ file_url = 'file://%s' % repos
+ else:
+ file_url = 'file:///%s' % repos
+
+ print('\nRunning svn benchmark in', base)
+ print('dir levels: %s; new files and dirs per leaf: %s' %(
+ run_kind.levels, run_kind.spread))
+
+ started = datetime.datetime.now()
+
+ try:
+ # The scripted work load: checkout, populate, branch, modify,
+ # switch, merge both ways, and delete the branch again.
+ run_cmd([svnadmin_bin, 'create', repos])
+ svn('checkout', file_url, wc)
+
+ trunk = j(wc, 'trunk')
+ create_tree(trunk, run_kind.levels, run_kind.spread)
+ add(trunk)
+ st(wc)
+ ci(wc)
+ up(wc)
+ propadd_tree(trunk, 0.05)
+ ci(wc)
+ up(wc)
+ st(wc)
+ info('-R', wc)
+
+ trunk_url = file_url + '/trunk'
+ branch_url = file_url + '/branch'
+
+ svn('copy', '-mm', trunk_url, branch_url)
+ st(wc)
+
+ up(wc)
+ st(wc)
+ info('-R', wc)
+
+ svn('checkout', trunk_url, wc2)
+ st(wc2)
+ modify_tree(wc2, 0.5)
+ st(wc2)
+ ci(wc2)
+ up(wc2)
+ up(wc)
+
+ svn('switch', branch_url, wc2)
+ modify_tree(wc2, 0.5)
+ st(wc2)
+ info('-R', wc2)
+ ci(wc2)
+ up(wc2)
+ up(wc)
+
+ modify_tree(trunk, 0.5)
+ st(wc)
+ ci(wc)
+ up(wc2)
+ up(wc)
+
+ # Merge trunk into the branch wc, resolving in favor of the wc.
+ svn('merge', '--accept=postpone', trunk_url, wc2)
+ st(wc2)
+ info('-R', wc2)
+ svn('resolve', '--accept=mine-conflict', wc2)
+ st(wc2)
+ svn('resolved', '-R', wc2)
+ st(wc2)
+ info('-R', wc2)
+ ci(wc2)
+ up(wc2)
+ up(wc)
+
+ # Reintegrate the branch back into trunk.
+ svn('merge', '--accept=postpone', '--reintegrate', branch_url, trunk)
+ st(wc)
+ svn('resolve', '--accept=mine-conflict', wc)
+ st(wc)
+ svn('resolved', '-R', wc)
+ st(wc)
+ ci(wc)
+ up(wc2)
+ up(wc)
+
+ svn('delete', j(wc, 'branch'))
+ ci(wc)
+ up(wc)
+
+ aborted = False
+
+ finally:
+ stopped = datetime.datetime.now()
+ print('\nDone with svn benchmark in', (stopped - started))
+
+ # Record the wall-clock total for the whole run.
+ run.remember_timing(TOTAL_RUN,
+ timedelta_to_seconds(stopped - started))
+ finally:
+ run.done(aborted)
+ run.submit_timings()
+ shutil.rmtree(base, onerror=rmtree_onerror)
+
+ return aborted
+
+
+# ---------------------------------------------------------------------
+
+
+def cmdline_run(db, options, run_kind_str, N=1):
+ run_kind = parse_one_timing_selection(db, run_kind_str)
+
+ N = int(N)
+
+ print('Hi, going to run a Subversion benchmark series of %d runs...' % N)
+ print('Label is %s' % run_kind.label())
+
+ # can we run the svn binaries?
+ svn_bin = j(options.svn_bin_dir, 'svn')
+ svnadmin_bin = j(options.svn_bin_dir, 'svnadmin')
+
+ for b in (svn_bin, svnadmin_bin):
+ so,se = run_cmd([b, '--version'])
+ if not so:
+ bail("Can't run %s" % b)
+
+ print(', '.join([s.strip() for s in so.split('\n')[:2]]))
+
+ batch = Batch(db)
+
+ for i in range(N):
+ print('Run %d of %d' % (i + 1, N))
+ perform_run(batch, run_kind,
+ svn_bin, svnadmin_bin, options.verbose)
+
+ batch.done()
+
+
+def cmdline_list(db, options, *args):
+ run_kinds = parse_timings_selections(db, *args)
+
+ for run_kind in run_kinds:
+
+ constraints = []
+ def add_if_not_none(name, val):
+ if val:
+ constraints.append(' %s = %s' % (name, val))
+ add_if_not_none('branch', run_kind.branch)
+ add_if_not_none('revision', run_kind.revision)
+ add_if_not_none('levels', run_kind.levels)
+ add_if_not_none('spread', run_kind.spread)
+ if constraints:
+ print('For\n', '\n'.join(constraints))
+ print('I found:')
+
+ d = TimingQuery(db, run_kind)
+
+ cmd_names = d.get_sorted_command_names()
+ if cmd_names:
+ print('\n%d command names:\n ' % len(cmd_names), '\n '.join(cmd_names))
+
+ branches = d.get_sorted_branches()
+ if branches and (len(branches) > 1 or branches[0] != run_kind.branch):
+ print('\n%d branches:\n ' % len(branches), '\n '.join(branches))
+
+ revisions = d.get_sorted_revisions()
+ if revisions and (len(revisions) > 1 or revisions[0] != run_kind.revision):
+ print('\n%d revisions:\n ' % len(revisions), '\n '.join(revisions))
+
+ levels_spread = d.get_sorted_levels_spread()
+ if levels_spread and (
+ len(levels_spread) > 1
+ or levels_spread[0] != (run_kind.levels, run_kind.spread)):
+ print('\n%d kinds of levels x spread:\n ' % len(levels_spread), '\n '.join(
+ [ ('%dx%d' % (l, s)) for l,s in levels_spread ]))
+
+ print("\n%d runs in %d batches.\n" % (d.count_runs_batches()))
+
+
+def cmdline_show(db, options, *run_kind_strings):
+ run_kinds = parse_timings_selections(db, *run_kind_strings)
+ for run_kind in run_kinds:
+ q = TimingQuery(db, run_kind)
+ timings = q.get_timings()
+
+ s = []
+ s.append('Timings for %s' % run_kind.label())
+ s.append(' N min max avg operation (unit is seconds)')
+
+ for command_name in q.get_sorted_command_names():
+ if options.command_names and command_name not in options.command_names:
+ continue
+ n, tmin, tmax, tavg = timings[command_name]
+
+ s.append('%4s %7.2f %7.2f %7.2f %s' % (
+ n_label(n),
+ tmin,
+ tmax,
+ tavg,
+ command_name))
+
+ print('\n'.join(s))
+
+
+def cmdline_compare(db, options, *args):
+ """Compare the first timings selection (the baseline) against each
+ further selection, printing per command the division and difference of
+ the average (and, with --verbose, also min/max) timings."""
+ run_kinds = parse_timings_selections(db, *args)
+ if len(run_kinds) < 2:
+ bail("Need at least two sets of timings to compare.")
+
+
+ # The first selection is the fixed left-hand side of every comparison.
+ left_kind = run_kinds[0]
+ leftq = TimingQuery(db, left_kind)
+ left = leftq.get_timings()
+ if not left:
+ bail("No timings for %s" % left_kind.label())
+
+ for run_kind_idx in range(1, len(run_kinds)):
+ right_kind = run_kinds[run_kind_idx]
+
+ rightq = TimingQuery(db, right_kind)
+ right = rightq.get_timings()
+ if not right:
+ print("No timings for %s" % right_kind.label())
+ continue
+
+ label = 'Compare %s to %s' % (right_kind.label(), left_kind.label())
+
+ s = [label]
+
+ verbose = options.verbose
+ if not verbose:
+ s.append(' N avg operation')
+ else:
+ s.append(' N min max avg operation')
+
+ # Only commands present on both sides can be compared.
+ command_names = [name for name in leftq.get_sorted_command_names()
+ if name in right]
+ # NOTE(review): options.command_names is the raw comma-separated option
+ # string, so `name in options.command_names` is a substring test.
+ if options.command_names:
+ command_names = [name for name in command_names
+ if name in options.command_names]
+
+ for command_name in command_names:
+ left_N, left_min, left_max, left_avg = left[command_name]
+ right_N, right_min, right_max, right_avg = right[command_name]
+
+ # "factor|difference" pairs, as explained in the legend below.
+ N_str = '%s/%s' % (n_label(left_N), n_label(right_N))
+ avg_str = '%7.2f|%+7.3f' % (do_div(left_avg, right_avg),
+ do_diff(left_avg, right_avg))
+
+ if not verbose:
+ s.append('%9s %-16s %s' % (N_str, avg_str, command_name))
+ else:
+ min_str = '%7.2f|%+7.3f' % (do_div(left_min, right_min),
+ do_diff(left_min, right_min))
+ max_str = '%7.2f|%+7.3f' % (do_div(left_max, right_max),
+ do_diff(left_max, right_max))
+
+ s.append('%9s %-16s %-16s %-16s %s' % (N_str, min_str, max_str, avg_str,
+ command_name))
+
+ s.extend([
+ '(legend: "1.23|+0.45" means: slower by factor 1.23 and by 0.45 seconds;',
+ ' factor < 1 and seconds < 0 means \'%s\' is faster.'
+ % right_kind.label(),
+ ' "2/3" means: \'%s\' has 2 timings on record, the other has 3.)'
+ % left_kind.label()
+ ])
+
+
+ print('\n'.join(s))
+
+
+# ------------------------------------------------------- charts
+
+def cmdline_chart_compare(db, options, *args):
+ """Write an SVG bar chart comparing the first timings selection (the
+ baseline) with each further one: the left plot shows the relative
+ average-runtime change in percent, the right plot the absolute change
+ in seconds, one bar group per command name. Output goes to
+ options.chart_path or a name derived from the selection labels."""
+ import matplotlib
+ matplotlib.use('Agg')
+ import numpy as np
+ import matplotlib.pylab as plt
+
+ labels = []
+ timing_sets = []
+ command_names = None
+
+ run_kinds = parse_timings_selections(db, *args)
+
+ # iterate the timings selections and accumulate data
+ for run_kind in run_kinds:
+ query = TimingQuery(db, run_kind)
+ timings = query.get_timings()
+ if not timings:
+ print("No timings for %s" % run_kind.label())
+ continue
+ labels.append(run_kind.label())
+ timing_sets.append(timings)
+
+ # it only makes sense to compare those commands that have timings
+ # in the first selection, because that is the one everything else
+ # is compared to. Remember the first selection's command names.
+ if not command_names:
+ command_names = query.get_sorted_command_names()
+
+
+ if len(timing_sets) < 2:
+ bail("Not enough timings")
+
+ # NOTE(review): options.command_names is the raw comma-separated option
+ # string, so `name in options.command_names` is a substring test.
+ if options.command_names:
+ command_names = [name for name in command_names
+ if name in options.command_names]
+
+ chart_path = options.chart_path
+ if not chart_path:
+ chart_path = 'compare_' + '_'.join(
+ [ filesystem_safe_string(l) for l in labels ]
+ ) + '.svg'
+
+ N = len(command_names)
+ M = len(timing_sets) - 1
+ # Keep bar geometry sane even when comparing only two selections.
+ if M < 2:
+ M = 2
+
+ group_positions = np.arange(N) # the y locations for the groups
+ dist = 1. / (1. + M)
+ height = (1. - dist) / M # the height of the bars
+
+ fig = plt.figure(figsize=(12, 5 + 0.2*N*M))
+ plot1 = fig.add_subplot(121)
+ plot2 = fig.add_subplot(122)
+
+ # The first selection is the baseline all others are measured against.
+ left = timing_sets[0]
+
+ # Iterate timing sets. Each loop produces one bar for each command name
+ # group.
+ for label_i,label in enumerate(labels[1:],1):
+ right = timing_sets[label_i]
+ if not right:
+ continue
+
+ for cmd_i, command_name in enumerate(command_names):
+ if command_name not in right:
+ #skip
+ continue
+
+ left_N, left_min, left_max, left_avg = left[command_name]
+ right_N, right_min, right_max, right_avg = right[command_name]
+
+ # Green bar when at least as fast as the baseline, red when slower.
+ div_avg = 100. * (do_div(left_avg, right_avg) - 1.0)
+ if div_avg <= 0:
+ col = '#55dd55'
+ else:
+ col = '#dd5555'
+
+ diff_val = do_diff(left_avg, right_avg)
+
+ # Vertical offset of this selection's bar within its command group.
+ ofs = (dist + height) / 2. + height * (label_i - 1)
+
+ barheight = height * (1.0 - dist)
+
+ y = float(cmd_i) + ofs
+
+ plot1.barh((y, ),
+ (div_avg, ),
+ barheight,
+ color=col, edgecolor='white')
+ plot1.text(0., y + height/2.,
+ '%s %+5.1f%%' % (label, div_avg),
+ ha='right', va='center', size='small',
+ rotation=0, family='monospace')
+
+ plot2.barh((y, ),
+ (diff_val, ),
+ barheight,
+ color=col, edgecolor='white')
+ plot2.text(0., y + height/2.,
+ '%s %+6.2fs' % (label, diff_val),
+ ha='right', va='center', size='small',
+ rotation=0, family='monospace')
+
+
+ for p in (plot1, plot2):
+ xlim = list(p.get_xlim())
+ if xlim[1] < 10.:
+ xlim[1] = 10.
+ # make sure the zero line is far enough right so that the annotations
+ # fit inside the chart. About half the width should suffice.
+ if xlim[0] > -xlim[1]:
+ xlim[0] = -xlim[1]
+ p.set_xlim(*xlim)
+ p.set_xticks((0,))
+ p.set_yticks(group_positions + (height / 2.))
+ p.set_yticklabels(())
+ p.set_ylim((len(command_names), 0))
+ p.grid()
+
+ plot1.set_xticklabels(('+-0%',), rotation=0)
+ plot1.set_title('Average runtime change from %s in %%' % labels[0],
+ size='medium')
+
+ plot2.set_xticklabels(('+-0s',), rotation=0)
+ plot2.set_title('Average runtime change from %s in seconds' % labels[0],
+ size='medium')
+
+ margin = 1./(2 + N*M)
+ titlemargin = 0
+ if options.title:
+ titlemargin = margin * 1.5
+
+ fig.subplots_adjust(left=0.005, right=0.995, wspace=0.3, bottom=margin,
+ top=1.0-margin-titlemargin)
+
+ ystep = (1.0 - 2.*margin - titlemargin) / len(command_names)
+
+ # Label each command group with figure-level text.
+ for idx,command_name in enumerate(command_names):
+ # NOTE(review): `ylabel` is computed but never used below -- dead code?
+ ylabel = '%s\nvs. %.1fs' % (
+ command_name,
+ left[command_name][3])
+
+ ypos=1.0 - margin - titlemargin - ystep/M - ystep * idx
+ plt.figtext(0.5, ypos,
+ command_name,
+ ha='center', va='top',
+ size='medium', weight='bold')
+ plt.figtext(0.5, ypos - ystep/(M+1),
+ '%s\n= %.2fs' % (
+ labels[0], left[command_name][3]),
+ ha='center', va='top',
+ size='small')
+
+ if options.title:
+ plt.figtext(0.5, 1. - titlemargin/2, options.title, ha='center',
+ va='center', weight='bold')
+
+ plt.savefig(chart_path)
+ print('wrote chart file:', chart_path)
+
+
+# ------------------------------------------------------------ main
+
+
+# Custom option formatter, keeping newlines in the description.
+# adapted from:
+# http://groups.google.com/group/comp.lang.python/msg/09f28e26af0699b1
+import textwrap
+class IndentedHelpFormatterWithNL(optparse.IndentedHelpFormatter):
+ def format_description(self, description):
+ if not description: return ""
+ desc_width = self.width - self.current_indent
+ indent = " "*self.current_indent
+ bits = description.split('\n')
+ formatted_bits = [
+ textwrap.fill(bit,
+ desc_width,
+ initial_indent=indent,
+ subsequent_indent=indent)
+ for bit in bits]
+ result = "\n".join(formatted_bits) + "\n"
+ return result
+
+# Command-line entry point: parse global options, then dispatch on the
+# first positional argument (run / compare / list / show / chart).
+if __name__ == '__main__':
+ parser = optparse.OptionParser(formatter=IndentedHelpFormatterWithNL())
+ # -h is automatically added.
+ ### should probably expand the help for that. and see about -?
+ parser.add_option('-v', '--verbose', action='store_true', dest='verbose',
+ help='Verbose operation')
+ parser.add_option('-b', '--svn-bin-dir', action='store', dest='svn_bin_dir',
+ default='',
+ help='Specify directory to find Subversion binaries in')
+ parser.add_option('-f', '--db-path', action='store', dest='db_path',
+ default='benchmark.db',
+ help='Specify path to SQLite database file')
+ parser.add_option('-o', '--chart-path', action='store', dest='chart_path',
+ help='Supply a path for chart output.')
+ parser.add_option('-c', '--command-names', action='store',
+ dest='command_names',
+ help='Comma separated list of command names to limit to.')
+ parser.add_option('-t', '--title', action='store',
+ dest='title',
+ help='For charts, a title to print in the chart graphics.')
+
+ parser.set_description(__doc__)
+ parser.set_usage('')
+
+
+ options, args = parser.parse_args()
+
+ def usage(msg=None):
+ # Print the full option help, then an optional message, then exit.
+ parser.print_help()
+ if msg:
+ print("")
+ print(msg)
+ bail()
+
+ # there should be at least one arg left: the sub-command
+ if not args:
+ usage('No command argument supplied.')
+
+ cmd = args[0]
+ del args[0]
+
+ db = TimingsDb(options.db_path)
+
+ if cmd == 'run':
+ if len(args) < 1 or len(args) > 2:
+ usage()
+ cmdline_run(db, options, *args)
+
+ elif cmd == 'compare':
+ if len(args) < 2:
+ usage()
+ cmdline_compare(db, options, *args)
+
+ elif cmd == 'list':
+ cmdline_list(db, options, *args)
+
+ elif cmd == 'show':
+ cmdline_show(db, options, *args)
+
+ elif cmd == 'chart':
+ # Any abbreviation of 'compare' is accepted (e.g. 'c', 'comp').
+ # NOTE(review): 'chart' with no further argument raises IndexError on
+ # args[0] before reaching usage() -- confirm/guard.
+ if 'compare'.startswith(args[0]):
+ cmdline_chart_compare(db, options, *args[1:])
+ else:
+ usage()
+
+ else:
+ usage('Unknown subcommand argument: %s' % cmd)
diff --git a/tools/dev/benchmarks/suite1/cronjob b/tools/dev/benchmarks/suite1/cronjob
new file mode 100755
index 0000000..5b74292
--- /dev/null
+++ b/tools/dev/benchmarks/suite1/cronjob
@@ -0,0 +1,102 @@
+#!/bin/bash
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+#
+# This is the cronjob as run on our ASF box aka svn-qavm.
+# It uses neels' mad bash script magic called 'pat' to update and
+# build the latest trunk, invokes a benchmark and sends the results
+# as mail.
+#
+# A word on 'pat': this is a grown-out-of-proportions bash script that holds
+# all the small and large tasks that I do while developing on Subversion.
+# While it works for me, it's not particularly beautifully coded --
+# wouldn't publish it in Subversion's trunk, but if you want to find out
+# what it does: http://hofmeyr.de/pat/
+
+#EMAILS=your@ema.il add@ress.es
+EMAILS=dev@subversion.apache.org
+
+echo
+echo "--------------------------------------------------------------------"
+date
+echo
+
+# NOTE(review): 'tempfile' is a debianutils command and is deprecated;
+# 'mktemp' is the portable replacement.
+results="$(tempfile)"
+
+benchdir=/home/neels/svnbench
+patbin=/home/neels/bin/pat
+patbase=/home/neels/pat
+
+
+# first update trunk to HEAD and rebuild.
+# update/build is logged to the cronjob log (via stdout)
+
+cd "$patbase/trunk"
+"$patbin" update
+
+if [ "$?" -ne "0" ]; then
+ subject="Failed to update to HEAD."
+ echo "$subject" > "$results"
+ echo "$subject"
+else
+
+ # The revision string doubles as a working-copy sanity check.
+ rev="$("$patbase"/stable/prefix/bin/svn info "$patbase"/trunk/src | grep Revision)"
+ if [ -z "$rev" ]; then
+ subject="Working copy problem."
+ echo "$subject" > "$results"
+ echo "$subject"
+ else
+
+ NONMAINTAINER=1 "$patbin" remake
+ if [ "$?" -ne "0" ]; then
+ subject="Failed to build $rev."
+ echo "$subject" > "$results"
+ echo "$subject"
+ else
+
+
+ # updating and building succeeded!
+ # run the benchmark:
+
+ compiled="$("$patbase"/trunk/prefix/bin/svn --version | grep "compiled")"
+ subject="$rev$compiled"
+
+ cd "$benchdir"
+
+ # make more or less sure that runs don't leak into each other via
+ # I/O caching.
+ sync
+
+ # basically, just run it. But also, I want to
+ # - append output to stdout, for cronjob logging.
+ # - send output as mail, but only this run's output less update&build
+ time -p ./run 2>&1 | tee "$results"
+ time -p ./generate_charts 2>&1 | tee -a "$results"
+ fi
+ fi
+fi
+
+# Mail whatever ended up in $results (benchmark output or failure notice).
+if [ -n "$EMAILS" ]; then
+ cat "$results" | mail -s "[svnbench] $subject" $EMAILS
+else
+ echo "No email addresses configured."
+fi
+
+rm "$results"
+
diff --git a/tools/dev/benchmarks/suite1/crontab.entry b/tools/dev/benchmarks/suite1/crontab.entry
new file mode 100644
index 0000000..23f7aa4
--- /dev/null
+++ b/tools/dev/benchmarks/suite1/crontab.entry
@@ -0,0 +1,5 @@
+# This invokes the benchmarking cronjob as run on our ASF box aka svn-qavm
+# (ask danielsh about the VM).
+# m h dom mon dow command
+21 0 * * Mon /home/neels/svnbench/cronjob >>/home/neels/cronjob.log 2>&1
+
diff --git a/tools/dev/benchmarks/suite1/generate_charts b/tools/dev/benchmarks/suite1/generate_charts
new file mode 100755
index 0000000..8e16526
--- /dev/null
+++ b/tools/dev/benchmarks/suite1/generate_charts
@@ -0,0 +1,60 @@
+#!/bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# Generate comparison charts (SVG) from the benchmark database,
+# comparing SVN_A_NAME against the last N runs of SVN_B_NAME.
+# These names must match the ones used by the sibling './run' script.
+SVN_A_NAME="1.7.0"
+SVN_B_NAME="trunk"
+
+# benchmark script and parameters...
+benchmark="$PWD/benchmark.py"
+
+db="$PWD/${SVNBENCH_DEBUG}benchmark.db"
+
+chartsdir="$PWD/charts"
+
+# Stage new charts in a hidden subdir so half-written files are never
+# visible in $chartsdir; they are moved into place at the end.
+mkdir -p "$chartsdir/.busy"
+
+if [ ! -e "$chartsdir/README" ]; then
+ cat > "$chartsdir/README" <<END
+These charts are generated by svn benchmark suite1.
+http://svn.apache.org/repos/asf/subversion/trunk/tools/dev/benchmarks/suite1
+
+*DISCLAIMER* - This tests only file://-URL access on a GNU/Linux VM.
+This is intended to measure changes in performance of the local working
+copy layer, *only*. These results are *not* generally true for everyone.
+END
+fi
+
+# One chart for the overall totals (empty levelspread) and one per
+# dir-levels x spread combination.
+for levelspread in "" 5x5 1x100 100x1; do
+ if [ -z "$levelspread" ]; then
+ lsarg=""
+ lstitle=""
+ else
+ lsarg=",$levelspread"
+ lstitle=", WC dir levels x spread = $levelspread"
+ fi
+ N=12
+ "$benchmark" -f "$db" chart compare \
+ ${SVN_A_NAME}$lsarg ${SVN_B_NAME}@last${N}$lsarg \
+ -o "$chartsdir/.busy/compare_${SVN_A_NAME}_${SVN_B_NAME}@last${N}$lsarg.svg" \
+ -t "svn client benchmarks, file:// access *only*$lstitle"
+done
+
+mv "$chartsdir/.busy/"*.svg "$chartsdir/"
+rmdir "$chartsdir/.busy"
+
+
diff --git a/tools/dev/benchmarks/suite1/run b/tools/dev/benchmarks/suite1/run
new file mode 100755
index 0000000..c146ea0
--- /dev/null
+++ b/tools/dev/benchmarks/suite1/run
@@ -0,0 +1,145 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# Top-level driver: benchmark side A (a fixed release) against side B
+# (a moving branch) for several dir-levels x spread combinations, then
+# print comparison tables. All output is also captured in results.txt.
+
+# debug? Just uncomment.
+#SVNBENCH_DEBUG=DEBUG_
+if [ -n "$SVNBENCH_DEBUG" ]; then
+ SVNBENCH_DEBUG="DEBUG_"
+fi
+
+# Subversion bin-dir used for maintenance of working copies
+SVN_STABLE="$HOME/pat/stable/prefix/bin/"
+
+# Where to find the svn binaries you want to benchmark, what are their labels
+# and Last Changed Revisions?
+# side A
+SVN_A_NAME="1.7.0"
+SVN_A="$HOME/pat/bench/prefix/bin"
+SVN_A_REV="$("$SVN_STABLE"/svnversion -c "$HOME/pat/bench/src" | sed 's/.*://')"
+
+# side B
+SVN_B_NAME="trunk"
+SVN_B="$HOME/pat/trunk/prefix/bin"
+SVN_B_REV="$("$SVN_STABLE"/svnversion -c "$HOME/pat/trunk/src" | sed 's/.*://')"
+
+echo "$SVN_A_NAME@$SVN_A_REV vs. $SVN_B_NAME@$SVN_B_REV"
+
+# benchmark script and parameters...
+benchmark="$PWD/benchmark.py"
+
+db="$PWD/${SVNBENCH_DEBUG}benchmark.db"
+
+# batch <levels> <spread> <N>: run both sides for one WC shape.
+batch(){
+ levels="$1"
+ spread="$2"
+ N="$3"
+
+ # SVN_A is a fixed tag, currently 1.7.0. For each call, run this once.
+ # It will be called again and again for each trunk build being tested,
+ # that's why we don't really need to run it $N times every time.
+ N_for_A=1
+ "$benchmark" "--db-path=$db" "--svn-bin-dir=$SVN_A" \
+ run "$SVN_A_NAME@$SVN_A_REV,${levels}x$spread" "$N_for_A" >/dev/null
+
+ # SVN_B is a branch, i.e. the moving target, benchmarked at a specific
+ # point in history each time this script is called. Run this $N times.
+ "$benchmark" "--db-path=$db" "--svn-bin-dir=$SVN_B" \
+ run "$SVN_B_NAME@$SVN_B_REV,${levels}x$spread" $N >/dev/null
+}
+
+# Three WC shapes: a=5x5 (balanced), b=100x1 (deep), c=1x100 (wide).
+N=3
+al=5
+as=5
+bl=100
+bs=1
+cl=1
+cs=100
+
+if [ -n "$SVNBENCH_DEBUG" ]; then
+ echo "DEBUG"
+ N=1
+ al=1
+ as=1
+ bl=2
+ bs=1
+ cl=1
+ cs=2
+fi
+
+
+# The whole report below is both printed and written to results.txt
+# via the tee at the closing brace.
+{
+started="$(date)"
+echo "Started at $started"
+
+echo "
+*DISCLAIMER* - This tests only file://-URL access on a GNU/Linux VM.
+This is intended to measure changes in performance of the local working
+copy layer, *only*. These results are *not* generally true for everyone.
+
+Charts of this data are available at http://svn-qavm.apache.org/charts/"
+
+if [ -z "$SVNBENCH_SUMMARY_ONLY" ]; then
+ batch $al $as $N
+ batch $bl $bs $N
+ batch $cl $cs $N
+else
+ echo "(not running benchmarks, just printing results on record.)"
+fi
+
+echo ""
+echo "Averaged-total results across all runs:"
+echo "---------------------------------------"
+echo ""
+"$benchmark" "--db-path=$db" \
+ compare "$SVN_A_NAME" "$SVN_B_NAME@$SVN_B_REV"
+
+echo ""
+echo ""
+echo "Above totals split into separate <dir-levels>x<dir-spread> runs:"
+echo "----------------------------------------------------------------"
+echo ""
+
+for lvlspr in "${al}x${as}" "${bl}x${bs}" "${cl}x${cs}"; do
+ "$benchmark" "--db-path=$db" \
+ compare "$SVN_A_NAME,$lvlspr" "$SVN_B_NAME@$SVN_B_REV,$lvlspr"
+ echo ""
+done
+
+echo ""
+echo ""
+echo "More detail:"
+echo "------------"
+echo ""
+
+# The trailing "" entry shows the unsplit (overall) numbers once more.
+for lvlspr in "${al}x${as}" "${bl}x${bs}" "${cl}x${cs}" "" ; do
+ "$benchmark" "--db-path=$db" show "$SVN_A_NAME,$lvlspr"
+ echo --
+ "$benchmark" "--db-path=$db" show "$SVN_B_NAME@$SVN_B_REV,$lvlspr"
+ echo --
+ "$benchmark" "--db-path=$db" \
+ compare -v "$SVN_A_NAME,$lvlspr" "$SVN_B_NAME@$SVN_B_REV,$lvlspr"
+ echo ""
+ echo ""
+done
+
+echo ""
+echo "Had started at $started,"
+echo " done at $(date)"
+} 2>&1 | tee results.txt
+
+
diff --git a/tools/dev/benchmarks/suite1/run.bat b/tools/dev/benchmarks/suite1/run.bat
new file mode 100644
index 0000000..6d3d466
--- /dev/null
+++ b/tools/dev/benchmarks/suite1/run.bat
@@ -0,0 +1,105 @@
+:: Licensed to the Apache Software Foundation (ASF) under one
+:: or more contributor license agreements. See the NOTICE file
+:: distributed with this work for additional information
+:: regarding copyright ownership. The ASF licenses this file
+:: to you under the Apache License, Version 2.0 (the
+:: "License"); you may not use this file except in compliance
+:: with the License. You may obtain a copy of the License at
+::
+:: http://www.apache.org/licenses/LICENSE-2.0
+::
+:: Unless required by applicable law or agreed to in writing,
+:: software distributed under the License is distributed on an
+:: "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+:: KIND, either express or implied. See the License for the
+:: specific language governing permissions and limitations
+:: under the License.
+
+:: Windows counterpart of ./run: benchmark a 1.6 svn client against a
+:: trunk build for several dir-levels x spread combinations, then print
+:: comparison tables. Results are collected in a timestamp-named subdir.
+
+@ECHO OFF
+
+ECHO.THIS SCRIPT IS CURRENTLY OUTDATED.
+GOTO :EOF
+
+SETLOCAL EnableDelayedExpansion
+
+:: Where are the svn binaries you want to benchmark?
+SET SVN_1_6=C:\path\to\1.6-svn\bin\svn
+SET SVN_trunk=C:\path\to\trunk-svn\bin\svn
+
+SET benchmark=%CD%\benchmark.py
+
+:: Build a filesystem-safe timestamp to name the working directory.
+SET my_datetime=%date%-%time%
+SET my_datetime=%my_datetime: =_%
+SET my_datetime=%my_datetime:/=_%
+SET my_datetime=%my_datetime::=%
+SET my_datetime=%my_datetime:.=%
+SET my_datetime=%my_datetime:,=%
+SET parent=%my_datetime%
+:: Remember where we started so we can return there before cleanup.
+:: (fixed typo: was 'inital_workdir')
+SET initial_workdir=%CD%
+mkdir "%parent%"
+cd "%parent%"
+ECHO %CD%
+
+GOTO main
+
+:: batch <levels> <spread> <N>: run both sides for one WC shape and
+:: print the comparison.
+:batch
+ SET levels=%1
+ SET spread=%2
+ SET N=%3
+ SET pre=%levels%x%spread%_
+ ECHO.
+ ECHO.---------------------------------------------------------------------
+ ECHO.
+ ECHO.Results for dir levels: %levels% spread: %spread%
+ CALL "%benchmark%" --svn="%SVN_1_6%" run %pre%1.6 %levels% %spread% %N% > NUL
+ CALL "%benchmark%" --svn="%SVN_trunk%" run %pre%trunk %levels% %spread% %N% > NUL
+ CALL "%benchmark%" compare %pre%1.6 %pre%trunk
+ GOTO :EOF
+
+:main
+:: Three WC shapes: a=5x5 (balanced), b=25x1 (deep), c=1x100 (wide).
+SET N=6
+SET al=5
+SET as=5
+SET bl=25
+SET bs=1
+SET cl=1
+SET cs=100
+
+::::DEBUG
+::SET N=1
+::SET al=1
+::SET as=1
+::SET bl=2
+::SET bs=1
+::SET cl=1
+::SET cs=2
+::::DEBUG
+
+SET started=%date%-%time%
+ECHO.Started at %started%
+ECHO.
+
+CALL :batch %al% %as% %N%
+CALL :batch %bl% %bs% %N%
+CALL :batch %cl% %cs% %N%
+
+ECHO.
+ECHO.=========================================================================
+ECHO.
+:: Combine all per-shape results into one averaged total per side.
+FOR %%F IN (*x*_1.6) DO SET all_1.6=!all_1.6! %%F
+CALL "%benchmark%" combine total_1.6 %all_1.6% > NUL
+FOR %%F IN (*x*_trunk) DO SET all_trunk=!all_trunk! %%F
+CALL "%benchmark%" combine total_trunk %all_trunk% > NUL
+
+:: (fixed: stray trailing double-quote in the echoed message)
+ECHO.comparing averaged totals...
+CALL "%benchmark%" compare total_1.6 total_trunk
+
+ECHO.
+ECHO.Had started at %started%,
+ECHO. done at %date%-%time%
+ECHO %CD%
+
+cd "%initial_workdir%"
+:: Only remove the work dir when the combine step actually produced output.
+IF EXIST %parent%\total_trunk rmdir /S /Q "%parent%"
+
+ENDLOCAL
diff --git a/tools/dev/build-svn-deps-win.pl b/tools/dev/build-svn-deps-win.pl
new file mode 100755
index 0000000..d936369
--- /dev/null
+++ b/tools/dev/build-svn-deps-win.pl
@@ -0,0 +1,919 @@
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+#
+# Script to build all the dependencies for Subversion on Windows
+# It's been written for Windows 8 and Visual Studio 2012, but
+# it's entirely possible it will work with older versions of both.
+
+# The goal here is not to necessarily have everyone using this script.
+# But rather to be able to produce binary packages of the dependencies
+# already built to allow developers to be able to download or checkout
+# Subversion and quickly get up a development environment.
+
+# Prerequisites:
+# Perl: http://www.activestate.com/activeperl/downloads
+# Python: http://www.activestate.com/activepython/downloads
+# 7-Zip: http://www.7-zip.org/download.html
+# CMake: http://www.cmake.org/cmake/resources/software.html
+# Microsoft Visual Studio 2012 (Ultimate has been tested, Express does not work)
+#
+# You probably want these on your PATH. The installers usually
+# offer an option to do that for you so if you can let them.
+#
+# You are expected to run this script within the correct Visual Studio
+# Shell. Probably "VS2012 x86 Native Tools Command Prompt". This
+# sets the proper PATH arguments so that the compiler tools are
+# available.
+#
+# TODO:
+# Find some way to work around the lack of devenv in Express (msbuild will help some)
+# Include a package target that zips everything up.
+# Perl script that runs the Subversion gen-make.py tool with the right args.
+# Alternatively update gen-make.py with an arg that knows about our layout.
+# Make the Windows build not expect to go looking into source code (httpd/zlib)
+# Add SWIG (to support checkout builds where SWIG generation hasn't been done).
+# Usage/help output from the usual flags/on error input.
+# Make SQLITE_VER friendly since we're using no dots right now.
+# Work out the fixes to the projects' sources and contribute them back.
+# Allow selection of Arch (x86 and x64)
+# ZLib support for OpenSSL (have to patch openssl)
+# Use CMake zlib build instead.
+# Assembler support for OpenSSL.
+# Add more specific commands to the command line (e.g. build-httpd)
+
+###################################
+###### V A R I A B L E S ######
+###################################
+package Vars;
+# variables in the Vars package can be overridden from the command
+# line with the FOO=BAR syntax. If you want any defaults to reference
+# other variables the defaults need to be in set_defaults() below to
+# allow the defaults to be set after processing user set variables.
+
+# Paths to commands to use, provide full paths if it's not
+# on your PATH already.
+our $SEVEN_ZIP = 'C:\Program Files\7-Zip\7z.exe';
+our $CMAKE = 'cmake';
+our $NMAKE = 'nmake';
+# Use the .com version so we get output, the .exe doesn't produce any output
+our $DEVENV = 'devenv.com';
+our $VCUPGRADE = 'vcupgrade';
+our $PYTHON = 'python';
+
+# Versions of the dependencies we will use
+# Change these if you want but these are known to work with
+# this script as is.
+our $HTTPD_VER = '2.4.4';
+our $APR_VER = '1.4.6';
+our $APU_VER = '1.5.2'; # apr-util version
+our $API_VER = '1.2.1'; # apr-iconv version
+our $ZLIB_VER = '1.2.8';
+our $OPENSSL_VER = '1.0.1e';
+our $PCRE_VER = '8.35';
+our $BDB_VER = '5.3.21';
+our $SQLITE_VER = '3071602';
+our $SERF_VER = '1.3.6';
+our $NEON_VER = '0.29.6';
+
+# Sources for files to download
+our $AWK_URL = 'http://www.cs.princeton.edu/~bwk/btl.mirror/awk95.exe';
+our $HTTPD_URL;
+our $APR_URL;
+our $APU_URL;
+our $API_URL;
+our $ZLIB_URL;
+our $OPENSSL_URL;
+our $PCRE_URL;
+our $BDB_URL;
+our $SQLITE_URL;
+our $SERF_URL;
+our $NEON_URL;
+our $PROJREF_URL = 'https://downloads.redhoundsoftware.com/blog/ProjRef.py';
+
+# Location of the already downloaded file.
+# by default these are undefined and set by the downloader.
+# However, they can be overridden from the command line and then
+# the downloader is skipped. Note that BDB has no downloader
+# so it must be overriden from the command line.
+our $AWK_FILE;
+our $HTTPD_FILE;
+our $APR_FILE;
+our $APU_FILE;
+our $API_FILE;
+our $ZLIB_FILE;
+our $OPENSSL_FILE;
+our $PCRE_FILE;
+our $BDB_FILE;
+our $SQLITE_FILE;
+our $SERF_FILE;
+our $NEON_FILE;
+our $PROJREF_FILE;
+
+# Various directories we use
+our $TOPDIR = Cwd::cwd(); # top of our tree
+our $INSTDIR; # where we install to
+our $BLDDIR; # directory where we actually build
+our $SRCDIR; # directory where we store package files
+
+# Some other options
+our $VS_VER;
+our $NEON;
+our $SVN_VER = '1.9.x';
+our $DEBUG = 0;
+
+# Utility function to remove dots from a string
+sub remove_dots {
+ my $in = shift;
+
+ $in =~ tr/.//d;
+ return $in;
+}
+
+# unless the variable is already defined set the value
+sub set_default {
+ my $var = shift;
+ my $value = shift;
+
+ unless (defined($$var)) {
+ $$var = $value;
+ }
+}
+
+sub set_svn_ver_defaults {
+ my ($svn_major, $svn_minor, $svn_patch) = $SVN_VER =~ /^(\d+)\.(\d+)\.(.+)$/;
+
+ if ($svn_major > 1 or ($svn_major == 1 and $svn_minor >= 8)) {
+ $NEON=0 unless defined($NEON);
+ } else {
+ $NEON=1 unless defined($NEON);
+ }
+}
+
+# Any variables with defaults that reference other values
+# should be set here. This defers setting of the default until runtime in these cases.
+sub set_defaults {
+ set_default(\$HTTPD_URL, "http://archive.apache.org/dist/httpd/httpd-$HTTPD_VER.tar.bz2");
+ set_default(\$APR_URL, "http://archive.apache.org/dist/apr/apr-$APR_VER.tar.bz2");
+ set_default(\$APU_URL, "http://archive.apache.org/dist/apr/apr-util-$APU_VER.tar.bz2");
+ set_default(\$API_URL, "http://archive.apache.org/dist/apr/apr-iconv-$API_VER.tar.bz2");
+ set_default(\$ZLIB_URL, "http://sourceforge.net/projects/libpng/files/zlib/$ZLIB_VER/zlib" . remove_dots($ZLIB_VER) . '.zip');
+ set_default(\$OPENSSL_URL, "http://www.openssl.org/source/openssl-$OPENSSL_VER.tar.gz");
+ set_default(\$PCRE_URL, "ftp://ftp.csx.cam.ac.uk/pub/software/programming/pcre/pcre-$PCRE_VER.zip");
+ set_default(\$BDB_URL, "http://download.oracle.com/berkeley-db/db-5.3.21.zip");
+ set_default(\$SQLITE_URL, "http://www.sqlite.org/2013/sqlite-amalgamation-$SQLITE_VER.zip");
+ set_default(\$SERF_URL, "https://archive.apache.org/dist/serf/serf-$SERF_VER.zip");
+ set_default(\$NEON_URL, "http://www.webdav.org/neon/neon-$NEON_VER.tar.gz");
+ set_default(\$INSTDIR, $TOPDIR);
+ set_default(\$BLDDIR, "$TOPDIR\\build");
+ set_default(\$SRCDIR, "$TOPDIR\\sources");
+ set_svn_ver_defaults();
+}
+
+#################################
+###### M A I N ######
+#################################
+# You shouldn't have any reason to modify below this unless you've changed
+# versions of something.
+package main;
+
+use warnings;
+use strict;
+
+use LWP::Simple;
+use File::Path;
+use File::Copy;
+use File::Basename;
+use File::Find;
+use Cwd;
+use Config;
+
+# Full path to perl, this shouldn't need to be messed with
+my $PERL = $Config{perlpath};
+
+# Directory constants that we setup for convenience, but that
+# shouldn't be changed since they are assumed in the build systems
+# of the various dependencies.
+my $HTTPD; # Where httpd gets built
+my $BDB; # Where bdb gets built
+my $BINDIR; # where binaries are installed
+my $LIBDIR; # where libraries are installed
+my $INCDIR; # where headers are installed
+my $SRCLIB; # httpd's srclib dir
+
+# defer setting these values till runtime so users can override the
+# user controlled vars they derive from.
+sub set_paths {
+ $HTTPD = "$BLDDIR\\httpd";
+ $BDB = "$BLDDIR\\bdb";
+ $BINDIR = "$INSTDIR\\bin";
+ $LIBDIR = "$INSTDIR\\lib";
+ $INCDIR = "$INSTDIR\\include";
+ $SRCLIB = "$HTTPD\\srclib";
+ # Add bin to PATH this will be needed for at least awk later on
+ $ENV{PATH} = "$BINDIR;$ENV{PATH}";
+ # Setup LIB and INCLUDE so we can find BDB
+ $ENV{LIB} = "$LIBDIR;$ENV{LIB}";
+ $ENV{INCLUDE} = "$INCDIR;$ENV{INCLUDE}";
+}
+
+#####################
+# UTILITY FUNCTIONS #
+#####################
+
+# copy a file with error handling
+sub copy_or_die {
+ my $src = shift;
+ my $dest = shift;
+
+ copy($src, $dest) or die "Failed to copy $src to $dest: $!";
+}
+
+# Rename a file and deal with errors.
+sub rename_or_die {
+ my $src = shift;
+ my $dest = shift;
+
+ rename($src, $dest) or die "Failed to rename $src to $dest: $!";
+}
+
+# Utility function to chdir with error handling.
+sub chdir_or_die {
+ my $dir = shift;
+
+ chdir($dir) or die "Failed to chdir to $dir: $!";
+}
+
+# Utility function to call system with error handling.
+# First arg is an error message to print if something fails.
+# Remaining args are passed to system.
+sub system_or_die {
+ my $error_msg = shift;
+ unless (system(@_) == 0) {
+ if (defined($error_msg)) {
+ die "$error_msg (exit code: $?)";
+ } else {
+ die "Failed while running '@_' (exit code: $?)";
+ }
+ }
+}
+
+# Like perl -pi.orig the second arg is a reference to a
+# function that does whatever line processing you want.
+# Note that $_ is used for the input and output of the
+# function. So modifying $_ changes the line in the file.
+# bak can be passed to set the backup extension. If the
+# backup file already exists, shortcut this step.
+sub modify_file_in_place {
+ my $file = shift;
+ my $func = shift;
+ my $bak = shift;
+
+ unless (defined($bak)) {
+ $bak = '.orig';
+ }
+
+ my $backup = $file . $bak;
+ return if -e $backup;
+ rename_or_die($file, $backup);
+ open(IN, "<$backup") or die "Failed to open $backup: $!";
+ open(OUT, ">$file") or die "Failed to open $file: $!";
+ while (<IN>) {
+ &{$func}();
+ print OUT;
+ }
+ close(IN);
+ close(OUT);
+}
+
+sub check_vs_ver {
+ return if defined($VS_VER);
+
+ # using the vcupgrade command here because it has a consistent name and version
+ # numbering across versions including express versions.
+ my $help_output = `"$VCUPGRADE" /?`;
+ my ($major_version) = $help_output =~ /Version (\d+)\./s;
+
+ if (defined($major_version)) {
+ if ($major_version eq '12') {
+ $VS_VER = '2013';
+ return;
+ } elsif ($major_version eq '11') {
+ $VS_VER = '2012';
+ return;
+ } elsif ($major_version eq '10') {
+ $VS_VER = '2010';
+ return;
+ }
+ }
+
+ die("Visual Studio Version Not Supported");
+}
+
+##################
+# TREE STRUCTURE #
+##################
+
+# Create directories that this script directly needs
+sub prepare_structure {
+ # ignore errors the directories may already exist.
+ mkdir($BINDIR);
+ mkdir($SRCDIR);
+ mkdir($BLDDIR);
+ mkdir($LIBDIR);
+ mkdir($INCDIR);
+}
+
+# Remove paths created by this script (directly or indirectly)
+# If the first arg is 1 it'll remove the downloaded files otherwise it
+# leaves them alone.
+sub clean_structure {
+ # ignore errors in this function the paths may not exist
+ my $real_clean = shift;
+
+ if ($real_clean) {
+ rmtree($SRCDIR);
+ }
+ rmtree($BINDIR);
+ rmtree($BLDDIR);
+ rmtree($INCDIR);
+ rmtree($LIBDIR);
+ rmtree("$INSTDIR\\serf");
+ rmtree("$INSTDIR\\neon");
+ rmtree("$INSTDIR\\sqlite-amalgamation");
+
+ # Dirs created indirectly by the install targets
+ rmtree("$INSTDIR\\man");
+ rmtree("$INSTDIR\\share");
+ rmtree("$INSTDIR\\ssl");
+ rmtree("$INSTDIR\\cgi-bin");
+ rmtree("$INSTDIR\\conf");
+ rmtree("$INSTDIR\\error");
+ rmtree("$INSTDIR\\htdocs");
+ rmtree("$INSTDIR\\icons");
+ rmtree("$INSTDIR\\logs");
+ rmtree("$INSTDIR\\manual");
+ rmtree("$INSTDIR\\modules");
+ unlink("$INSTDIR\\ABOUT_APACHE.txt");
+ unlink("$INSTDIR\\CHANGES.txt");
+ unlink("$INSTDIR\\INSTALL.txt");
+ unlink("$INSTDIR\\LICENSE.txt");
+ unlink("$INSTDIR\\NOTICE.txt");
+ unlink("$INSTDIR\\OPENSSL-NEWS.txt");
+ unlink("$INSTDIR\\OPENSSL-README.txt");
+ unlink("$INSTDIR\\README.txt");
+}
+
+############
+# DOWNLOAD #
+############
+
+# Download a url into a file if successful put the destination into the
+# variable referenced by $dest_ref.
+sub download_file {
+ my $url = shift;
+ my $file = shift;
+ my $dest_ref = shift;
+
+ # If the variable referenced by $dest_ref is already set, skip downloading
+ # means we've been asked to use an already downloaded file.
+ return if (defined($$dest_ref));
+
+ print "Downloading $url\n";
+ # Using mirror() here so that repeated runs shouldn't try to keep downloading
+ # the file.
+ my $response = mirror($url, $file);
+ if (is_error($response)) {
+ die "Couldn't save $url to $file received $response";
+ }
+ $$dest_ref = $file;
+}
+
+# Download all the dependencies we need
+sub download_dependencies {
+ # putting awk in sources is a bit of a hack but it lets us
+ # avoid having to figure out what to delete when cleaning bin
+ download_file($AWK_URL, "$SRCDIR\\awk.exe", \$AWK_FILE);
+ unless(-x "$BINDIR\\awk.exe") { # skip the copy if it exists
+ copy_or_die($AWK_FILE, "$BINDIR\\awk.exe");
+ }
+ download_file($PROJREF_URL, "$SRCDIR\\ProjRef.py", \$PROJREF_FILE);
+ unless(-x "$BINDIR\\ProjRef.py") { # skip the copy if it exists
+ copy_or_die($PROJREF_FILE, $BINDIR);
+ }
+ download_file($BDB_URL, "$SRCDIR\\db.zip", \$BDB_FILE);
+ download_file($ZLIB_URL, "$SRCDIR\\zlib.zip", \$ZLIB_FILE);
+ download_file($OPENSSL_URL, "$SRCDIR\\openssl.tar.gz", \$OPENSSL_FILE);
+ download_file($HTTPD_URL, "$SRCDIR\\httpd.tar.bz2", \$HTTPD_FILE);
+ download_file($APR_URL, "$SRCDIR\\apr.tar.bz2", \$APR_FILE);
+ download_file($APU_URL, "$SRCDIR\\apr-util.tar.bz2", \$APU_FILE);
+ download_file($API_URL, "$SRCDIR\\apr-iconv.tar.bz2", \$API_FILE);
+ download_file($PCRE_URL, "$SRCDIR\\pcre.zip", \$PCRE_FILE);
+ download_file($SQLITE_URL, "$SRCDIR\\sqlite-amalgamation.zip", \$SQLITE_FILE);
+ download_file($SERF_URL, "$SRCDIR\\serf.zip", \$SERF_FILE);
+ download_file($NEON_URL, "$SRCDIR\\neon.tar.gz", \$NEON_FILE) if $NEON;
+}
+
+##############
+# EXTRACTION #
+##############
+
+# Extract a compressed file with 7-zip into a given directory
+# Skip extraction if destination of rename_to or expected_name exists
+# if rename_to is set rename the path from expected_name to rename_to
+sub extract_file {
+ my $file = shift;
+ my $container = shift;
+ my $expected_name = shift;
+ my $rename_to = shift;
+
+ if (defined($rename_to)) {
+ return if -d $rename_to;
+ } elsif (defined($expected_name)) {
+ return if -d $expected_name;
+ }
+
+ my $dest_opt = "";
+ if (defined($container)) {
+ $dest_opt = qq(-o"$container" );
+ }
+
+ my $cmd;
+ if ($file =~ /\.tar\.(bz2|gz)$/) {
+ $cmd = qq("$SEVEN_ZIP" x "$file" -so | "$SEVEN_ZIP" x -y -si -ttar $dest_opt);
+ } else {
+ $cmd = qq("$SEVEN_ZIP" x -y $dest_opt $file);
+ }
+
+ system_or_die("Problem extracting $file", $cmd);
+ if (defined($rename_to)) {
+ rename_or_die($expected_name, $rename_to);
+ }
+}
+
+sub extract_dependencies {
+ extract_file($BDB_FILE, $BLDDIR,
+ "$BLDDIR\\db-$BDB_VER", "$BLDDIR\\bdb");
+ extract_file($HTTPD_FILE, $BLDDIR,
+ "$BLDDIR\\httpd-$HTTPD_VER", "$BLDDIR\\httpd");
+ extract_file($APR_FILE, $SRCLIB,
+ "$SRCLIB\\apr-$APR_VER", "$SRCLIB\\apr");
+ extract_file($APU_FILE, $SRCLIB,
+ "$SRCLIB\\apr-util-$APU_VER", "$SRCLIB\\apr-util");
+ extract_file($API_FILE, $SRCLIB,
+ "$SRCLIB\\apr-iconv-$API_VER", "$SRCLIB\\apr-iconv");
+ # We fix the line endings before putting the non-Apache deps in place since it
+ # touches everything under httpd and there's no point in doing other things.
+ httpd_fix_lineends();
+ extract_file($ZLIB_FILE, $SRCLIB,
+ "$SRCLIB\\zlib-$ZLIB_VER", "$SRCLIB\\zlib");
+ extract_file($OPENSSL_FILE, $SRCLIB,
+ "$SRCLIB\\openssl-$OPENSSL_VER", "$SRCLIB\\openssl");
+ extract_file($PCRE_FILE, $SRCLIB,
+ "$SRCLIB\\pcre-$PCRE_VER", "$SRCLIB\\pcre");
+ extract_file($SQLITE_FILE, $INSTDIR,
+ "$INSTDIR\\sqlite-amalgamation-$SQLITE_VER",
+ "$INSTDIR\\sqlite-amalgamation");
+ extract_file($SERF_FILE, $INSTDIR,
+ "$INSTDIR\\serf-$SERF_VER", "$INSTDIR\\serf");
+ extract_file($NEON_FILE, $INSTDIR,
+ "$INSTDIR\\neon-$NEON_VER", "$INSTDIR\\neon") if $NEON;
+}
+
+#########
+# BUILD #
+#########
+
+sub build_pcre {
+ chdir_or_die("$SRCLIB\\pcre");
+ my $pcre_generator = 'NMake Makefiles';
+ # Have to use RelWithDebInfo since httpd looks for the pdb files
+ my $pcre_build_type = '-DCMAKE_BUILD_TYPE:STRING=' . ($DEBUG ? 'Debug' : 'RelWithDebInfo');
+ my $pcre_options = '-DPCRE_NO_RECURSE:BOOL=ON';
+ my $pcre_shared_libs = '-DBUILD_SHARED_LIBS:BOOL=ON';
+ my $pcre_install_prefix = "-DCMAKE_INSTALL_PREFIX:PATH=$INSTDIR";
+ my $cmake_cmd = qq("$CMAKE" -G "$pcre_generator" "$pcre_build_type" "$pcre_shared_libs" "$pcre_install_prefix" "$pcre_options" .);
+ system_or_die("Failure generating pcre Makefiles", $cmake_cmd);
+ system_or_die("Failure building pcre", qq("$NMAKE"));
+ system_or_die("Failure testing pcre", qq("$NMAKE" test));
+ system_or_die("Failure installing pcre", qq("$NMAKE" install));
+ chdir_or_die($TOPDIR);
+}
+
+# This is based roughly off the build_zlib.bat that the Subversion Windows
+# build generates, if it doesn't match that then Subversion will fail to build.
+sub build_zlib {
+ chdir_or_die("$SRCLIB\\zlib");
+ $ENV{CC_OPTS} = $DEBUG ? '/MDd /Gm /ZI /Od /GZ /D_DEBUG' : '/MD /02 /Zi';
+ $ENV{COMMON_CC_OPTS} = '/nologo /W3 /DWIN32 /D_WINDOWS';
+
+ system_or_die("Failure building zilb", qq("$NMAKE" /nologo -f win32\\Makefile.msc STATICLIB=zlibstat.lib all));
+
+ delete $ENV{CC_OPTS};
+ delete $ENV{COMMON_CC_OPTS};
+
+ chdir_or_die($TOPDIR);
+}
+
+sub build_openssl {
+ chdir_or_die("$SRCLIB\\openssl");
+
+ # We're building openssl without an assembler. If someone wants to
+ # use this for production they should probably download NASM and
+ # remove the no-asm below and use ms\do_nasm.bat instead.
+
+ # TODO: Enable openssl to use zlib. openssl needs some patching to do
+ # this since it wants to look for zlib as zlib1.dll and as the httpd
+ # build instructions note you probably don't want to dynamic link zlib.
+
+ # TODO: OpenSSL requires perl on the path since it uses perl without a full
+ # path in the batch file and the makefiles. Probably should determine
+ # if PERL is on the path and add it here if not.
+
+ # The apache build docs suggest no-rc5 no-idea enable-mdc2 on top of what
+ # is used below, the primary driver behind that is patents, but I believe
+ # the rc5 and idea patents have expired.
+ my $platform = $DEBUG ? 'debug-VC-WIN32' : 'VC-WIN32';
+ system_or_die("Failure configuring openssl",
+ qq("$PERL" Configure no-asm "--prefix=$INSTDIR" $platform));
+ system_or_die("Failure building openssl (bat)", 'ms\do_ms.bat');
+ system_or_die("Failure building openssl (nmake)", qq("$NMAKE" /f ms\\ntdll.mak));
+ system_or_die("Failure testing openssl", qq("$NMAKE" /f ms\\ntdll.mak test));
+ system_or_die("Failure installing openssl",
+ qq("$NMAKE" /f ms\\ntdll.mak install));
+ chdir_or_die($TOPDIR);
+}
+
+# Run devenv /Upgrade on file.
+# If the file isn't a .sln file and the sln file isn't empty shortcut this
+# If the file isn't a .sln file touch the basename.sln of file to avoid
+# Visual Studio whining about its backup step.
+sub upgrade_solution {
+ my $file = shift;
+ my $interactive = shift;
+ my $flags = "";
+
+ my ($basename, $directories) = fileparse($file, qr/\.[^.]*$/);
+ my $sln = $directories . $basename . '.sln';
+ return if $file ne $sln and -s $sln; # shortcut if sln file is unique and isn't empty
+ # 'touch' the sln file so that Visual Studio 2012
+ # doesn't try to say there was an error while upgrading because
+ # it was unable to backup the original solution file.
+ unless (-e $sln) {
+ open(SLN, ">$sln") or die "Can't create $sln: $!";
+ close(SLN);
+ }
+ print "Upgrading $file (this may take a while)\n";
+ $flags = " /Upgrade" unless $interactive;
+ system_or_die("Failure upgrading $file", qq("$DEVENV" "$file"$flags));
+ if ($interactive) {
+ print "Can't do automatic upgrade, doing interactive upgrade\n";
+ print "IDE will load, choose to convert all projects, exit the IDE and\n";
+ print "save the resulting solution file\n\n";
+ print "Press Enter to Continue\n";
+ <>;
+ }
+}
+
+# Run the lineends.pl script
+sub httpd_fix_lineends {
+ chdir_or_die($HTTPD);
+ # This script fixes the lineendings to be CRLF in appropriate files.
+ # If we don't run this script then the DSW Upgrade will fail.
+ system_or_die(undef, qq("$PERL" "$SRCLIB\\apr\\build\\lineends.pl"));
+ chdir_or_die($TOPDIR);
+}
+
+# The httpd makefile in 2.4.4 doesn't know about .vcxproj files and
+# still thinks it's got an older version of Visual Studio because
+# .vcproj files have become .vcxproj.
+sub httpd_fix_makefile {
+ my $file = shift;
+
+ modify_file_in_place($file, sub {
+ s/\.vcproj/.vcxproj/i;
+ # below fixes that installd breaks when trying to install pcre because
+ # dll is named pcred.dll when a Debug build.
+ s/^(\s*copy srclib\\pcre\\pcre\.\$\(src_dll\)\s+"\$\(inst_dll\)"\s+<\s*\.y\s*)$/!IF EXISTS("srclib\\pcre\\pcre\.\$(src_dll)")\n$1!ENDIF\n!IF EXISTS("srclib\\pcre\\pcred\.\$(src_dll)")\n\tcopy srclib\\pcre\\pcred.\$(src_dll)\t\t\t"\$(inst_dll)" <.y\n!ENDIF\n/;
+ });
+}
+
+# This is a poor man's way of inserting a property group into a
+# vcxproj file. It assumes that the ending Project tag will
+# be the start and end of the line with no whitespace, probably
+# not an entirely valid assumption but it works in this case.
+sub insert_property_group {
+ my $file = shift;
+ my $xml = shift;
+ my $bak = shift;
+
+ modify_file_in_place($file, sub {
+ s#(^</Project>$)#<PropertyGroup>$xml</PropertyGroup>\n$1#i;
+ }, $bak);
+}
+
+# Strip pre-compiled headers compile and linker flags from file they follow
+# the form: /Ycfoo.h or /Yufoo.h.
+sub disable_pch {
+ my $file = shift;
+
+ modify_file_in_place($file, sub {
+ s#/Y[cu][^ ]+##;
+ });
+}
+
+# Find the first .exe .dll or .so OutputFile in the project
+# provided by file. There may be macros or paths in the
+# result.
+sub get_output_file {
+ my $file = shift;
+ my $result;
+ local $_; # Don't mess with the $_ from the find callback
+
+ open(IN, "<$file") or die "Couldn't open file $file: $!";
+ while (<IN>) {
+ if (m#<OutputFile>(.*?\.(?:exec|dll|so))</OutputFile>#) {
+ $result = $1;
+ last;
+ }
+ }
+ close(IN);
+ return $result;
+}
+
+# Find the name of the bdb library we've installed in our LIBDIR.
+sub find_bdb_lib {
+ my $result;
+ my $debug = $DEBUG ? 'd' : '';
+ find(sub {
+ if (not defined($result) and /^libdb\d+$debug\.lib$/) {
+ $result = $_;
+ }
+ }, $LIBDIR);
+ return $result;
+}
+
+# Insert the dependency dep into project file.
+# bak can be set to set the backup filename made of the project.
+sub insert_dependency_in_proj {
+ my $file = shift;
+ my $dep = shift;
+ my $bak = shift;
+
+ modify_file_in_place($file, sub {
+ s/(%\(AdditionalDependencies\))/$dep;$1/;
+ }, $bak);
+}
+
+# Do what's needed to enable BDB in the httpd and apr-util builds
+sub httpd_enable_bdb {
+ # Make APU_HAVE_DB be true so the code builds.
+ modify_file_in_place('srclib\apr-util\include\apu.hw', sub {
+ s/(#define\s+APU_HAVE_DB\s+)0/${1}1/;
+ });
+
+ # Fix the linkage, apr_dbm_db is hardcoded to libdb47.lib
+ my $bdb_lib = find_bdb_lib();
+ modify_file_in_place('srclib\apr-util\dbm\apr_dbm_db.vcxproj', sub {
+ s/libdb\d+\.lib/$bdb_lib/g;
+ }, '.bdb');
+
+ # httxt2dbm and htdbm need a BDB dependency and don't have one.
+ insert_dependency_in_proj('support\httxt2dbm.vcxproj', $bdb_lib, '.bdb');
+ insert_dependency_in_proj('support\htdbm.vcxproj', $bdb_lib, '.bdb');
+}
+
+# Apply the same fix as found in r1486937 on httpd 2.4.x branch.
+sub httpd_fix_debug {
+ my ($httpd_major, $httpd_minor, $httpd_patch) = $HTTPD_VER =~ /^(\d+)\.(\d+)\.(.+)$/;
+ return unless ($httpd_major <= 2 && $httpd_minor <= 4 && $httpd_patch < 5);
+
+ modify_file_in_place('libhttpd.dsp', sub {
+ s/^(!MESSAGE "libhttpd - Win32 Debug" \(based on "Win32 \(x86\) Dynamic-Link Library"\))$/$1\n!MESSAGE "libhttpd - Win32 Lexical" (based on "Win32 (x86) Dynamic-Link Library")/;
+ s/^(# Begin Group "headers")$/# Name "libhttpd - Win32 Lexical"\n$1/;
+ }, '.lexical');
+}
+
+sub build_httpd {
+ chdir_or_die($HTTPD);
+
+ my $vs_2013 = $VS_VER eq '2013';
+ my $vs_2012 = $VS_VER eq '2012';
+ my $vs_2010 = $VS_VER eq '2010';
+
+ httpd_fix_debug();
+
+ # I don't think cvtdsp.pl is necessary with Visual Studio 2012
+ # but it shouldn't hurt anything either. Including it allows
+ # for the possibility that this may work for older Visual Studio
+ # versions.
+ system_or_die("Failure converting DSP files",
+ qq("$PERL" srclib\\apr\\build\\cvtdsp.pl -2005));
+
+ upgrade_solution('Apache.dsw', $vs_2010);
+ httpd_enable_bdb();
+ httpd_fix_makefile('Makefile.win');
+
+ # Modules and support projects randomly fail due to an error about the
+ # CL.read.1.tlog file already existing. This is really because of the
+ # intermediate dirs being shared between modules, but for the time being
+ # this works around it.
+ find(sub {
+ if (/\.vcxproj$/) {
+ insert_property_group($_, '<TrackFileAccess>false</TrackFileAccess>')
+ }
+ }, 'modules', 'support');
+
+ if ($vs_2012 or $vs_2013) {
+ # Turn off pre-compiled headers for apr-iconv to avoid:
+ # LNK2011: http://msdn.microsoft.com/en-us/library/3ay26wa2(v=vs.110).aspx
+ disable_pch('srclib\apr-iconv\build\modules.mk.win');
+
+ # ApacheMonitor build fails due a duplicate manifest, turn off
+ # GenerateManifest
+ insert_property_group('support\win32\ApacheMonitor.vcxproj',
+ '<GenerateManifest>false</GenerateManifest>',
+ '.dupman');
+
+ # The APR libraries have projects named libapr but produce output named libapr-1
+ # The problem with this is in newer versions of Visual Studio TargetName defaults
+ # to the project name and not the basename of the output. Since the PDB file
+ # is named based on the TargetName the pdb file ends up being named libapr.pdb
+ # instead of libapr-1.pdb. The below call fixes this by explicitly providing
+ # a TargetName definition and shuts up some warnings about this problem as well.
+ # Without this fix the install fails when it tries to copy libapr-1.pdb.
+ # See this thread for details of the changes:
+ # http://social.msdn.microsoft.com/Forums/en-US/vcprerelease/thread/3c03e730-6a0e-4ee4-a0d6-6a5c3ce4343c
+ find(sub {
+ return unless (/\.vcxproj$/);
+ my $output_file = get_output_file($_);
+ return unless (defined($output_file));
+ my ($project_name) = fileparse($_, qr/\.[^.]*$/);
+ my ($old_style_target_name) = fileparse($output_file, qr/\.[^.]*$/);
+ return if ($old_style_target_name eq $project_name);
+ insert_property_group($_,
+ "<TargetName>$old_style_target_name</TargetName>", '.torig');
+ }, "$SRCLIB\\apr", "$SRCLIB\\apr-util", "$SRCLIB\\apr-iconv");
+ } elsif ($vs_2010) {
+ system_or_die("Failed fixing project guid references",
+ qq("$PYTHON" "$BINDIR\\ProjRef.py" -i Apache.sln"));
+ }
+
+ # If you're looking here it's possible that something went
+ # wrong with the httpd build. Debugging it can be a bit of a pain
+ # when using this script. There are log files created in the
+ # Release dirs named with the same basename as the project. E.G.
+ # for support\httxt2dbm.vcxproj you can find the log in
+ # support\Release\httxt2dbm.log. You can also run a similar build
+ # from in the IDE, but you'll need to disable some projects since
+ # they are separately driven by the Makefile.win. Grepping for
+ # '/project' in Makefile.win should tell you which projects. You'll
+ # also need to add the bin, include and lib paths to the appropriate
+ # configurations inside the project since we get them from the environment.
+ # Once all that is done the BuildBin project should be buildable for you to
+ # diagnose the problem.
+ my $target = $DEBUG ? "installd" : "installr";
+ system_or_die("Failed building/installing httpd/apr/apu/api",
+ qq("$NMAKE" /f Makefile.win $target "DBM_LIST=db" "INSTDIR=$INSTDIR"));
+
+ chdir_or_die($TOPDIR);
+}
+
+sub build_bdb {
+ chdir_or_die($BDB);
+
+ print(cwd(),$/);
+ my $sln = 'build_windows\Berkeley_DB_vs2010.sln';
+ upgrade_solution($sln);
+
+ my $platform = $DEBUG ? 'Debug|Win32' : 'Release|Win32';
+
+ # Build the db Project first since the full solution fails due to a broken
+ # dependency with the current version of BDB if we don't.
+ system_or_die("Failed building DBD (Project db)",
+ qq("$DEVENV" "$sln" /Build "$platform" /Project db));
+
+ system_or_die("Failed building DBD",
+ qq("$DEVENV" "$sln" /Build "$platform"));
+
+  # BDB doesn't seem to have its own install routines so we'll do it ourselves
+ copy_or_die('build_windows\db.h', $INCDIR);
+ find(sub {
+ if (/\.(exe|dll|pdb)$/) {
+ copy_or_die($_, $BINDIR);
+ } elsif (/\.lib$/) {
+ copy_or_die($_, $LIBDIR);
+ }
+ }, 'build_windows\\Win32\\' . ($DEBUG ? 'Debug' : 'Release'));
+
+ chdir_or_die($TOPDIR);
+}
+
+# Right now this doesn't actually build serf but just patches it so that it
+# can build against a debug build of OpenSSL.
+sub build_serf {
+ chdir_or_die("$TOPDIR\\serf");
+
+ modify_file_in_place('serf.mak', sub {
+ s/^(INTDIR = Release)$/$1\nOPENSSL_OUT_SUFFIX =/;
+ s/^(INTDIR = Debug)$/$1\nOPENSSL_OUT_SUFFIX = .dbg/;
+ s/(\$\(OPENSSL_SRC\)\\out32(?:dll)?)/$1\$(OPENSSL_OUT_SUFFIX)/g;
+ }, '.debug');
+
+ chdir_or_die($TOPDIR);
+}
+
+sub build_dependencies {
+ build_bdb();
+ build_zlib();
+ build_pcre();
+ build_openssl();
+ build_serf();
+ build_httpd();
+}
+
+###############
+# COMMANDLINE #
+###############
+
+# Implement an interface somewhat similar to the make command line
+# You can give a list of commands and variable assignments interspersed.
+# Variable assignments are always VAR=VALUE with no spaces (in a single
+# argv entry).
+sub main {
+ my @commands;
+ while (my $arg = shift @ARGV) {
+ # Look for variable assignment
+ if (my ($lhs, $rhs) = $arg =~ /([^=]+)=(.*)/) {
+ # Bit of hackery to allow the global values in the
+      # Vars package to be overridden from the command line.
+ # E.G. "CMAKE=C:\CMake\cmake.exe" would replace the
+ # default value with this value.
+ if (exists($Vars::{$lhs})) {
+ ${$Vars::{$lhs}} = $rhs;
+ } else {
+ # Don't allow variables that don't exist already to be touched.
+ die "$lhs is an unknown variable.";
+ }
+ } else {
+ # Not a variable so must be a command
+ push @commands, $arg;
+ }
+ }
+
+ # No commands so add the implicit all command
+ if ($#commands == -1) {
+ push @commands, 'all';
+ }
+
+ # Set defaults and paths that have to be set at runtime since they are based
+ # on other variables.
+ Vars::set_defaults();
+ set_paths();
+
+ # Determine the Visual Studio Version and die if not supported.
+ check_vs_ver();
+
+ # change directory to our TOPDIR before running any commands
+ # the variable assignment might have changed it.
+ chdir_or_die($TOPDIR);
+
+ # Run the commands in the order given.
+ foreach my $command (@commands) {
+ if ($command eq 'clean') {
+ clean_structure(0);
+ } elsif ($command eq 'real-clean') {
+ clean_structure(1);
+ } elsif ($command eq 'prepare') {
+ prepare_structure();
+ } elsif ($command eq 'download') {
+ download_dependencies();
+ } elsif ($command eq 'extract') {
+ extract_dependencies();
+ } elsif ($command eq 'all') {
+ prepare_structure();
+ download_dependencies();
+ extract_dependencies();
+ build_dependencies();
+ } else {
+ die "Command '$command' is unknown";
+ }
+ }
+}
+
+main();
diff --git a/tools/dev/check-license.py b/tools/dev/check-license.py
new file mode 100755
index 0000000..37041be
--- /dev/null
+++ b/tools/dev/check-license.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python
+#
+# check if a file has the proper license in it
+#
+# USAGE: check-license.py [-C] file1 file2 ... fileN
+#
+# A 'file' may in fact be a directory, in which case it is recursively
+# searched.
+#
+# If the license cannot be found, then the filename is printed to stdout.
+# Typical usage:
+# $ check-license.py . > bad-files
+#
+# -C switch is used to change licenses.
+# Typical usage:
+# $ check-license.py -C file1 file2 ... fileN
+#
+
+import sys, os, re
+
# Note: Right now, OLD_LICENSE and NEW_LICENSE are the same, because
# r878444 updated all the license blocks. In the future, if we update
# the license block again, change just NEW_LICENSE and use this script.

# Regexp-quoted text of the license block this script searches for.
# Used for *matching only* (compiled into re_OLD below).
OLD_LICENSE = '''\
 \* ====================================================================
 \* Licensed to the Subversion Corporation \(SVN Corp\.\) under one
 \* or more contributor license agreements\. See the NOTICE file
 \* distributed with this work for additional information
 \* regarding copyright ownership\. The SVN Corp\. licenses this file
 \* to you under the Apache License, Version 2\.0 \(the
 \* "License"\); you may not use this file except in compliance
 \* with the License\. You may obtain a copy of the License at
 \*
 \* http://www\.apache\.org/licenses/LICENSE-2\.0
 \*
 \* Unless required by applicable law or agreed to in writing,
 \* software distributed under the License is distributed on an
 \* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 \* KIND, either express or implied\. See the License for the
 \* specific language governing permissions and limitations
 \* under the License\.
 \* ====================================================================
'''

# Shell-comment flavor of the old license: each quoted ' \*' prefix
# becomes a '#'.
SH_OLD_LICENSE = re.subn(r'(?m)^ \\\*', '#', OLD_LICENSE)[0]

# Remember not to do regexp quoting for NEW_LICENSE. Only OLD_LICENSE
# is used for matching; NEW_LICENSE is inserted as-is.
NEW_LICENSE = '''\
 * ====================================================================
 * Licensed to the Subversion Corporation (SVN Corp.) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The SVN Corp. licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 * ====================================================================
'''

# Shell-comment flavor of the new license, for sh_comment_suffices files.
SH_NEW_LICENSE = re.subn(r'(?m)^ \*', '#', NEW_LICENSE)[0]

# Compiled patterns used by check_file()/change_license() via visit().
re_OLD = re.compile(OLD_LICENSE)
re_SH_OLD = re.compile(SH_OLD_LICENSE)
# Files containing any of these markers are exempt from the license check.
re_EXCLUDE = re.compile(
    r'automatically generated by SWIG'
    + r'|Generated from configure\.in'
    + r'|placed into the public domain'
    )

# Extensions whose files use C-style block comments for the license.
c_comment_suffices = ('.c', '.java', '.h', '.cpp', '.hw', '.pas')

# Yes, this is an empty tuple. No types that fit in this category uniformly
# have a copyright block.
# Possible types to add here:
# ('.bat', '.py', '.pl', '.in')
sh_comment_suffices = ()
+
def check_file(fname, old_re, new_lic):
    """Print FNAME if it lacks the old license and is not excluded.

    NEW_LIC is unused; the parameter exists so this function has the
    same signature as change_license() (both are passed as 'file_func').
    """
    contents = open(fname).read()
    if old_re.search(contents):
        return
    if re_EXCLUDE.search(contents):
        return
    print(fname)
+
def change_license(fname, old_re, new_lic):
    """Replace the first OLD_RE match in file FNAME with NEW_LIC, in place.

    Prints an error (and leaves the file untouched) when the old license
    cannot be found; otherwise rewrites the file and reports the change.
    """
    # Use context managers so the handles are closed deterministically;
    # the original leaked open file objects (closed only by refcounting).
    with open(fname) as f:
        s = f.read()
    m = old_re.search(s)
    if not m:
        print('ERROR: missing old license: %s' % fname)
        return
    s = s[:m.start()] + new_lic + s[m.end():]
    with open(fname, 'w') as f:
        f.write(s)
    print('Changed: %s' % fname)
+
def visit(baton, dirname, dircontents):
    """Apply BATON (check_file or change_license) to each recognized file.

    DIRNAME is the directory being visited and DIRCONTENTS the list of
    entries in it; '.svn' and '.libs' entries are pruned from the list
    so walk-style callers do not recurse into them.
    """
    file_func = baton
    # Iterate over a snapshot: the original removed entries from the very
    # list it was iterating, which made the loop skip the element that
    # followed each removal (e.g. '.libs' listed right after '.svn').
    for entry in list(dircontents):
        # Don't recurse into certain directories
        if entry in ('.svn', '.libs'):
            dircontents.remove(entry)
            continue

        extension = os.path.splitext(entry)[1]
        fullname = os.path.join(dirname, entry)

        if os.path.isdir(fullname):
            continue

        if extension in c_comment_suffices:
            file_func(fullname, re_OLD, NEW_LICENSE)
        elif extension in sh_comment_suffices:
            file_func(fullname, re_SH_OLD, SH_NEW_LICENSE)
+
def main():
    """Check (default) or change (-C) licenses in the files/dirs in argv."""
    file_func = check_file
    # Guard the argv access: the original raised IndexError when run
    # with no arguments at all.
    if len(sys.argv) > 1 and sys.argv[1] == '-C':
        print('Changing license text...')
        del sys.argv[1]
        file_func = change_license

    for f in sys.argv[1:]:
        if os.path.isdir(f):
            # NOTE(review): visit() prunes '.svn'/'.libs' from the combined
            # dirs+files list, but only removals from 'dirs' itself stop
            # os.walk from recursing -- confirm whether pruning is intended
            # to be effective here.
            for dirpath, dirs, files in os.walk(f):
                visit(file_func, dirpath, dirs + files)
        else:
            dirname, basename = os.path.split(f)
            # Wrap the single filename in a list: the original passed the
            # bare string, which visit() then iterated character by
            # character instead of as one directory entry.
            visit(file_func, dirname, [basename])

if __name__ == '__main__':
    main()
diff --git a/tools/dev/contribulyze.py b/tools/dev/contribulyze.py
new file mode 100755
index 0000000..8afc608
--- /dev/null
+++ b/tools/dev/contribulyze.py
@@ -0,0 +1,767 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+# See usage() for details, or run with --help option.
+#
+# .-------------------------------------------------.
+# | "An ad hoc format deserves an ad hoc parser." |
+# `-------------------------------------------------'
+#
+# Some Subversion project log messages include parseable data to help
+# track who's contributing what. The exact syntax is described in
+# http://subversion.apache.org/docs/community-guide/conventions.html#crediting,
+# but here's an example, indented by three spaces, i.e., the "Patch by:"
+# starts at the beginning of a line:
+#
+# Patch by: David Anderson <david.anderson@calixo.net>
+# <justin@erenkrantz.com>
+# me
+# (I wrote the regression tests.)
+# Found by: Phineas T. Phinder <phtph@ph1nderz.com>
+# Suggested by: Snosbig Q. Ptermione <sqptermione@example.com>
+# Review by: Justin Erenkrantz <justin@erenkrantz.com>
+# rooneg
+# (They caught an off-by-one error in the main loop.)
+#
+# This is a pathological example, but it shows all the things we might
+# need to parse. We need to:
+#
+# - Detect the officially-approved "WORD by: " fields.
+# - Grab every name (one per line) in each field.
+# - Handle names in various formats, unifying where possible.
+# - Expand "me" to the committer name for this revision.
+# - Associate a parenthetical aside following a field with that field.
+#
+# NOTES: You might be wondering, why not take 'svn log --xml' input?
+# Well, that would be the Right Thing to do, but in practice this was
+# a lot easier to whip up for straight 'svn log' output. I'd have no
+# objection to it being rewritten to take XML input.
+
+import os
+import sys
+import re
+import getopt
+try:
+ my_getopt = getopt.gnu_getopt
+except AttributeError:
+ my_getopt = getopt.getopt
+try:
+ # Python >=3.0
+ from urllib.parse import quote as urllib_parse_quote
+except ImportError:
+ # Python <3.0
+ from urllib import quote as urllib_parse_quote
+
+
+# Warnings and errors start with these strings. They are typically
+# followed by a colon and a space, as in "%s: " ==> "WARNING: ".
+warning_prefix = 'WARNING'
+error_prefix = 'ERROR'
+
def complain(msg, fatal=False):
    """Write MSG to stderr as a warning, or, if FATAL is true, as an
    error followed by exiting with status 1."""
    if fatal:
        sys.stderr.write('ERROR: ' + msg + '\n')
        sys.exit(1)
    sys.stderr.write('WARNING: ' + msg + '\n')
+
+
def html_spam_guard(addr, entities_only=False):
    """Return a spam-protected version of email ADDR that renders the
    same in HTML as the original address.  If ENTITIES_ONLY, use a less
    thorough mangling scheme involving entities only, avoiding the use
    of tags."""
    template = "&#%d;" if entities_only else "<span>&#%d;</span>"
    return "".join(template % ord(ch) for ch in addr)
+
+
def escape_html(str):
    """Return an HTML-escaped version of STR."""
    # '&' must be escaped first so the entities themselves survive.
    escaped = str
    for raw, entity in (('&', '&amp;'), ('<', '&lt;'), ('>', '&gt;')):
        escaped = escaped.replace(raw, entity)
    return escaped
+
+
# Matches an already-escaped email address, i.e. "&lt;user@host&gt;".
_spam_guard_in_html_block_re = re.compile(r'&lt;([^&]*@[^&]*)&gt;')

def _spam_guard_in_html_block_func(m):
    return "&lt;%s&gt;" % html_spam_guard(m.group(1))

def spam_guard_in_html_block(str):
    """Take a block of HTML data, and run html_spam_guard() on parts of it."""
    # .sub() is equivalent to .subn(...)[0].
    return _spam_guard_in_html_block_re.sub(_spam_guard_in_html_block_func,
                                            str)
+
def html_header(title, page_heading=None, highlight_targets=False):
    """Return an HTML page header.  TITLE and PAGE_HEADING parameters
    are expected to already be HTML-escaped if needed.  If
    HIGHLIGHT_TARGETS is true, include a style rule that surrounds
    anchor targets with a red border when they are jumped to."""
    heading = page_heading or title
    parts = [
        '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"\n',
        ' "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">\n',
        '<html><head>\n',
        '<meta http-equiv="Content-Type"',
        ' content="text/html; charset=UTF-8" />\n',
    ]
    if highlight_targets:
        parts.append('<style type="text/css">\n')
        parts.append(':target { border: 2px solid red; }\n')
        parts.append('</style>\n')
    parts.append('<title>%s</title>\n' % title)
    parts.append('</head>\n\n')
    parts.append('<body style="text-color: black; background-color: white">\n\n')
    parts.append('<h1 style="text-align: center">%s</h1>\n\n' % heading)
    parts.append('<hr />\n\n')
    return ''.join(parts)
+
+
def html_footer():
    """Return the closing tags matching html_header()."""
    return '\n' + '</body>\n' + '</html>\n'
+
+
class Contributor(object):
    """One contributor, unified across username, real name, and email.

    Use Contributor.get() (not the constructor) so that every spelling
    of one person's identity maps onto a single shared instance.
    """
    # Map contributor names to contributor instances, so that there
    # exists exactly one instance associated with a given name.
    # Fold names with email addresses.  That is, if we see someone
    # listed first with just an email address, but later with a real
    # name and that same email address together, we create only one
    # instance, and store it under both the email and the real name.
    all_contributors = { }

    def __init__(self, username, real_name, email):
        """Instantiate a contributor.  Don't use this to generate a
        Contributor for an external caller, though, use .get() instead."""
        self.real_name = real_name
        self.username = username
        self.email = email
        self.is_committer = False       # Assume not until hear otherwise.
        self.is_full_committer = False  # Assume not until hear otherwise.
        # Map verbs (e.g., "Patch", "Suggested", "Review") to lists of
        # LogMessage objects.  For example, the log messages stored under
        # "Patch" represent all the revisions for which this contributor
        # contributed a patch.
        self.activities = { }

    def add_activity(self, field_name, log):
        """Record that this contributor was active in FIELD_NAME in LOG."""
        logs = self.activities.get(field_name)
        if not logs:
            logs = [ ]
            self.activities[field_name] = logs
        if not log in logs:
            logs.append(log)

    @staticmethod
    def get(username, real_name, email):
        """If this contributor is already registered, just return it;
        otherwise, register it then return it.  Hint: use parse() to
        generate the arguments."""
        c = None
        for key in username, real_name, email:
            if key and key in Contributor.all_contributors:
                c = Contributor.all_contributors[key]
                break
        # If we didn't get a Contributor, create one now.
        if not c:
            c = Contributor(username, real_name, email)
        # If we know identifying information that the Contributor lacks,
        # then give it to the Contributor now.
        if username:
            if not c.username:
                c.username = username
            Contributor.all_contributors[username] = c
        if real_name:
            if not c.real_name:
                c.real_name = real_name
            Contributor.all_contributors[real_name] = c
        if email:
            if not c.email:
                c.email = email
            Contributor.all_contributors[email] = c
        # This Contributor has never been in better shape; return it.
        return c

    def score(self):
        """Return a contribution score for this contributor."""
        # Right now we count a patch as 2, anything else as 1.
        score = 0
        for activity in self.activities.keys():
            if activity == 'Patch':
                score += len(self.activities[activity]) * 2
            else:
                score += len(self.activities[activity])
        return score

    def score_str(self):
        """Return a contribution score HTML string for this contributor."""
        patch_score = 0
        other_score = 0
        for activity in self.activities.keys():
            if activity == 'Patch':
                patch_score += len(self.activities[activity])
            else:
                other_score += len(self.activities[activity])
        if patch_score == 0:
            patch_str = ""
        elif patch_score == 1:
            patch_str = "1&nbsp;patch"
        else:
            patch_str = "%d&nbsp;patches" % patch_score
        if other_score == 0:
            other_str = ""
        elif other_score == 1:
            other_str = "1&nbsp;non-patch"
        else:
            other_str = "%d&nbsp;non-patches" % other_score
        if patch_str:
            if other_str:
                return ",&nbsp;".join((patch_str, other_str))
            else:
                return patch_str
        else:
            return other_str

    def __cmp__(self, other):
        # Python 2 ordering: full committers sort last; otherwise sort
        # by descending score, ties broken by ascending big_name().
        if self.is_full_committer and not other.is_full_committer:
            return 1
        if other.is_full_committer and not self.is_full_committer:
            return -1
        result = cmp(self.score(), other.score())
        if result == 0:
            return cmp(self.big_name(), other.big_name())
        else:
            return 0 - result

    def __lt__(self, other):
        # Python 3 ignores __cmp__, so sorted(Contributor...) needs a
        # rich comparison.  This mirrors __cmp__ exactly: non-full
        # committers first, then descending score, then ascending name.
        if self.is_full_committer != other.is_full_committer:
            return other.is_full_committer
        my_score = self.score()
        their_score = other.score()
        if my_score != their_score:
            return my_score > their_score
        return self.big_name() < other.big_name()

    @staticmethod
    def parse(name):
        """Parse NAME, which can be

           - A committer username, or
           - A space-separated real name, or
           - A space-separated real name followed by an email address in
             angle brackets, or
           - Just an email address in angle brackets.

           (The email address may have '@' disguised as '{_AT_}'.)

           Return a tuple of (committer_username, real_name, email_address)
           any of which can be None if not available in NAME."""
        username = None
        real_name = None
        email = None
        name_components = name.split()
        if len(name_components) == 1:
            name = name_components[0]  # Effectively, name = name.strip()
            if name[0] == '<' and name[-1] == '>':
                email = name[1:-1]
            elif name.find('@') != -1 or name.find('{_AT_}') != -1:
                email = name
            else:
                username = name
        elif name_components[-1][0] == '<' and name_components[-1][-1] == '>':
            real_name = ' '.join(name_components[0:-1])
            email = name_components[-1][1:-1]
        else:
            real_name = ' '.join(name_components)

        if email is not None:
            # We unobfuscate here and work with the '@' internally, since
            # we'll obfuscate it again (differently) before writing it out.
            email = email.replace('{_AT_}', '@')

        return username, real_name, email

    def canonical_name(self):
        """Return a canonical name for this contributor.  The canonical
        name may or may not be based on the contributor's actual email
        address.

        The canonical name will not contain filename-unsafe characters.

        This method is guaranteed to return the same canonical name every
        time only if no further contributions are recorded from this
        contributor after the first call.  This is because a contribution
        may bring a new form of the contributor's name, one which affects
        the algorithm used to construct canonical names."""
        retval = None
        if self.username:
            retval = self.username
        elif self.email:
            # Take some rudimentary steps to shorten the email address, to
            # make it more manageable.  If this is ever discovered to result
            # in collisions, we can always just use to the full address.
            try:
                at_posn = self.email.index('@')
                first_dot_after_at = self.email.index('.', at_posn)
                retval = self.email[0:first_dot_after_at]
            except ValueError:
                retval = self.email
        elif self.real_name:
            # Last resort: construct canonical name based on real name.
            retval = ''.join(self.real_name.lower().split(' '))
        if retval is None:
            complain('Unable to construct a canonical name for Contributor.', True)
        return urllib_parse_quote(retval, safe="!#$&'()+,;<=>@[]^`{}~")

    def big_name(self, html=False, html_eo=False):
        """Return as complete a name as possible for this contributor.
        If HTML, then call html_spam_guard() on email addresses.
        If HTML_EO, then do the same, but specifying entities_only mode."""
        html = html or html_eo
        name_bits = []
        if self.real_name:
            if html:
                name_bits.append(escape_html(self.real_name))
            else:
                name_bits.append(self.real_name)
        if self.email:
            if not self.real_name and not self.username:
                name_bits.append(self.email)
            elif html:
                name_bits.append("&lt;%s&gt;" % html_spam_guard(self.email, html_eo))
            else:
                name_bits.append("<%s>" % self.email)
        if self.username:
            if not self.real_name and not self.email:
                name_bits.append(self.username)
            else:
                name_bits.append("(%s)" % self.username)
        return " ".join(name_bits)

    def __str__(self):
        s = 'CONTRIBUTOR: '
        s += self.big_name()
        s += "\ncanonical name: '%s'" % self.canonical_name()
        if len(self.activities) > 0:
            s += '\n   '
        for activity in self.activities.keys():
            val = self.activities[activity]
            s += '[%s:' % activity
            for log in val:
                s += ' %s' % log.revision
            s += ']'
        return s

    def html_out(self, revision_url_pattern, filename):
        """Create an HTML file named FILENAME, showing all the revisions in which
        this contributor was active."""
        out = open(filename, 'w')
        out.write(html_header(self.big_name(html_eo=True),
                              self.big_name(html=True), True))
        unique_logs = { }

        sorted_activities = sorted(self.activities.keys())

        # Activity summary table: one column per verb, each cell listing
        # links to the revisions credited under that verb.
        out.write('<div class="h2" id="activities" title="activities">\n\n')
        out.write('<table border="1">\n')
        out.write('<tr>\n')
        for activity in sorted_activities:
            out.write('<td>%s</td>\n\n' % activity)
        out.write('</tr>\n')
        out.write('<tr>\n')
        for activity in sorted_activities:
            out.write('<td>\n')
            first_activity = True
            for log in self.activities[activity]:
                s = ',\n'
                if first_activity:
                    s = ''
                    first_activity = False
                out.write('%s<a href="#%s">%s</a>' % (s, log.revision, log.revision))
                unique_logs[log] = True
            out.write('</td>\n')
        out.write('</tr>\n')
        out.write('</table>\n\n')
        out.write('</div>\n\n')

        # Full text of each credited log message, newest first.
        sorted_logs = sorted(unique_logs.keys())
        for log in sorted_logs:
            out.write('<hr />\n')
            out.write('<div class="h3" id="%s" title="%s">\n' % (log.revision,
                                                                 log.revision))
            out.write('<pre>\n')
            if revision_url_pattern:
                revision_url = revision_url_pattern % log.revision[1:]
                revision = '<a href="%s">%s</a>' \
                           % (escape_html(revision_url), log.revision)
            else:
                revision = log.revision
            out.write('<b>%s | %s | %s</b>\n\n' % (revision,
                                                   escape_html(log.committer),
                                                   escape_html(log.date)))
            out.write(spam_guard_in_html_block(escape_html(log.message)))
            out.write('</pre>\n')
            out.write('</div>\n\n')
            out.write('<hr />\n')

        out.write(html_footer())
        out.close()
+
+
class Field:
    """One "<verb> by:" field parsed out of a single log message."""

    def __init__(self, name, alias=None):
        # Canonical field name (e.g., "Patch", "Review") plus the
        # spelling actually seen in the log, if different ("Reviewed").
        self.name = name
        self.alias = alias
        # Contributor objects, in the order encountered in the field.
        self.contributors = []
        # Any parenthesized asides immediately following the field, with
        # parentheses and trailing newline kept.  In theory consecutive
        # asides concatenate; in practice the parser only ever detects
        # the first one, which is almost always all there is.
        self.addendum = ''

    def add_contributor(self, contributor):
        self.contributors.append(contributor)

    def add_endum(self, addendum):
        self.addendum += addendum

    def __str__(self):
        lines = ['FIELD: %s (%d contributors)'
                 % (self.name, len(self.contributors))]
        lines.extend(str(c) for c in self.contributors)
        return '\n'.join(lines) + '\n' + self.addendum
+
+
class LogMessage(object):
    """One parsed log message, with its crediting fields."""
    # Maps revision strings (e.g., "r12345") onto LogMessage instances,
    # holding all the LogMessage instances ever created.
    all_logs = { }
    # Keep track of youngest rev.
    max_revnum = 0

    def __init__(self, revision, committer, date):
        """Instantiate a log message.  All arguments are strings,
        including REVISION, which should retain its leading 'r'."""
        self.revision = revision
        self.committer = committer
        self.date = date
        self.message = ''
        # Map field names (e.g., "Patch", "Review", "Suggested") onto
        # Field objects.
        self.fields = { }
        if revision in LogMessage.all_logs:
            complain("Revision '%s' seen more than once" % revision, True)
        LogMessage.all_logs[revision] = self
        rev_as_number = int(revision[1:])
        if rev_as_number > LogMessage.max_revnum:
            LogMessage.max_revnum = rev_as_number

    def add_field(self, field):
        """Register FIELD (a Field object) under its canonical name."""
        self.fields[field.name] = field

    def accum(self, line):
        """Accumulate one more line of raw message."""
        self.message += line

    def __cmp__(self, other):
        """Compare two log messages by revision number, for sort().
        Return -1, 0 or 1 depending on whether a > b, a == b, or a < b.
        Note that this is reversed from normal sorting behavior, but it's
        what we want for reverse chronological ordering of revisions."""
        a = int(self.revision[1:])
        b = int(other.revision[1:])
        if a > b: return -1
        if a < b: return 1
        else: return 0

    def __lt__(self, other):
        # Python 3 ignores __cmp__, so sorted() (e.g. in
        # Contributor.html_out) needs this; mirror __cmp__'s reversed
        # ordering so revisions sort newest-first.
        return int(self.revision[1:]) > int(other.revision[1:])

    def __str__(self):
        s = '=' * 15
        header = ' LOG: %s | %s ' % (self.revision, self.committer)
        s += header
        s += '=' * 15
        s += '\n'
        for field_name in self.fields.keys():
            s += str(self.fields[field_name]) + '\n'
        s += '-' * 15
        s += '-' * len(header)
        s += '-' * 15
        s += '\n'
        return s
+
+
+
+### Code to parse the logs. ##
+
# A log message in "svn log" output is delimited by this separator line.
log_separator = '-' * 72 + '\n'
# Header line, e.g. "r1234 | someuser | 2009-01-01 ... | 3 lines".
# Raw strings throughout: '\|', '\s', '\(' are invalid escape sequences
# in plain string literals (DeprecationWarning since Python 3.6, a
# SyntaxWarning in 3.12).
log_header_re = re.compile(
    r'^(r[0-9]+) \| ([^|]+) \| ([^|]+) \| ([0-9]+)[^0-9]')
# First line of a crediting field, e.g. "Patch by: J. Random <...>".
field_re = re.compile(
    r'^(Patch|Review(ed)?|Suggested|Found|Inspired|Tested|Reported) by:'
    r'\s*\S.*$')
# Map alternate field spellings onto their canonical names.
field_aliases = {
    'Reviewed' : 'Review',
    'Reported' : 'Found',
}
# A parenthetical aside line, e.g. "  (fixed the tests)".
parenthetical_aside_re = re.compile(r'^\s*\(.*\)\s*$')
+
def graze(input):
    """Parse "svn log" (or "svn log -v") output from file object INPUT,
    creating LogMessage objects and crediting Contributor objects for
    every "<verb> by:" field found.  Exits the process on malformed
    input (double separators or an unparseable header line)."""
    just_saw_separator = False

    while True:
        line = input.readline()
        if line == '': break
        if line == log_separator:
            if just_saw_separator:
                # Two separators back to back means the input is bogus.
                sys.stderr.write('Two separators in a row.\n')
                sys.exit(1)
            else:
                just_saw_separator = True
            num_lines = None
            continue
        else:
            if just_saw_separator:
                # The line right after a separator must be the log header;
                # it carries revision, committer, date, and line count.
                m = log_header_re.match(line)
                if not m:
                    sys.stderr.write('Could not match log message header.\n')
                    sys.stderr.write('Line was:\n')
                    sys.stderr.write("'%s'\n" % line)
                    sys.exit(1)
                else:
                    log = LogMessage(m.group(1), m.group(2), m.group(3))
                    num_lines = int(m.group(4))
                    just_saw_separator = False
                    saw_patch = False
                    line = input.readline()
                    # Handle 'svn log -v' by waiting for the blank line
                    # (skips the changed-paths section, if any).
                    while line != '\n':
                        line = input.readline()
                    # Parse the log message body, NUM_LINES lines long.
                    field = None
                    while num_lines > 0:
                        line = input.readline()
                        log.accum(line)
                        m = field_re.match(line)
                        if m:
                            # We're on the first line of a field.  Parse the field.
                            while m:
                                if not field:
                                    # Start a new Field, resolving alias
                                    # spellings ("Reviewed" -> "Review").
                                    ident = m.group(1)
                                    if ident in field_aliases:
                                        field = Field(field_aliases[ident], ident)
                                    else:
                                        field = Field(ident)
                                # Each line begins either with "WORD by:",
                                # or with whitespace (a continuation line).
                                in_field_re = re.compile('^('
                                                         + (field.alias or field.name)
                                                         + ' by:\s+|\s+)([^\s(].*)')
                                m = in_field_re.match(line)
                                if m is None:
                                    # NOTE(review): only reports the mismatch;
                                    # the m.group(2) below would then raise
                                    # AttributeError -- confirm intended.
                                    sys.stderr.write("Error matching: %s\n" % (line))
                                user, real, email = Contributor.parse(m.group(2))
                                if user == 'me':
                                    # "me" stands for this revision's committer.
                                    user = log.committer
                                c = Contributor.get(user, real, email)
                                c.add_activity(field.name, log)
                                if (field.name == 'Patch'):
                                    saw_patch = True
                                field.add_contributor(c)
                                line = input.readline()
                                if line == log_separator:
                                    # If the log message doesn't end with its own
                                    # newline (that is, there's the newline added by the
                                    # svn client, but no further newline), then just move
                                    # on to the next log entry.
                                    just_saw_separator = True
                                    num_lines = 0
                                    break
                                log.accum(line)
                                num_lines -= 1
                                m = in_field_re.match(line)
                                if not m:
                                    # Line neither continues this field nor
                                    # (below) starts a new one: the field ends.
                                    m = field_re.match(line)
                                    if not m:
                                        # A parenthetical aside right after a
                                        # field belongs to that field.
                                        aside_match = parenthetical_aside_re.match(line)
                                        if aside_match:
                                            field.add_endum(line)
                                    log.add_field(field)
                                    field = None
                        num_lines -= 1
                    if not saw_patch and log.committer != '(no author)':
                        # No one was credited with the patch, so credit
                        # the committer.
                        c = Contributor.get(log.committer, None, None)
                        c.add_activity('Patch', log)
                    continue
+
# Boilerplate HTML inserted near the top of index.html, explaining what
# the generated report is (and is not) intended for.
index_introduction = '''
<p>The following list of contributors and their contributions is meant
to help us keep track of whom to consider for commit access.  The list
was generated from "svn&nbsp;log" output by <a
href="http://svn.apache.org/repos/asf/subversion/trunk/tools/dev/contribulyze.py"
>contribulyze.py</a>, which looks for log messages that use the <a
href="http://subversion.apache.org/docs/community-guide/conventions.html#crediting"
>special contribution format</a>.</p>

<p><i>Please do not use this list as a generic guide to who has
contributed what to Subversion!</i>  It omits existing <a
href="http://svn.apache.org/repos/asf/subversion/trunk/COMMITTERS"
>full committers</a>, for example, because they are irrelevant to our
search for new committers.  Also, it merely counts changes, it does
not evaluate them.  To truly understand what someone has contributed,
you have to read their changes in detail.  This page can only assist
human judgement, not substitute for it.</p>

'''
+
def drop(revision_url_pattern):
    """Output the data: write index.html in the current directory plus a
    detail/ page per contributor.  REVISION_URL_PATTERN, if not None, is
    a '%s' interpolation pattern used to link each revision number to a
    web-based repository viewer."""
    # The data structures are all linked up nicely to one another.  You
    # can get all the LogMessages, and each LogMessage contains all the
    # Contributors involved with that commit; likewise, each Contributor
    # points back to all the LogMessages it contributed to.
    #
    # However, the HTML output is pretty simple right now.  It doesn't
    # take full advantage of all that cross-linking.  For each
    # contributor, we just create a file listing all the revisions
    # contributed to; and we build a master index of all contributors,
    # each name being a link to that contributor's individual file.
    # Much more is possible... but let's just get this up and running first.

    for key in LogMessage.all_logs.keys():
        # You could print out all log messages this way, if you wanted to.
        pass
        # print LogMessage.all_logs[key]

    # Per-contributor pages live in this subdirectory.
    detail_subdir = "detail"
    if not os.path.exists(detail_subdir):
        os.mkdir(detail_subdir)

    index = open('index.html', 'w')
    index.write(html_header('Contributors as of r%d' % LogMessage.max_revnum))
    index.write(index_introduction)
    index.write('<ol>\n')
    # The same contributor appears under multiple keys, so uniquify.
    seen_contributors = { }
    # Sorting alphabetically is acceptable, but even better would be to
    # sort by number of contributions, so the most active people appear at
    # the top -- that way we know whom to look at first for commit access
    # proposals.
    sorted_contributors = sorted(Contributor.all_contributors.values())
    for c in sorted_contributors:
        if c not in seen_contributors:
            if c.score() > 0:
                if c.is_full_committer:
                    # Don't even bother to print out full committers.  They are
                    # a distraction from the purposes for which we're here.
                    continue
                else:
                    committerness = ''
                    if c.is_committer:
                        committerness = '&nbsp;(partial&nbsp;committer)'
                    urlpath = "%s/%s.html" % (detail_subdir, c.canonical_name())
                    fname = os.path.join(detail_subdir, "%s.html" % c.canonical_name())
                    index.write('<li><p><a href="%s">%s</a>&nbsp;[%s]%s</p></li>\n'
                                % (urllib_parse_quote(urlpath),
                                   c.big_name(html=True),
                                   c.score_str(), committerness))
                    c.html_out(revision_url_pattern, fname)
            seen_contributors[c] = True
    index.write('</ol>\n')
    index.write(html_footer())
    index.close()
+
+
def process_committers(committers):
    """Read from open file handle COMMITTERS, which should be in
    the same format as the Subversion 'COMMITTERS' file.  Create
    Contributor objects (flagged as committers) based on the contents."""
    # Skip the preamble up to the "Blanket commit access:" heading.
    line = committers.readline()
    while line and line != 'Blanket commit access:\n':
        line = committers.readline()
    if not line:
        # EOF guard: the original looped forever here when the heading
        # was missing, since readline() keeps returning '' at EOF.
        complain("'Blanket commit access:' section not found in COMMITTERS",
                 True)
    in_full_committers = True
    # Raw string: '\S', '\s', '\(' are invalid escapes in plain literals.
    matcher = re.compile(r'(\S+)\s+([^\(\)]+)\s+(\([^()]+\)){0,1}')
    line = committers.readline()
    while line:
        # Every @-sign we see after this point indicates a committer line...
        if line == 'Commit access for specific areas:\n':
            in_full_committers = False
        # ...except in the "dormant committers" area, which comes last anyway.
        if line == 'Committers who have asked to be listed as dormant:\n':
            in_full_committers = True
        elif line.find('@') >= 0:
            line = line.lstrip()
            m = matcher.match(line)
            # Skip lines the pattern can't parse instead of crashing on
            # m.group() with m == None.
            if m:
                user = m.group(1)
                real_and_email = m.group(2).strip()
                ignored, real, email = Contributor.parse(real_and_email)
                c = Contributor.get(user, real, email)
                c.is_committer = True
                c.is_full_committer = in_full_committers
        line = committers.readline()
+
+
def usage():
    """Print the command-line usage message to stdout."""
    myname = os.path.basename(sys.argv[0])
    for text in (
        'USAGE: %s [-C COMMITTERS_FILE] < SVN_LOG_OR_LOG-V_OUTPUT' % myname,
        '',
        'Create HTML files in the current directory, rooted at index.html,',
        'in which you can browse to see who contributed what.',
        '',
        'The log input should use the contribution-tracking format defined',
        'in http://subversion.apache.org/docs/community-guide/conventions.html#crediting.',
        '',
        'Options:',
        '',
        '  -h, -H, -?, --help   Print this usage message and exit',
        '  -C FILE              Use FILE as the COMMITTERS file',
        '  -U URL               Use URL as a Python interpolation pattern to',
        '                       generate URLs to link revisions to some kind',
        '                       of web-based viewer (e.g. ViewCVS).  The',
        '                       interpolation pattern should contain exactly',
        '                       one format specifier, \'%s\', which will be',
        '                       replaced with the revision number.',
        '',
    ):
        print(text)
+
+
def main():
    """Entry point: parse options, graze the log on stdin, then write
    the HTML report into the current directory."""
    try:
        opts, args = my_getopt(sys.argv[1:], 'C:U:hH?', [ 'help' ])
    except getopt.GetoptError as err:
        complain(str(err) + '\n\n')
        usage()
        sys.exit(1)

    # Parse options.
    revision_url_pattern = None
    for flag, val in opts:
        if flag in ('--help', '-h', '-H', '-?'):
            usage()
            sys.exit(0)
        if flag == '-C':
            process_committers(open(val))
        elif flag == '-U':
            revision_url_pattern = val

    # Gather the data from stdin, then emit the report.
    graze(sys.stdin)
    drop(revision_url_pattern)

if __name__ == '__main__':
    main()
diff --git a/tools/dev/datecheck.py b/tools/dev/datecheck.py
new file mode 100755
index 0000000..c5b4caf
--- /dev/null
+++ b/tools/dev/datecheck.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+### This is a debugging script to test date-ordering in an SVN repository.
+
+'''Tell which revisions are out of order w.r.t. date in a repository.
+Takes "svn log -q -r1:HEAD" output, prints results like this:
+
+ $ svn log -q -r1:HEAD | ./datecheck.py
+ [...]
+ r42 OK 2003-06-02 22:20:31 -0500
+ r43 OK 2003-06-02 22:20:31 -0500
+ r44 OK 2003-06-02 23:29:14 -0500
+ r45 OK 2003-06-02 23:29:14 -0500
+ r46 OK 2003-06-02 23:33:13 -0500
+ r47 OK 2003-06-10 15:19:47 -0500
+ r48 NOT OK 2003-06-02 23:33:13 -0500
+ r49 OK 2003-06-10 15:19:48 -0500
+ r50 NOT OK 2003-06-02 23:33:13 -0500
+ [...]
+'''
+
+import sys
+import time
+
log_msg_separator = "-" * 72 + "\n"

# Walk the "svn log -q" output on stdin.  For every revision line, parse
# the timestamp, normalize it to a single comparable value, and report
# whether it is in order w.r.t. the previous revision ("OK") or older
# ("NOT OK").  (The original used a manual readline() loop containing a
# redundant "if not line: break" that could never fire.)
last_date = 0
for line in sys.stdin:

    # Skip the dashed separator lines between log entries.
    if line == log_msg_separator:
        continue

    # We're looking at a revision line like this:
    #
    #   "r1 | svn | 2001-08-30 23:24:14 -0500 (Thu, 30 Aug 2001)"
    rev, ignored, date_full = line.split("|")
    rev = rev.strip()
    date_full = date_full.strip()

    # We only need the machine-readable portion of the date, so ignore
    # the parenthesized part on the end, which is meant for humans.

    # Get the "2004-06-02 00:15:08" part of "2004-06-02 00:15:08 -0500".
    date = date_full[0:19]
    # Get the "-0500" part of "2004-06-02 00:15:08 -0500".
    offset = date_full[20:25]

    # Parse the offset by hand and adjust the date accordingly, because
    # the time module doesn't offer a standard way to parse "-0500",
    # "-0600", etc. suffixes.
    offset_sign = offset[0:1]
    offset_hours = int(offset[1:3])
    offset_minutes = int(offset[3:5])

    # Get a first draft of the date ...
    date_as_int = time.mktime(time.strptime(date, "%Y-%m-%d %H:%M:%S"))
    # ... but it's still not correct, we must adjust for the offset.
    if offset_sign == "-":
        date_as_int -= (offset_hours * 3600) + (offset_minutes * 60)
    elif offset_sign == "+":
        date_as_int += (offset_hours * 3600) + (offset_minutes * 60)
    else:
        sys.stderr.write("Error: unknown offset sign '%s'.\n" % offset_sign)
        sys.exit(1)

    # Equal timestamps count as in-order.
    ok_not_ok = "    OK" if last_date <= date_as_int else "NOT OK"

    print("%-8s %s %s %s" % (rev, ok_not_ok, date, offset))
    last_date = date_as_int
diff --git a/tools/dev/find-bad-style.py b/tools/dev/find-bad-style.py
new file mode 100755
index 0000000..537cc3d
--- /dev/null
+++ b/tools/dev/find-bad-style.py
@@ -0,0 +1,57 @@
+#!/usr/bin/python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# Find places in our code where whitespace is erroneously used before
+# the open-paren on a function all. This is typically manifested like:
+#
+# return svn_some_function
+# (param1, param2, param3)
+#
+#
+# USAGE: find-bad-style.py FILE1 FILE2 ...
+#
+
+import sys
+import re
+
# A line that begins (apart from whitespace) with an open-paren: the
# argument list of a call whose function name dangles on the line above.
re_call = re.compile(r'^\s*\(')
# A line whose last non-blank character belongs to an identifier:
# a likely dangling function name.
re_func = re.compile(r'.*[a-z0-9_]{1,}\s*$')


def scan_file(fname):
    """Report lines of FNAME where whitespace precedes the open-paren of
    a function call, i.e. the name dangles above its argument list.

    Each hit is printed to stdout as "fname:linenum:line", naming the
    line that holds the dangling function name.
    """
    # 'with' guarantees the handle is closed (the original leaked it).
    with open(fname) as fp:
        prev = None
        for line_num, line in enumerate(fp, start=1):
            if prev is not None and re_call.match(line) and re_func.match(prev):
                # Report the *previous* line: that is where the
                # dangling function name lives.
                print('%s:%d:%s' % (fname, line_num - 1, prev.rstrip()))
            prev = line


if __name__ == '__main__':
    for fname in sys.argv[1:]:
        scan_file(fname)
diff --git a/tools/dev/find-control-statements.py b/tools/dev/find-control-statements.py
new file mode 100755
index 0000000..1c6c3b2
--- /dev/null
+++ b/tools/dev/find-control-statements.py
@@ -0,0 +1,178 @@
+#!/usr/bin/python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# Find places in our code that are part of control statements
+# i.e. "for", "if" and "while". That output is then easily
+# searched for various interesting / complex pattern.
+#
+#
+# USAGE: find-control-statements.py FILE1 FILE2 ...
+#
+
+import sys
+
# Module-level state used by print_line(): whether the current file's
# name has been printed yet, and the number of the last printed line
# (so consecutive lines are shown without repeating the line number).
header_shown = False
last_line_num = None

def print_line(fname, line_num, line):
    """ Print LINE of number LINE_NUM in file FNAME.
        Show FNAME only once per file and LINE_NUM only for
        non-consecutive lines.
    """
    global header_shown
    global last_line_num

    if not header_shown:
        print('')
        print(fname)
        header_shown = True

    # LINE already carries its newline, so suppress print()'s own.
    # (The original used a Python 2 trailing-comma print, which under
    # Python 3 emitted a spurious blank line after every source line.)
    if last_line_num and (last_line_num + 1 == line_num):
        print("      %s" % line, end='')
    else:
        print('%5d:%s' % (line_num, line), end='')

    last_line_num = line_num
+
def is_control(line, index, word):
    """ Return whether LINE[INDEX] is actually the start of the control
        statement WORD: it must be preceded by a separator (or sit at
        the start of the line), and the next non-blank character after
        WORD must be an opening parenthesis.
    """
    # WORD must not be the tail of a longer identifier.
    if index > 0 and line[index - 1] not in (' ', '\t', ';'):
        return False

    # Locate the '(' that has to follow WORD ...
    start = index + len(word)
    paren_at = line.find('(', start)
    if paren_at == -1:
        return False

    # ... with nothing but blanks in between.
    return all(ch in (' ', '\t') for ch in line[start:paren_at])
+
def find_specific_control(line, control):
    """ Return the first offset at which CONTROL occurs in LINE as a
        genuine control statement (see is_control), or -1 if never.
    """
    pos = line.find(control)
    while pos != -1:
        if is_control(line, pos, control):
            return pos
        # False alarm (e.g. part of an identifier); resume the scan
        # right after this occurrence.
        pos = line.find(control, pos + len(control))
    return -1
+
def find_control(line):
    """ Return the offset of the first control statement ("for", "if"
        or "while") in LINE, or -1 if there is none.
    """
    hits = [offset
            for offset in (find_specific_control(line, keyword)
                           for keyword in ("for", "if", "while"))
            if offset >= 0]
    return min(hits) if hits else -1
+
def parantheses_delta(line):
    """ Return the number of opening minus the number of closing
        parantheses in LINE.  Parentheses inside string or character
        literals are not counted; backslash escapes are honored in both.
    """
    escaped = False
    in_squote = False
    in_dquote = False

    delta = 0

    for c in line:
        if escaped:
            # Previous character was a backslash: this one is literal.
            escaped = False

        elif in_dquote:
            if c == '\\':
                escaped = True
            elif c == '"':
                in_dquote = False

        elif in_squote:
            if c == '\\':
                escaped = True
            elif c == "'":
                in_squote = False

        elif c == '(':
            delta += 1
        elif c == ')':
            delta -= 1
        elif c == '"':
            in_dquote = True
        elif c == "'":
            # The original read "in_squote -= True" here -- a typo that
            # only worked by accident (False - True == -1 is truthy).
            in_squote = True

    return delta
+
def scan_file(fname):
    """ Print (via print_line) every line of FNAME that contains a
        control statement, plus the continuation lines of any statement
        whose parentheses are not yet balanced.
    """
    # 'with' guarantees the handle is closed (the original leaked it).
    with open(fname) as source:
        lines = source.readlines()

    parantheses_level = 0

    for line_num, line in enumerate(lines, start=1):
        if parantheses_level > 0:
            # Still inside an unbalanced control statement: the whole
            # line belongs to it.
            index = 0
        else:
            index = find_control(line)

        if index >= 0:
            print_line(fname, line_num, line)
            parantheses_level += parantheses_delta(line[index:])
+
if __name__ == '__main__':
    # Process each file named on the command line, resetting the
    # per-file output state (see print_line) before every file so each
    # gets its own header line and fresh line numbering.
    for fname in sys.argv[1:]:
        header_shown = False
        last_line_num = None
        scan_file(fname)
diff --git a/tools/dev/find-unmoved-deprecated.sh b/tools/dev/find-unmoved-deprecated.sh
new file mode 100755
index 0000000..c689853
--- /dev/null
+++ b/tools/dev/find-unmoved-deprecated.sh
@@ -0,0 +1,36 @@
+#!/bin/sh
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# Find functions marked a SVN_DEPRECATED, but which have not been moved
+# to their associated deprecated.c file.
+#
+# Run this from within the subversion/include/ directory.
+#
+
# Collect the names of all functions marked SVN_DEPRECATED in the public
# headers: keep the two lines following each marker (grep -F: fixed
# string; fgrep is deprecated) and let sed extract the function name
# from the "svn_..." declaration line.
deprecated="`cat svn_*.h | grep -F -A 2 SVN_DEPRECATED | sed -n '/^svn_/s/(.*//p'`"

# Report every deprecated function that has no implementation in any
# per-library deprecated.c file.  (The original used a no-op
# /usr/bin/true branch; negating the test reads better.  $func is
# quoted to guard against word splitting/globbing.)
for func in $deprecated ; do
  if ! grep -q "${func}(" ../*/deprecated.c ; then
    echo "$func was not found"
  fi
done
diff --git a/tools/dev/fsfs-access-map.c b/tools/dev/fsfs-access-map.c
new file mode 100644
index 0000000..7f670ee
--- /dev/null
+++ b/tools/dev/fsfs-access-map.c
@@ -0,0 +1,794 @@
+/* fsfs-access-map.c -- convert strace output into FSFS access bitmap
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include "svn_pools.h"
+#include "svn_string.h"
+#include "svn_io.h"
+
+#include "private/svn_string_private.h"
+
/* The information we gather for each file.  There will be one instance
 * per file name - even if the file got deleted and re-created.
 */
typedef struct file_stats_t
{
  /* file name as found in the open() call */
  const char *name;

  /* file size as determined during the tool run.  Will be 0 for
   * files that no longer exist.  However, there may still be entries
   * in the read_map. */
  apr_int64_t size;

  /* for rev files (packed or non-packed), this will be the first revision
   * in that file.  -1 for non-rev files. */
  apr_int64_t rev_num;

  /* number of times this file got opened */
  apr_int64_t open_count;

  /* number of lseek calls */
  apr_int64_t seek_count;

  /* number of lseek calls to clusters not previously read */
  apr_int64_t uncached_seek_count;

  /* number of lseek calls not followed by a read */
  apr_int64_t unnecessary_seeks;

  /* number of read() calls */
  apr_int64_t read_count;

  /* number of read() calls that returned 0 bytes */
  apr_int64_t empty_reads;

  /* total number of bytes returned by those reads */
  apr_int64_t read_size;

  /* number of clusters read */
  apr_int64_t clusters_read;

  /* number of different clusters read
   * (i.e. number of non-zero entries in read_map). */
  apr_int64_t unique_clusters_read;

  /* cluster -> read count mapping (1 word per cluster, saturated at 64k) */
  apr_array_header_t *read_map;

} file_stats_t;

/* Represents an open file handle.  It refers to a file and concatenates
 * consecutive reads such that we don't artificially hit the same cluster
 * multiple times.  Instances of this type will be reused to limit the
 * allocation load on the lookup map.
 */
typedef struct handle_info_t
{
  /* the open file */
  file_stats_t *file;

  /* file offset at which the current series of reads started (default: 0) */
  apr_int64_t last_read_start;

  /* bytes read so far in the current series of reads (default: 0) */
  apr_int64_t last_read_size;

  /* number of read() calls in this series */
  apr_int64_t read_count;
} handle_info_t;

/* useful typedefs: BYTE for raw octets, WORD for the saturated 16-bit
 * per-cluster read counters */
typedef unsigned char byte;
typedef unsigned short word;

/* an RGB color, stored in BMP byte order (B, G, R - see select_color) */
typedef byte color_t[3];

/* global const char * file name -> *file_stats_t map */
static apr_hash_t *files = NULL;

/* global int handle -> *handle_info_t map.  Entries don't get removed
 * by close().  Instead, we simply recycle (and re-initialize) existing
 * instances. */
static apr_hash_t *handles = NULL;

/* assumed cluster size.  64 and 128kB are typical values for RAIDs. */
static apr_int64_t cluster_size = 64 * 1024;
+
/* Call this after a sequence of reads has been ended by either close()
 * or lseek() for this HANDLE_INFO.  This will update the read_map and
 * unique_clusters_read members of the underlying file_stats_t structure.
 */
static void
store_read_info(handle_info_t *handle_info)
{
  if (handle_info->last_read_size)
    {
      apr_size_t i;
      apr_size_t first_cluster
         = (apr_size_t)(handle_info->last_read_start / cluster_size);
      apr_size_t last_cluster
         = (apr_size_t)((  handle_info->last_read_start
                         + handle_info->last_read_size
                         - 1) / cluster_size);

      /* auto-expand access map in case the file later shrunk or got
       * deleted */
      while (handle_info->file->read_map->nelts <= last_cluster)
        APR_ARRAY_PUSH(handle_info->file->read_map, word) = 0;

      /* accumulate the accesses per cluster.  Saturate the counters at
       * 0xffff and count first (i.e. disjoint) cluster accesses. */
      handle_info->file->clusters_read += last_cluster - first_cluster + 1;
      for (i = first_cluster; i <= last_cluster; ++i)
        {
          word *count = &APR_ARRAY_IDX(handle_info->file->read_map, i, word);
          if (*count == 0)
            handle_info->file->unique_clusters_read++;
          if (*count < 0xffff)
            ++*count;
        }
    }
  else if (handle_info->read_count == 0)
    {
      /* a seek directly after another seek, without any read in between */
      handle_info->file->unnecessary_seeks++;
    }
}
+
+/* Handle a open() call. Ensures that a file_info_t for the given NAME
+ * exists. Auto-create and initialize a handle_info_t for it linked to
+ * HANDLE.
+ */
+static void
+open_file(const char *name, int handle)
+{
+ file_stats_t *file = apr_hash_get(files, name, APR_HASH_KEY_STRING);
+ handle_info_t *handle_info = apr_hash_get(handles, &handle, sizeof(handle));
+
+ /* auto-create file info */
+ if (!file)
+ {
+ apr_pool_t *pool = apr_hash_pool_get(files);
+ apr_pool_t *subpool = svn_pool_create(pool);
+
+ apr_file_t *apr_file = NULL;
+ apr_finfo_t finfo = { 0 };
+ int cluster_count = 0;
+
+ /* determine file size (if file still exists) */
+ apr_file_open(&apr_file, name,
+ APR_READ | APR_BUFFERED, APR_OS_DEFAULT, subpool);
+ if (apr_file)
+ apr_file_info_get(&finfo, APR_FINFO_SIZE, apr_file);
+ svn_pool_destroy(subpool);
+
+ file = apr_pcalloc(pool, sizeof(*file));
+ file->name = apr_pstrdup(pool, name);
+ file->size = finfo.size;
+
+ /* pre-allocate cluster map accordingly
+ * (will be auto-expanded later if necessary) */
+ cluster_count = (int)(1 + (file->size - 1) / cluster_size);
+ file->read_map = apr_array_make(pool, file->size
+ ? cluster_count
+ : 1, sizeof(word));
+
+ while (file->read_map->nelts < cluster_count)
+ APR_ARRAY_PUSH(file->read_map, byte) = 0;
+
+ /* determine first revision of rev / packed rev files */
+ if (strstr(name, "/db/revs/") != NULL && strstr(name, "manifest") == NULL)
+ if (strstr(name, ".pack/pack") != NULL)
+ file->rev_num = SVN_STR_TO_REV(strstr(name, "/db/revs/") + 9);
+ else
+ file->rev_num = SVN_STR_TO_REV(strrchr(name, '/') + 1);
+ else
+ file->rev_num = -1;
+
+ /* filter out log/phys index files */
+ if (file->rev_num >= 0)
+ {
+ const char *suffix = name + strlen(name) - 4;
+ if (strcmp(suffix, ".l2p") == 0 || strcmp(suffix, ".p2l") == 0)
+ file->rev_num = -1;
+ }
+
+ apr_hash_set(files, file->name, APR_HASH_KEY_STRING, file);
+ }
+
+ file->open_count++;
+
+ /* auto-create handle instance */
+ if (!handle_info)
+ {
+ apr_pool_t *pool = apr_hash_pool_get(handles);
+ int *key = apr_palloc(pool, sizeof(*key));
+ *key = handle;
+
+ handle_info = apr_pcalloc(pool, sizeof(*handle_info));
+ apr_hash_set(handles, key, sizeof(*key), handle_info);
+ }
+
+ /* link handle to file */
+ handle_info->file = file;
+ handle_info->last_read_start = 0;
+ handle_info->last_read_size = 0;
+}
+
+/* COUNT bytes have been read from file with the given HANDLE.
+ */
+static void
+read_file(int handle, apr_int64_t count)
+{
+ handle_info_t *handle_info = apr_hash_get(handles, &handle, sizeof(handle));
+ if (handle_info)
+ {
+ /* known file handle -> expand current read sequence */
+
+ handle_info->read_count++;
+ handle_info->last_read_size += count;
+ handle_info->file->read_count++;
+ handle_info->file->read_size += count;
+
+ if (count == 0)
+ handle_info->file->empty_reads++;
+ }
+}
+
+/* Seek to offset LOCATION in file given by HANDLE.
+ */
+static void
+seek_file(int handle, apr_int64_t location)
+{
+ handle_info_t *handle_info = apr_hash_get(handles, &handle, sizeof(handle));
+ if (handle_info)
+ {
+ /* known file handle -> end current read sequence and start a new one */
+
+ apr_size_t cluster = (apr_size_t)(location / cluster_size);
+
+ store_read_info(handle_info);
+
+ handle_info->last_read_size = 0;
+ handle_info->last_read_start = location;
+ handle_info->read_count = 0;
+ handle_info->file->seek_count++;
+
+ /* if we seek to a location that had not been read from before,
+ * there will probably be a real I/O seek on the following read.
+ */
+ if ( handle_info->file->read_map->nelts <= cluster
+ || APR_ARRAY_IDX(handle_info->file->read_map, cluster, word) == 0)
+ handle_info->file->uncached_seek_count++;
+ }
+}
+
+/* The given file HANDLE has been closed.
+ */
+static void
+close_file(int handle)
+{
+ /* for known file handles, end current read sequence */
+
+ handle_info_t *handle_info = apr_hash_get(handles, &handle, sizeof(handle));
+ if (handle_info)
+ store_read_info(handle_info);
+}
+
/* Parse / process one non-empty LINE of strace output.  The buffer is
 * tokenized in place by writing NUL terminators into it.
 */
static void
parse_line(svn_stringbuf_t *line)
{
  /* determine function name, first parameter and return value */
  char *func_end = strchr(line->data, '(');
  char *return_value = strrchr(line->data, ' ');
  char *first_param_end;
  apr_int64_t func_return = 0;
  char *func_start = strchr(line->data, ' ');

  if (func_end == NULL || return_value == NULL)
    return;

  /* skip anything before the function name -- presumably a PID prefix
   * as emitted by strace when following children; confirm */
  if (func_start == NULL || func_start > func_end)
    func_start = line->data;
  else
    while (*func_start == ' ')
      func_start++;

  first_param_end = strchr(func_end, ',');
  if (first_param_end == NULL)
    first_param_end = strchr(func_end, ')');

  if (first_param_end == NULL)
    return;

  /* NUL-terminate the function name and the first parameter in place */
  *func_end++ = 0;
  *first_param_end = 0;
  ++return_value;

  /* (try to) convert the return value into an integer.
   * If that fails, continue anyway as defaulting to 0 will be safe for us. */
  svn_error_clear(svn_cstring_atoi64(&func_return, return_value));

  /* process those operations that we care about */
  if (strcmp(func_start, "open") == 0)
    {
      /* remove the double quotes around the file name parameter */
      *func_end++ = 0;
      *--first_param_end = 0;

      open_file(func_end, (int)func_return);
    }
  else if (strcmp(func_start, "read") == 0)
    read_file(atoi(func_end), func_return);
  else if (strcmp(func_start, "lseek") == 0)
    seek_file(atoi(func_end), func_return);
  else if (strcmp(func_start, "close") == 0)
    close_file(atoi(func_end));
}
+
+/* Process the strace output stored in FILE.
+ */
+static void
+parse_file(apr_file_t *file)
+{
+ apr_pool_t *pool = svn_pool_create(NULL);
+ apr_pool_t *iterpool = svn_pool_create(pool);
+
+ /* limit lines to 4k (usually, we need less than 200 bytes) */
+ svn_stringbuf_t *line = svn_stringbuf_create_ensure(4096, pool);
+
+ do
+ {
+ svn_error_t *err = NULL;
+
+ line->len = line->blocksize-1;
+ err = svn_io_read_length_line(file, line->data, &line->len, iterpool);
+ svn_error_clear(err);
+ if (err)
+ break;
+
+ parse_line(line);
+ svn_pool_clear(iterpool);
+ }
+ while (line->len > 0);
+}
+
+/* qsort() callback. Sort files by revision number.
+ */
+static int
+compare_files(file_stats_t **lhs, file_stats_t **rhs)
+{
+ return (*lhs)->rev_num < (*rhs)->rev_num;
+}
+
/* Return all rev (and packed rev) files sorted by revision number.
 * Allocate the result in POOL.
 */
static apr_array_header_t *
get_rev_files(apr_pool_t *pool)
{
  apr_hash_index_t *hi;
  apr_array_header_t *result = apr_array_make(pool,
                                              apr_hash_count(files),
                                              sizeof(file_stats_t *));

  /* select all files that have a rev number (rev_num >= 0; set by
   * open_file() for files under /db/revs/) */
  for (hi = apr_hash_first(pool, files); hi; hi = apr_hash_next(hi))
    {
      const char *name = NULL;
      apr_ssize_t len = 0;
      file_stats_t *file = NULL;

      apr_hash_this(hi, (const void **)&name, &len, (void**)&file);
      if (file->rev_num >= 0)
        APR_ARRAY_PUSH(result, file_stats_t *) = file;
    }

  /* sort them (the cast adapts the typed comparator to qsort's
   * generic signature) */
  qsort(result->elts, result->nelts, result->elt_size,
        (int (*)(const void *, const void *))compare_files);

  /* return the result */
  return result;
}
+
+/* store VALUE to DEST in little-endian format. Assume that the target
+ * buffer is filled with 0.
+ */
+static void
+write_number(byte *dest, int value)
+{
+ while (value)
+ {
+ *dest = (byte)(value % 256);
+ value /= 256;
+ ++dest;
+ }
+}
+
/* Linearly interpolate the y value for X between the points (X0, Y0)
 * and (X1, Y1), using integer arithmetic (truncating division).
 * X0 must differ from X1.
 */
static int
interpolate(int y0, int x0, int y1, int x1, int x)
{
  int dy = y1 - y0;
  int dx = x1 - x0;

  return y0 + (dy * (x - x0)) / dx;
}
+
/* Return the BMP-encoded 24 bit COLOR for the given VALUE (a saturated
 * per-cluster read counter).
 */
static void
select_color(byte color[3], word value)
{
  enum { COLOR_COUNT = 10 };

  /* value -> color table.  Missing values get interpolated.
   * { count, B - G - R } */
  word table[COLOR_COUNT][4] =
    {
      { 0, 255, 255, 255 },      /* unread -> white */
      { 1, 64, 128, 0 },         /* read once -> turquoise */
      { 2, 0, 128, 0 },          /* twice -> green */
      { 8, 0, 192, 192 },        /* 8x -> yellow */
      { 64, 0, 0, 192 },         /* 64x -> red */
      { 256, 64, 32, 230 },      /* 256x -> bright red */
      { 512, 192, 0, 128 },      /* 512x -> purple */
      { 1024, 96, 32, 96 },      /* 1024x -> UV purple */
      { 4096, 32, 16, 32 },      /* 4096x -> EUV purple */
      { 65535, 0, 0, 0 }         /* max -> black */
    };

  /* find the first table entry whose count is >= VALUE.  (VALUE is a
   * word, so it can never exceed the last entry's count of 65535.) */
  int i;
  for (i = 0; i < COLOR_COUNT; ++i)
    if (table[i][0] >= value)
      break;

  /* exact match? */
  if (table[i][0] == value)
    {
      color[0] = (byte)table[i][1];
      color[1] = (byte)table[i][2];
      color[2] = (byte)table[i][3];
    }
  else
    {
      /* interpolate between the two surrounding entries.  (i > 0 here:
       * entry 0 has count 0, which any non-matching VALUE exceeds.) */
      color[0] = (byte)interpolate(table[i-1][1], table[i-1][0],
                                   table[i][1], table[i][0],
                                   value);
      color[1] = (byte)interpolate(table[i-1][2], table[i-1][0],
                                   table[i][2], table[i][0],
                                   value);
      color[2] = (byte)interpolate(table[i-1][3], table[i-1][0],
                                   table[i][3], table[i][0],
                                   value);
    }
}
+
/* Writes a BMP image header to FILE for a 24-bit color picture of the
 * given XSIZE and YSIZE dimension.
 */
static void
write_bitmap_header(apr_file_t *file, int xsize, int ysize)
{
  /* BMP file header; all multi-byte fields are little-endian (some
   * values need to be filled in later) */
  byte header[54] =
    {
      'B', 'M',        /* magic */
      0, 0, 0, 0,      /* file size (to be written later) */
      0, 0, 0, 0,      /* reserved, unused */
      54, 0, 0, 0,     /* pixel map starts at offset 54dec */

      40, 0, 0, 0,     /* DIB header has 40 bytes */
      0, 0, 0, 0,      /* x size in pixel */
      0, 0, 0, 0,      /* y size in pixel */
      1, 0,            /* 1 color plane */
      24, 0,           /* 24 bits / pixel */
      0, 0, 0, 0,      /* no pixel compression used */
      0, 0, 0, 0,      /* size of pixel array (to be written later) */
      0xe8, 3, 0, 0,   /* 1 pixel / mm */
      0xe8, 3, 0, 0,   /* 1 pixel / mm */
      0, 0, 0, 0,      /* no colors in palette */
      0, 0, 0, 0       /* no colors to import */
    };

  apr_size_t written;

  /* rows in BMP files must be aligned to 4 bytes */
  int row_size = APR_ALIGN(xsize * 3, 4);

  /* fill in the size and dimension fields (little-endian) */
  write_number(header + 2, ysize * row_size + 54);
  write_number(header + 18, xsize);
  write_number(header + 22, ysize);
  write_number(header + 38, ysize * row_size);

  /* write header to file */
  written = sizeof(header);
  apr_file_write(file, header, &written);
}
+
/* To COLOR, add the fractional value of SOURCE from fractional indexes
 * SOURCE_START to SOURCE_END and apply the SCALING_FACTOR.
 */
static void
add_sample(color_t color,
           color_t *source,
           double source_start,
           double source_end,
           double scaling_factor)
{
  /* weight of this fragment relative to one whole destination pixel */
  double factor = (source_end - source_start) / scaling_factor;

  apr_size_t i;
  for (i = 0; i < sizeof(color_t) / sizeof(*color); ++i)
    /* NOTE(review): for a narrow fragment (< 0.5 wide, past index 1)
     * the *previous* source pixel is sampled instead of the one the
     * fragment starts in -- presumably to counter rounding at fragment
     * borders; confirm the intent. */
    color[i] += (source_end - source_start < 0.5) && source_start > 1.0
              ? factor * source[(apr_size_t)source_start - 1][i]
              : factor * source[(apr_size_t)source_start][i];
}
+
/* Scale the IN_LEN RGB values from IN to OUT_LEN RGB values in OUT.
 */
static void
scale_line(color_t* out,
           int out_len,
           color_t *in,
           int in_len)
{
  /* number of source pixels contributing to one destination pixel */
  double scaling_factor = (double)(in_len) / (double)(out_len);

  apr_size_t i;
  memset(out, 0, out_len * sizeof(color_t));
  for (i = 0; i < out_len; ++i)
    {
      color_t color = { 0 };

      /* fractional source range that maps onto destination pixel I */
      double source_start = i * scaling_factor;
      double source_end = (i + 1) * scaling_factor;

      if ((apr_size_t)source_start == (apr_size_t)source_end)
        {
          /* the whole range falls within a single source pixel */
          add_sample(color, in, source_start, source_end, scaling_factor);
        }
      else
        {
          /* weighted sum of the partial first and last source pixels
           * plus all whole pixels in between */
          apr_size_t k;
          apr_size_t first_sample_end = (apr_size_t)source_start + 1;
          apr_size_t last_sample_start = (apr_size_t)source_end;

          add_sample(color, in, source_start, first_sample_end, scaling_factor);
          for (k = first_sample_end; k < last_sample_start; ++k)
            add_sample(color, in, k, k + 1, scaling_factor);

          add_sample(color, in, last_sample_start, source_end, scaling_factor);
        }

      memcpy(out[i], color, sizeof(color));
    }
}
+
+/* Write the cluster read map for all files in INFO as BMP image to FILE.
+ * If MAX_X is not 0, scale all lines to MAX_X pixels. Use POOL for
+ * allocations.
+ */
+static void
+write_bitmap(apr_array_header_t *info,
+ int max_x,
+ apr_file_t *file,
+ apr_pool_t *pool)
+{
+ int ysize = info->nelts;
+ int xsize = 0;
+ int x, y;
+ apr_size_t row_size;
+ apr_size_t written;
+ color_t *line, *scaled_line;
+ svn_boolean_t do_scale = max_x > 0;
+
+ /* xsize = max cluster number */
+ for (y = 0; y < ysize; ++y)
+ if (xsize < APR_ARRAY_IDX(info, y, file_stats_t *)->read_map->nelts)
+ xsize = APR_ARRAY_IDX(info, y, file_stats_t *)->read_map->nelts;
+
+ /* limit picture dimensions (16k pixels in each direction) */
+ if (xsize >= 0x4000)
+ xsize = 0x3fff;
+ if (ysize >= 0x4000)
+ ysize = 0x3fff;
+ if (max_x == 0)
+ max_x = xsize;
+
+ /* rows in BMP files must be aligned to 4 bytes */
+ row_size = APR_ALIGN(max_x * sizeof(color_t), 4);
+
+ /**/
+ line = apr_pcalloc(pool, xsize * sizeof(color_t));
+ scaled_line = apr_pcalloc(pool, row_size);
+
+ /* write header to file */
+ write_bitmap_header(file, max_x, ysize);
+
+ /* write all rows */
+ for (y = 0; y < ysize; ++y)
+ {
+ file_stats_t *file_info = APR_ARRAY_IDX(info, y, file_stats_t *);
+ int block_count = file_info->read_map->nelts;
+ for (x = 0; x < xsize; ++x)
+ {
+ color_t color = { 128, 128, 128 };
+ if (x < block_count)
+ {
+ word count = APR_ARRAY_IDX(file_info->read_map, x, word);
+ select_color(color, count);
+ }
+
+ memcpy(line[x], color, sizeof(color));
+ }
+
+ scale_line(scaled_line, max_x, line, block_count ? block_count : 1);
+
+ written = row_size;
+ apr_file_write(file, do_scale ? scaled_line : line, &written);
+ }
+}
+
+/* write a color bar with (roughly) logarithmic scale as BMP image to FILE.
+ */
+static void
+write_scale(apr_file_t *file)
+{
+ int x;
+ word value = 0, inc = 1;
+
+ /* write header to file */
+ write_bitmap_header(file, 64, 1);
+
+ for (x = 0; x < 64; ++x)
+ {
+ apr_size_t written;
+ byte color[3] = { 128, 128, 128 };
+
+ select_color(color, value);
+ if (value + (int)inc < 0x10000)
+ {
+ value += inc;
+ if (value >= 8 * inc)
+ inc *= 2;
+ }
+
+ written = sizeof(color);
+ apr_file_write(file, color, &written);
+ }
+}
+
/* Write a summary of the I/O ops to stdout.
 * Use POOL for temporaries.
 */
static void
print_stats(apr_pool_t *pool)
{
  apr_int64_t open_count = 0;
  apr_int64_t seek_count = 0;
  apr_int64_t read_count = 0;
  apr_int64_t read_size = 0;
  apr_int64_t clusters_read = 0;
  apr_int64_t unique_clusters_read = 0;
  apr_int64_t uncached_seek_count = 0;
  apr_int64_t unnecessary_seek_count = 0;
  apr_int64_t empty_read_count = 0;

  /* aggregate the per-file counters over all traced files */
  apr_hash_index_t *hi;
  for (hi = apr_hash_first(pool, files); hi; hi = apr_hash_next(hi))
    {
      const char *name = NULL;
      apr_ssize_t len = 0;
      file_stats_t *file = NULL;

      apr_hash_this(hi, (const void **)&name, &len, (void**)&file);

      open_count += file->open_count;
      seek_count += file->seek_count;
      read_count += file->read_count;
      read_size += file->read_size;
      clusters_read += file->clusters_read;
      unique_clusters_read += file->unique_clusters_read;
      uncached_seek_count += file->uncached_seek_count;
      unnecessary_seek_count += file->unnecessary_seeks;
      empty_read_count += file->empty_reads;
    }

  /* print the totals with thousands separators for readability */
  printf("%20s files\n", svn__i64toa_sep(apr_hash_count(files), ',', pool));
  printf("%20s files opened\n", svn__i64toa_sep(open_count, ',', pool));
  printf("%20s seeks\n", svn__i64toa_sep(seek_count, ',', pool));
  printf("%20s unnecessary seeks\n", svn__i64toa_sep(unnecessary_seek_count, ',', pool));
  printf("%20s uncached seeks\n", svn__i64toa_sep(uncached_seek_count, ',', pool));
  printf("%20s reads\n", svn__i64toa_sep(read_count, ',', pool));
  printf("%20s empty reads\n", svn__i64toa_sep(empty_read_count, ',', pool));
  printf("%20s unique clusters read\n", svn__i64toa_sep(unique_clusters_read, ',', pool));
  printf("%20s clusters read\n", svn__i64toa_sep(clusters_read, ',', pool));
  printf("%20s bytes read\n", svn__i64toa_sep(read_size, ',', pool));
}
+
/* Print a short help text to stdout. */
static void
print_usage(void)
{
  /* one call; adjacent string literals concatenate to the same output */
  printf("fsfs-access-map <file>\n\n"
         "Reads strace of some FSFS-based tool from <file>, prints some stats\n"
         "and writes a cluster access map to 'access.bmp' the current folder.\n"
         "Each pixel corresponds to one 64kB cluster and every line to a rev\n"
         "or packed rev file in the repository. Turquoise and green indicate\n"
         "1 and 2 hits, yellow to read-ish colors for up to 20, shares of\n"
         "for up to 100 and black for > 200 hits.\n\n"
         "A typical strace invocation looks like this:\n"
         "strace -e trace=open,close,read,lseek -o strace.txt svn log ...\n");
}
+
/* linear control flow */
int main(int argc, const char *argv[])
{
  apr_pool_t *pool = NULL;
  apr_file_t *file = NULL;

  apr_initialize();
  atexit(apr_terminate);

  pool = svn_pool_create(NULL);
  files = apr_hash_make(pool);
  handles = apr_hash_make(pool);

  /* open the strace log named on the command line; on a wrong argument
   * count or a failed open, FILE stays NULL and we only print usage */
  if (argc == 2)
    apr_file_open(&file, argv[1], APR_READ | APR_BUFFERED, APR_OS_DEFAULT,
                  pool);
  if (file == NULL)
    {
      print_usage();
      return 0;
    }
  parse_file(file);
  apr_file_close(file);

  print_stats(pool);

  /* unscaled access map: one pixel per cluster, one row per rev file */
  apr_file_open(&file, "access.bmp",
                APR_WRITE | APR_CREATE | APR_TRUNCATE | APR_BUFFERED,
                APR_OS_DEFAULT, pool);
  write_bitmap(get_rev_files(pool), 0, file, pool);
  apr_file_close(file);

  /* the same map with every row scaled to a fixed 1024 pixel width */
  apr_file_open(&file, "access_scaled.bmp",
                APR_WRITE | APR_CREATE | APR_TRUNCATE | APR_BUFFERED,
                APR_OS_DEFAULT, pool);
  write_bitmap(get_rev_files(pool), 1024, file, pool);
  apr_file_close(file);

  /* color legend for the two maps above */
  apr_file_open(&file, "scale.bmp",
                APR_WRITE | APR_CREATE | APR_TRUNCATE | APR_BUFFERED,
                APR_OS_DEFAULT, pool);
  write_scale(file);
  apr_file_close(file);

  return 0;
}
diff --git a/tools/dev/gdb-py/README b/tools/dev/gdb-py/README
new file mode 100644
index 0000000..38133f1
--- /dev/null
+++ b/tools/dev/gdb-py/README
@@ -0,0 +1,29 @@
+This directory includes a Python module that integrates with gdb and can be
+used to pretty-print various Subversion types.  For additional
+information about gdb pretty-printing, see:
+
+ http://sourceware.org/gdb/onlinedocs/gdb/Pretty-Printing.html
+
+
+How to Use
+----------
+To enable pretty printing of selected Subversion types, put the following code
+in your ~/.gdbinit:
+
+[[[
+python
+import sys, os.path
+sys.path.insert(0, os.path.expanduser('~/dev/svn-trunk/tools/dev/gdb-py'))
+from svndbg.printers import register_libsvn_printers
+register_libsvn_printers(None)
+end
+]]]
+
+Change the path to point to the correct location on your platform for the
+gdb-py directory, and then load gdb. Everything should Just Work.
+(I believe this requires gdb >= 7.0, but earlier versions may also work.)
+
+The list of currently supported types for pretty printing is a bit lacking,
+so should you run into a type which could be useful to be pretty printed,
+read the documentation referenced above and follow the existing examples
+to extend the pretty-printing support. Enjoy!
diff --git a/tools/dev/gdb-py/svndbg/__init__.py b/tools/dev/gdb-py/svndbg/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tools/dev/gdb-py/svndbg/__init__.py
diff --git a/tools/dev/gdb-py/svndbg/printers.py b/tools/dev/gdb-py/svndbg/printers.py
new file mode 100644
index 0000000..f1ee085
--- /dev/null
+++ b/tools/dev/gdb-py/svndbg/printers.py
@@ -0,0 +1,417 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+import gdb
+import re
+
+import gdb.printing
+from gdb.printing import RegexpCollectionPrettyPrinter
+
+
class TypedefRegexCollectionPrettyPrinter(RegexpCollectionPrettyPrinter):
    """Class for implementing a collection of pretty-printers, matching the
    type name to a regular expression.

    A pretty-printer in this collection will be used if the type of the
    value to be printed matches the printer's regular expression, or if
    the value is a pointer to and/or typedef to a type name that matches
    its regular expression.  The variations are tried in this order:

      1. the type name as known to the debugger (could be a 'typedef');
      2. the type after stripping off any number of layers of 'typedef';
      3. if it is a pointer, the pointed-to type;
      4. if it is a pointer, the pointed-to type minus some 'typedef's.

    In all cases, ignore 'const' and 'volatile' qualifiers.  When
    matching the pointed-to type, dereference the value or use 'None' if
    the value was a null pointer.

    This class is modeled on RegexpCollectionPrettyPrinter, which (in GDB
    7.3) matches on the base type's tag name and can't match a pointer
    type or any other type that doesn't have a tag name.
    """

    def __init__(self, name):
        super(TypedefRegexCollectionPrettyPrinter, self).__init__(name)

    def __call__(self, val):
        """Find and return an instantiation of a printer for VAL.
        """

        def lookup_type(type, val):
            """Return the first printer whose regular expression matches the
            name (tag name for struct/union/enum types) of TYPE, ignoring
            any 'const' or 'volatile' qualifiers.

            VAL is a gdb.Value, or may be None to indicate a dereferenced
            null pointer.  TYPE is the associated gdb.Type.
            """
            # Struct/union/enum types are matched on their tag name; any
            # other type is matched on its full unqualified name.
            if type.code in [gdb.TYPE_CODE_STRUCT, gdb.TYPE_CODE_UNION,
                             gdb.TYPE_CODE_ENUM]:
                typename = type.tag
            else:
                typename = str(type.unqualified())
            for printer in self.subprinters:
                if printer.enabled and printer.compiled_re.search(typename):
                    return printer.gen_printer(val)
            # Implicitly returns None when no subprinter matched.

        def lookup_type_or_alias(type, val):
            """Return the first printer matching TYPE, or else if TYPE is a
            typedef then the first printer matching the aliased type.

            VAL is a gdb.Value, or may be None to indicate a dereferenced
            null pointer.  TYPE is the associated gdb.Type.
            """
            # First, look for a printer for the given (but unqualified) type.
            printer = lookup_type(type, val)
            if printer:
                return printer

            # If it's a typedef, look for a printer for the aliased type ...
            # (peeling one typedef layer per iteration: steps 2 and 4).
            while type.code == gdb.TYPE_CODE_TYPEDEF:
                type = type.target()
                printer = lookup_type(type, val)
                if printer:
                    return printer

        # First, look for a printer for the given (but unqualified) type, or
        # its aliased type if it's a typedef.  (Steps 1 and 2.)
        printer = lookup_type_or_alias(val.type, val)
        if printer:
            return printer

        # If it's a pointer, look for a printer for the pointed-to type.
        # (Steps 3 and 4; a null pointer is passed through as None.)
        if val.type.code == gdb.TYPE_CODE_PTR:
            type = val.type.target()
            printer = lookup_type_or_alias(
                type, val and val.dereference() or None)
            if printer:
                return printer

        # Cannot find a matching pretty printer in this collection.
        return None
+
class InferiorFunction:
    """A class whose instances are callable functions on the inferior
    process.
    """
    def __init__(self, function_name):
        # Symbol name in the inferior; resolution is deferred to first call
        # so instances can be created before any object file is loaded.
        self.function_name = function_name
        self.func = None

    def __call__(self, *args):
        # Resolve the symbol lazily via gdb and cache the resulting value
        # so later calls skip parse_and_eval.
        if not self.func:
            self.func = gdb.parse_and_eval(self.function_name)
        return self.func(*args)
+
def children_as_map(children_iterator):
    """Adapt an iterator of (key, value) pairs into the alternating
    ('key', k), ('val', v) stream that a pretty-printer 'children'
    method must produce when the display-hint is 'map'.
    """
    for pair in children_iterator:
        yield 'key', pair[0]
        yield 'val', pair[1]
+
+
+########################################################################
+
+# Pretty-printing for APR library types.
+
+# Some useful gdb.Type instances that can be initialized before any object
+# files are loaded.
+pvoidType = gdb.lookup_type('void').pointer()
+cstringType = gdb.lookup_type('char').pointer()
+
+# Some functions that resolve to calls into the inferior process.
+apr_hash_count = InferiorFunction('apr_hash_count')
+apr_hash_first = InferiorFunction('apr_hash_first')
+apr_hash_next = InferiorFunction('apr_hash_next')
+apr_hash_this_key = InferiorFunction('apr_hash_this_key')
+apr_hash_this_val = InferiorFunction('apr_hash_this_val')
+
def children_of_apr_hash(hash_p, value_type=None):
    """Iterate over an 'apr_hash_t *' GDB value, in the way required for a
    pretty-printer 'children' method when the display-hint is 'map'.
    Cast the value pointers to VALUE_TYPE, or return values as '...' if
    VALUE_TYPE is None.
    """
    hi = apr_hash_first(0, hash_p)
    while hi:
        k = apr_hash_this_key(hi).reinterpret_cast(cstringType)
        if value_type:
            val = apr_hash_this_val(hi).reinterpret_cast(value_type)
        else:
            val = '...'
        try:
            key = k.string()
        except Exception:
            # Was a bare 'except:', which would also swallow
            # KeyboardInterrupt/SystemExit; any unreadable key is shown
            # as a placeholder instead of aborting the whole dump.
            key = '<unreadable>'
        yield key, val
        hi = apr_hash_next(hi)
+
class AprHashPrinter:
    """Pretty-printer for 'apr_hash_t' with 'char *' keys and unknown
    values."""

    def __init__(self, val):
        # Keep a pointer to the hash; a NULL/false value stays falsy so the
        # methods below can short-circuit.
        self.hash_p = val.address if val else val

    def to_string(self):
        """Return the header string shown before the children, or 'NULL'
        for a null hash."""
        if self.hash_p:
            return 'hash of ' + str(apr_hash_count(self.hash_p)) + ' items'
        return 'NULL'

    def children(self):
        if self.hash_p:
            return children_as_map(children_of_apr_hash(self.hash_p))
        return []

    def display_hint(self):
        return 'map'
+
def children_of_apr_array(array, value_type):
    """Iterate over an 'apr_array_header_t' GDB value, yielding
    (index-as-string, element) pairs as required for a pretty-printer
    'children' method when the display-hint is 'array'.  Elements are
    cast to VALUE_TYPE.
    """
    count = int(array['nelts'])
    data = array['elts'].reinterpret_cast(value_type.pointer())
    for index in range(count):
        yield str(index), data[index]
+
class AprArrayPrinter:
    """Pretty-printer for 'apr_array_header_t' when the element type is
    unknown."""

    def __init__(self, val):
        self.array = val

    def to_string(self):
        if not self.array:
            return 'NULL'
        return 'array of %d items' % int(self.array['nelts'])

    def children(self):
        # The element type is unknown, so no children can be rendered.
        return []

    def display_hint(self):
        return 'array'
+
+########################################################################
+
+# Pretty-printing for Subversion libsvn_subr types.
+
class SvnBooleanPrinter:
    """Pretty-printer for svn_boolean_t: '(NULL)', 'TRUE' or 'FALSE'."""

    def __init__(self, val):
        self.val = val

    def to_string(self):
        # None (dereferenced null pointer) is distinct from FALSE.
        if self.val is None:
            return '(NULL)'
        return 'TRUE' if self.val else 'FALSE'
+
class SvnStringPrinter:
    """Pretty-printer for svn_string_t (a counted string: 'data' plus
    'len', not necessarily NUL-terminated)."""

    def __init__(self, val):
        self.val = val

    def to_string(self):
        """Return the string contents, honoring the explicit length."""
        if not self.val:
            return 'NULL'

        data = self.val['data']
        # Renamed from 'len', which shadowed the builtin of the same name.
        length = int(self.val['len'])
        return data.string(length=length)

    def display_hint(self):
        # Returns None (no hint) for a null value.
        if self.val:
            return 'string'
+
class SvnMergeRangePrinter:
    """Pretty-printer for svn_merge_range_t, rendered in 'svn mergeinfo'
    notation: '5-9' or '7' for forward ranges, '9-5' or '-5' for reverse
    ranges, with a trailing '*' when the range is not inheritable."""

    def __init__(self, val):
        self.val = val

    def to_string(self):
        if not self.val:
            return 'NULL'

        r = self.val
        lo = int(r['start'])
        hi = int(r['end'])
        if 0 <= lo < hi:
            # forward range covering revisions lo+1 .. hi
            text = str(hi) if lo + 1 == hi else '%d-%d' % (lo + 1, hi)
        elif 0 <= hi < lo:
            # reverse range covering revisions lo .. hi+1
            text = '-%d' % lo if lo == hi + 1 else '%d-%d' % (lo, hi + 1)
        else:
            text = '(INVALID: s=%d, e=%d)' % (lo, hi)
        if not r['inheritable']:
            text += '*'
        return text

    def display_hint(self):
        if self.val:
            return 'string'
+
class SvnRangelistPrinter:
    """Pretty-printer for svn_rangelist_t: an apr_array_header_t of
    svn_merge_range_t pointers, rendered comma-separated."""

    def __init__(self, val):
        self.array = val
        self.svn_merge_range_t = gdb.lookup_type('svn_merge_range_t')

    def to_string(self):
        if not self.array:
            return 'NULL'

        range_ptr = self.svn_merge_range_t.pointer()
        parts = [SvnMergeRangePrinter(element).to_string()
                 for _, element in children_of_apr_array(self.array,
                                                         range_ptr)]
        return ','.join(parts)

    def display_hint(self):
        if self.array:
            return 'string'
+
class SvnMergeinfoPrinter:
    """Pretty-printer for svn_mergeinfo_t: a hash mapping paths to
    rangelists, shown as '{ path:ranges; ... }'."""

    def __init__(self, val):
        self.hash_p = val
        self.svn_rangelist_t = gdb.lookup_type('svn_rangelist_t')

    def to_string(self):
        if self.hash_p == 0:
            return 'NULL'

        entries = []
        for path, rangelist in children_of_apr_hash(
                self.hash_p, self.svn_rangelist_t.pointer()):
            entries.append(path + ':'
                           + SvnRangelistPrinter(rangelist).to_string())
        return '{ ' + '; '.join(entries) + ' }'
+
class SvnMergeinfoCatalogPrinter:
    """Pretty-printer for svn_mergeinfo_catalog_t: a hash mapping paths
    to svn_mergeinfo_t values."""

    def __init__(self, val):
        self.hash_p = val
        self.svn_mergeinfo_t = gdb.lookup_type('svn_mergeinfo_t')

    def to_string(self):
        if self.hash_p == 0:
            return 'NULL'

        entries = []
        for path, mergeinfo in children_of_apr_hash(self.hash_p,
                                                    self.svn_mergeinfo_t):
            entries.append("'" + path + "': "
                           + SvnMergeinfoPrinter(mergeinfo).to_string())
        return '{ ' + ',\n  '.join(entries) + ' }'
+
+########################################################################
+
+# Pretty-printing for Subversion libsvn_client types.
+
class SvnPathrevPrinter:
    """Pretty-printer for svn_client__pathrev_t, shown as
    'repos-relative-path@REV'."""

    def __init__(self, val):
        self.val = val

    def to_string(self):
        if not self.val:
            return 'NULL'

        rev = int(self.val['rev'])
        url = self.val['url'].string()
        root = self.val['repos_root_url'].string()
        # Strip the repository root prefix to leave the relative path.
        return "%s@%d" % (url[len(root):], rev)

    def display_hint(self):
        if self.val:
            return 'string'
+
+
+########################################################################
+
+libapr_printer = None
+libsvn_printer = None
+
def build_libsvn_printers():
    """Construct the pretty-printer objects."""

    global libapr_printer, libsvn_printer

    # Each printer matches exactly its own type name (anchored regex).
    libapr_printer = TypedefRegexCollectionPrettyPrinter("libapr")
    for type_name, printer_class in [
            ('apr_hash_t', AprHashPrinter),
            ('apr_array_header_t', AprArrayPrinter)]:
        libapr_printer.add_printer(type_name, '^%s$' % type_name,
                                   printer_class)

    libsvn_printer = TypedefRegexCollectionPrettyPrinter("libsvn")
    for type_name, printer_class in [
            ('svn_boolean_t', SvnBooleanPrinter),
            ('svn_string_t', SvnStringPrinter),
            ('svn_client__pathrev_t', SvnPathrevPrinter),
            ('svn_merge_range_t', SvnMergeRangePrinter),
            ('svn_rangelist_t', SvnRangelistPrinter),
            ('svn_mergeinfo_t', SvnMergeinfoPrinter),
            ('svn_mergeinfo_catalog_t', SvnMergeinfoCatalogPrinter)]:
        libsvn_printer.add_printer(type_name, '^%s$' % type_name,
                                   printer_class)
+
+
def register_libsvn_printers(obj):
    """Register the pretty-printers for the object file OBJ.

    OBJ may be None to register the printers globally (as done from
    .gdbinit); build_libsvn_printers() must have run first.
    """

    global libapr_printer, libsvn_printer

    # Printers registered later take precedence.
    gdb.printing.register_pretty_printer(obj, libapr_printer)
    gdb.printing.register_pretty_printer(obj, libsvn_printer)
+
+
+# Construct the pretty-printer objects, once, at GDB start-up time when this
+# Python module is loaded. (Registration happens later, once per object
+# file.)
+build_libsvn_printers()
diff --git a/tools/dev/gen-javahl-errors.py b/tools/dev/gen-javahl-errors.py
new file mode 100755
index 0000000..c949d4a
--- /dev/null
+++ b/tools/dev/gen-javahl-errors.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python
+#
+# gen-javahl-errors.py: Generate a Java class containing an enum for the
+# C error codes
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+#
+
+import sys, os
+
+try:
+ from svn import core
+except ImportError as e:
+ sys.stderr.write("ERROR: Unable to import Subversion's Python bindings: '%s'\n" \
+ "Hint: Set your PYTHONPATH environment variable, or adjust your " \
+ "PYTHONSTARTUP\nfile to point to your Subversion install " \
+ "location's svn-python directory.\n" % e)
+ sys.stderr.flush()
+ sys.exit(1)
+
def get_errors():
    """Return a dict mapping numeric error code -> SVN_ERR_* name,
    harvested from the svn.core module's attributes."""
    errs = {}
    for key, value in vars(core).items():
        if key.startswith('SVN_ERR_'):
            try:
                errs[int(value)] = key
            except (TypeError, ValueError):
                # Non-numeric SVN_ERR_* attributes (e.g. helper functions)
                # are skipped; was a bare 'except:' that hid everything.
                pass
    return errs
+
def gen_javahl_class(error_codes, output_filename):
    """Write ERROR_CODES (dict of numeric code -> SVN_ERR_* name) to
    OUTPUT_FILENAME as a Java class of public static final int constants,
    sorted by numeric code."""
    # 'with' guarantees the file is closed even if a write fails
    # (the original leaked the handle on exceptions).
    with open(output_filename, 'w') as jfile:
        jfile.write(
"""/** ErrorCodes.java - This file is autogenerated by gen-javahl-errors.py
 */

package org.tigris.subversion.javahl;

/**
 * Provide mappings from error codes generated by the C runtime to meaningful
 * Java values.  For a better description of each error, please see
 * svn_error_codes.h in the C source.
 */
public class ErrorCodes
{
""")

        for key in sorted(error_codes.keys()):
            # Format the code name to be more Java-esque:
            # SVN_ERR_FOO_BAR -> fooBar
            code_name = error_codes[key][8:].replace('_', ' ').title().replace(' ', '')
            code_name = code_name[0].lower() + code_name[1:]

            jfile.write("    public static final int %s = %d;\n" % (code_name, key))

        jfile.write("}\n")
+
+if __name__ == "__main__":
+ if len(sys.argv) > 1:
+ output_filename = sys.argv[1]
+ else:
+ output_filename = os.path.join('..', '..', 'subversion', 'bindings',
+ 'javahl', 'src', 'org', 'tigris',
+ 'subversion', 'javahl', 'ErrorCodes.java')
+
+ gen_javahl_class(get_errors(), output_filename)
diff --git a/tools/dev/gen-py-errors.py b/tools/dev/gen-py-errors.py
new file mode 100755
index 0000000..266e04b
--- /dev/null
+++ b/tools/dev/gen-py-errors.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python
+#
+# gen-py-errors.py: Generate a python module which maps error names to numbers.
+# (The purpose being easier writing of the python tests.)
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+#
+#
+# Locates svn_error_codes.h based on its relative location to this script.
+#
+# Generates to STDOUT. Typically, redirect this into svntest/err.py
+#
+
+import sys
+import os
+import re
+
+HEADER = '''#!/usr/bin/env python
+### This file automatically generated by tools/dev/gen-py-errors.py,
+### which see for more information
+###
+### It is versioned for convenience.
+'''
+
+# Established by svn 1.0. May as well hard-code these.
+APR_OS_START_ERROR = 20000
+APR_OS_START_USERERR = APR_OS_START_ERROR + 50000 * 2
+SVN_ERR_CATEGORY_SIZE = 5000
+
+RE_CAT_NAME = re.compile(r'SVN_ERR_([A-Z_]+)_CATEG')
+RE_CAT_VALUE = re.compile(r'\d+')
+
+RE_DEF_NAME = re.compile(r'SVN_ERRDEF\(([A-Z0-9_]+)')
+RE_DEF_VALUE = re.compile(r'SVN_ERR_([A-Z_]+)_CATEG[^0-9]*([0-9]+)')
+
+
def write_output(codes):
    """Print the generated module to stdout: the HEADER banner followed by
    one 'NAME = VALUE' line per code, with the leading 'SVN_ERR_'
    stripped from each name."""
    print(HEADER)

    for name, value in codes:
        # name[8:] skips the 'SVN_ERR_' prefix
        print(name[8:] + ' = ' + str(value))
+
+
def main(codes_fname):
    """Parse svn_error_codes.h at CODES_FNAME and print the python module.

    First scans the category start values (up to SVN_ERROR_START), then
    the individual SVN_ERRDEF entries (up to SVN_ERROR_END).
    """
    categ = { }
    codes = [ ]

    # 'with' closes the header file (the original never closed it);
    # next(f) replaces f.next(), which exists only on Python 2 file
    # objects and broke this otherwise Python-3-compatible script.
    with open(codes_fname) as f:
        # Parse all the category start values
        while True:
            line = next(f)
            m = RE_CAT_NAME.search(line)
            if m:
                name = m.group(1)
                # the numeric start value is on the following line
                m = RE_CAT_VALUE.search(next(f))
                assert m
                value = int(m.group(0))
                categ[name] = APR_OS_START_USERERR + value * SVN_ERR_CATEGORY_SIZE

            elif line.strip() == 'SVN_ERROR_START':
                break

        # Parse each of the error values
        while True:
            line = next(f)
            m = RE_DEF_NAME.search(line)
            if m:
                name = m.group(1)
                line = next(f)
                m = RE_DEF_VALUE.search(line)
                if not m:
                    # SVN_ERR_WC_NOT_DIRECTORY is defined as equal to
                    # NOT_WORKING_COPY rather than relative to
                    # SVN_ERR_WC_CATEGORY_START
                    continue
                cat = m.group(1)
                value = int(m.group(2))
                codes.append((name, categ[cat] + value))

            elif line.strip() == 'SVN_ERROR_END':
                break

    write_output(sorted(codes))
+
+
+if __name__ == '__main__':
+ this_dir = os.path.dirname(os.path.abspath(__file__))
+ codes_fname = os.path.join(this_dir, os.path.pardir, os.path.pardir,
+ 'subversion', 'include', 'svn_error_codes.h')
+ main(codes_fname)
diff --git a/tools/dev/gen_junit_report.py b/tools/dev/gen_junit_report.py
new file mode 100755
index 0000000..8ce158c
--- /dev/null
+++ b/tools/dev/gen_junit_report.py
@@ -0,0 +1,301 @@
+#!/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+# $Id: gen_junit_report.py 1741723 2016-04-30 08:16:53Z stefan2 $
+"""
+gen_junit_report.py -- The script is to generate the junit report for
+Subversion tests. The script uses the log file, tests.log created by
+"make check" process. It parses the log file and generate the junit
+files for each test separately in the specified output directory. The
+script can take --log-file and --output-dir arguments.
+"""
+
+import sys
+import os
+import getopt
+
def replace_from_map(data, encode):
    """Return DATA with every key of ENCODE replaced by its mapped value,
    applied in the mapping's iteration order (replacements are sequential,
    so a later pattern may match text produced by an earlier one)."""
    result = data
    for original, substitute in encode.items():
        result = result.replace(original, substitute)
    return result
+
+xml_encode_map = {
+ '&': '&amp;',
+ '<': '&lt;',
+ '>': '&gt;',
+ '"': '&quot;',
+ "'": '&apos;',
+ }
+
def xml_encode(data):
    """encode the xml characters in the data"""
    # '&' must be first, so the '&' of the other entities is not re-escaped;
    # this mirrors the insertion order of xml_encode_map.
    for ch, entity in (('&', '&amp;'), ('<', '&lt;'), ('>', '&gt;'),
                       ('"', '&quot;'), ("'", '&apos;')):
        data = data.replace(ch, entity)
    return data
+
+special_encode_map = {
+ ']]>': ']]]]><![CDATA[>', # CDATA terminator sequence
+ '\000': '&#9216;', # U+2400 SYMBOL FOR NULL
+ '\001': '&#9217;', # U+2401 SYMBOL FOR START OF HEADING
+ '\002': '&#9218;', # U+2402 SYMBOL FOR START OF TEXT
+ '\003': '&#9219;', # U+2403 SYMBOL FOR END OF TEXT
+ '\004': '&#9220;', # U+2404 SYMBOL FOR END OF TRANSMISSION
+ '\005': '&#9221;', # U+2405 SYMBOL FOR ENQUIRY
+ '\006': '&#9222;', # U+2406 SYMBOL FOR ACKNOWLEDGE
+ '\007': '&#9223;', # U+2407 SYMBOL FOR BELL
+ '\010': '&#9224;', # U+2408 SYMBOL FOR BACKSPACE
+ '\011': '&#9225;', # U+2409 SYMBOL FOR HORIZONTAL TABULATION
+ #'\012': '&#9226;', # U+240A SYMBOL FOR LINE FEED
+ '\013': '&#9227;', # U+240B SYMBOL FOR VERTICAL TABULATION
+ '\014': '&#9228;', # U+240C SYMBOL FOR FORM FEED
+ #'\015': '&#9229;', # U+240D SYMBOL FOR CARRIAGE RETURN
+ '\016': '&#9230;', # U+240E SYMBOL FOR SHIFT OUT
+ '\017': '&#9231;', # U+240F SYMBOL FOR SHIFT IN
+ '\020': '&#9232;', # U+2410 SYMBOL FOR DATA LINK ESCAPE
+ '\021': '&#9233;', # U+2411 SYMBOL FOR DEVICE CONTROL ONE
+ '\022': '&#9234;', # U+2412 SYMBOL FOR DEVICE CONTROL TWO
+ '\023': '&#9235;', # U+2413 SYMBOL FOR DEVICE CONTROL THREE
+ '\024': '&#9236;', # U+2414 SYMBOL FOR DEVICE CONTROL FOUR
+ '\025': '&#9237;', # U+2415 SYMBOL FOR NEGATIVE ACKNOWLEDGE
+ '\026': '&#9238;', # U+2416 SYMBOL FOR SYNCHRONOUS IDLE
+ '\027': '&#9239;', # U+2417 SYMBOL FOR END OF TRAMSNISSION BLOCK
+ '\030': '&#9240;', # U+2418 SYMBOL FOR CANCEL
+ '\031': '&#9241;', # U+2419 SYMBOL FOR END OF MEDIUM
+ '\032': '&#9242;', # U+241A SYMBOL FOR SUBSTITUTE
+ '\033': '&#9243;', # U+241B SYMBOL FOR ESCAPE
+ '\034': '&#9244;', # U+241C SYMBOL FOR FILE SEPARATOR
+ '\035': '&#9245;', # U+241D SYMBOL FOR GROUP SEPARATOR
+ '\036': '&#9246;', # U+241E SYMBOL FOR RECORD SEPARATOR
+ '\037': '&#9247;', # U+241F SYMBOL FOR UNIT SEPARATOR
+ }
+
def escape_special_characters(data):
    """remove special characters in test failure reasons; None or empty
    input is passed through unchanged"""
    if not data:
        return data
    return replace_from_map(data, special_encode_map)
+
def start_junit():
    """Return the XML declaration that begins every junit document."""
    return '<?xml version="1.0" encoding="UTF-8"?>'
+
def start_testsuite(test_name):
    """Return the opening <testsuite> tag for TEST_NAME.  The statistics
    attributes hold placeholder tokens (TOTAL_*, FAIL_*, ...) that
    update_stat() substitutes once the suite's log is fully parsed."""
    classname = test_name.replace('.', '-')
    return ('<testsuite time="ELAPSED_%(t)s" tests="TOTAL_%(t)s" name="%(c)s"\n'
            '    failures="FAIL_%(t)s" errors="FAIL_%(t)s" skipped="SKIP_%(t)s">'
            % {'t': test_name, 'c': classname})
+
def junit_testcase_ok(test_name, casename):
    """mark the test case as PASSED"""
    return '<testcase time="ELAPSED_CASE_%s" name="%s" classname="%s"/>' % (
        test_name, xml_encode(casename), test_name.replace('.', '-'))
+
def junit_testcase_fail(test_name, casename, reason=None):
    """mark the test case as FAILED, embedding REASON in a CDATA failure
    element"""
    return ('<testcase time="ELAPSED_CASE_%s" name="%s" classname="%s">\n'
            '    <failure type="Failed"><![CDATA[%s]]></failure>\n'
            '    </testcase>'
            % (test_name, xml_encode(casename),
               test_name.replace('.', '-'),
               escape_special_characters(reason)))
+
def junit_testcase_xfail(test_name, casename, reason=None):
    """mark the test case as XFAILED (an expected failure: reported like a
    pass, with REASON preserved as system-out)"""
    return ('<testcase time="ELAPSED_CASE_%s" name="%s" classname="%s">\n'
            '    <system-out><![CDATA[%s]]></system-out>\n'
            '    </testcase>'
            % (test_name, xml_encode(casename),
               test_name.replace('.', '-'),
               escape_special_characters(reason)))
+
def junit_testcase_skip(test_name, casename):
    """mark the test case as SKIPPED"""
    return ('<testcase time="ELAPSED_CASE_%s" name="%s" classname="%s">\n'
            '    <skipped message="Skipped"/>\n'
            '    </testcase>'
            % (test_name, xml_encode(casename),
               test_name.replace('.', '-')))
+
def end_testsuite():
    """Return the closing </testsuite> tag."""
    return '</testsuite>'
+
def update_stat(test_name, junit, count):
    """Fill the placeholder tokens (TOTAL_*, FAIL_*, SKIP_*, ELAPSED_*)
    in the collected JUNIT fragment lines for TEST_NAME, using the
    statistics in COUNT, and return the finished XML string."""
    junit_str = '\n'.join(junit)
    t_count = count[test_name]
    total = t_count['pass'] + t_count['fail'] + t_count['skip']
    elapsed = float(t_count['elapsed'])
    case_time = 0
    if total > 0: # there are tests with no test cases
        case_time = elapsed / total

    total_patt = 'TOTAL_%s' % test_name
    fail_patt = 'FAIL_%s' % test_name
    skip_patt = 'SKIP_%s' % test_name
    elapsed_patt = 'ELAPSED_%s' % test_name
    elapsed_case_patt = 'ELAPSED_CASE_%s' % test_name

    # replace the pattern in junit string with actual statistics.
    # The per-case pattern is replaced first because it contains the
    # 'ELAPSED_' prefix of the per-suite pattern.  Counts are written as
    # integers; the original computed 'total' as a float and emitted
    # invalid attributes like tests="4.0".
    junit_str = junit_str.replace(elapsed_case_patt, "%.3f" % case_time)
    junit_str = junit_str.replace(elapsed_patt, "%.3f" % elapsed)
    junit_str = junit_str.replace(total_patt, "%d" % total)
    junit_str = junit_str.replace(fail_patt, "%s" % t_count['fail'])
    junit_str = junit_str.replace(skip_patt, "%s" % t_count['skip'])
    return junit_str
+
def main():
    """Parse the options, read the tests.log file and write one junit XML
    file per test suite into the output directory."""
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'l:d:h',
                                   ['log-file=', 'output-dir=', 'help'])
    except getopt.GetoptError as err:
        usage(err)

    log_file = None
    output_dir = None
    for opt, value in opts:
        if (opt in ('-h', '--help')):
            usage()
        elif (opt in ('-l', '--log-file')):
            log_file = value
        elif (opt in ('-d', '--output-dir')):
            output_dir = value
        else:
            usage('Unable to recognize option')

    if not log_file or not output_dir:
        usage("The options --log-file and --output-dir are mandatory")

    # create junit output directory, if not exists
    if not os.path.exists(output_dir):
        print("Directory '%s' not exists, creating ..." % output_dir)
        try:
            os.makedirs(output_dir)
        except OSError as err:
            sys.stderr.write("ERROR: %s\n" % err)
            sys.exit(1)

    # marker prefixes emitted into tests.log by 'make check'
    patterns = {
      'start' : 'START:',
      'end' : 'END:',
      'pass' : 'PASS:',
      'skip' : 'SKIP:',
      'fail' : 'FAIL:',
      'xfail' : 'XFAIL:',
      'elapsed' : 'ELAPSED:'
    }

    def after(line, marker):
        """Text following MARKER on LINE.  (The original used
        line.strip(marker), which strips any of the marker's *characters*
        from both ends and could mangle case names such as 'Pass ...'.)"""
        return line[len(marker):].strip()

    junit = []
    junit.append(start_junit())
    reason = None
    count = {}
    try:
        fp = open(log_file, 'r')
    except IOError as err:
        sys.stderr.write("ERROR: %s\n" % err)
        sys.exit(1)

    for line in fp:
        line = line.strip()
        if line.startswith(patterns['start']):
            reason = ""
            test_name = line.split(' ')[1]
            # replace '.' in test name with '_' to avoid confusing class
            # name in test result displayed in the CI user interface
            # (the original discarded the result of replace(), a no-op)
            test_name = test_name.replace('.', '_')
            count[test_name] = {
              'pass' : 0,
              'skip' : 0,
              'fail' : 0,
              'xfail' : 0,
              'elapsed' : 0,
              'total' : 0
            }
            junit.append(start_testsuite(test_name))
        elif line.startswith(patterns['end']):
            junit.append(end_testsuite())
        elif line.startswith(patterns['pass']):
            reason = ""
            casename = after(line, patterns['pass'])
            junit.append(junit_testcase_ok(test_name, casename))
            count[test_name]['pass'] += 1
        elif line.startswith(patterns['skip']):
            reason = ""
            casename = after(line, patterns['skip'])
            junit.append(junit_testcase_skip(test_name, casename))
            count[test_name]['skip'] += 1
        elif line.startswith(patterns['fail']):
            casename = after(line, patterns['fail'])
            junit.append(junit_testcase_fail(test_name, casename, reason))
            count[test_name]['fail'] += 1
            reason = ""
        elif line.startswith(patterns['xfail']):
            casename = after(line, patterns['xfail'])
            junit.append(junit_testcase_xfail(test_name, casename, reason))
            # an expected failure counts as a pass in the statistics
            count[test_name]['pass'] += 1
            reason = ""
        elif line.startswith(patterns['elapsed']):
            reason = ""
            elapsed = line.split(' ')[2].strip()
            (hrs, mins, secs) = elapsed.split(':')
            # hours -> seconds needs 3600; the original used int(hrs)*24
            secs_taken = int(hrs)*3600 + int(mins)*60 + float(secs)
            count[test_name]['elapsed'] = secs_taken

            # an ELAPSED: line closes a suite: fill in the statistics and
            # flush the collected fragment to its own junit file
            junit_str = update_stat(test_name, junit, count)
            test_junit_file = os.path.join(output_dir,
                                           "%s.junit.xml" % test_name)
            with open(test_junit_file, 'w') as w_fp:
                w_fp.writelines(junit_str)
            # restart with a fresh XML declaration; the original reset to
            # [] so every file after the first lacked the declaration
            junit = [start_junit()]
        elif len(line):
            # any other non-empty line continues a failure reason
            reason = "%s\n%s" % (reason, line)
    fp.close()
+
def usage(errorMsg=None):
    """Print the usage text to stdout.  With ERRORMSG, also write it to
    stderr and exit 1; otherwise exit 0."""
    script_name = os.path.basename(sys.argv[0])
    sys.stdout.write("""USAGE: %s: [--help|h] --log-file|l --output-dir|d

Options:
  --help|-h      Display help message
  --log-file|l   The log file to parse for generating junit xml files
  --output-dir|d The directory to create the junit xml file for each
                 test
""" % script_name)
    if errorMsg is None:
        sys.exit(0)
    sys.stderr.write("\nERROR: %s\n" % errorMsg)
    sys.exit(1)
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/dev/gnuify-changelog.pl b/tools/dev/gnuify-changelog.pl
new file mode 100755
index 0000000..a4112c7
--- /dev/null
+++ b/tools/dev/gnuify-changelog.pl
@@ -0,0 +1,164 @@
+#!/usr/bin/perl -w
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+# a script to munge the output of 'svn log' into something approaching the
+# style of a GNU ChangeLog.
+#
+# to use this, just fill in the 'hackers' hash with the usernames and
+# name/emails of the people who work on your project, go to the top level
+# of your working copy, and run:
+#
+# $ svn log | /path/to/gnuify-changelog.pl > ChangeLog
+
+require 5.0;
+use strict;
+
+my %hackers = (
+ "svn" => 'Collab.net Subversion Team',
+ "jimb" => 'Jim Blandy <jimb@redhat.com>',
+ "sussman" => 'Ben Collins-Sussman <sussman@collab.net>',
+ "kfogel" => 'Karl Fogel <kfogel@collab.net>',
+ "gstein" => 'Greg Stein <gstein@lyra.org>',
+ "brane" => 'Branko Cibej <brane@xbc.nu>',
+ "joe" => 'Joe Orton <joe@light.plus.com>',
+ "ghudson" => 'Greg Hudson <ghudson@mit.edu>',
+ "lefty" => 'Lee P. W. Burgess <lefty@red-bean.com>',
+ "fitz" => 'Brian Fitzpatrick <fitz@red-bean.com>',
+ "mab" => 'Matthew Braithwaite <matt@braithwaite.net>',
+ "daniel" => 'Daniel Stenberg <daniel@haxx.se>',
+ "mmurphy" => 'Mark Murphy <mmurphy@collab.net>',
+ "cmpilato" => 'C. Michael Pilato <cmpilato@collab.net>',
+ "kevin" => 'Kevin Pilch-Bisson <kevin@pilch-bisson.net>',
+ "philip" => 'Philip Martin <philip@codematters.co.uk>',
+ "jerenkrantz" => 'Justin Erenkrantz <jerenkrantz@apache.org>',
+ "rooneg" => 'Garrett Rooney <rooneg@electricjellyfish.net>',
+ "bcollins" => 'Ben Collins <bcollins@debian.org>',
+ "blair" => 'Blair Zajac <blair@orcaware.com>',
+ "striker" => 'Sander Striker <striker@apache.org>',
+ "XelaRellum" => 'Alexander Mueller <alex@littleblue.de>',
+ "yoshiki" => 'Yoshiki Hayashi <yoshiki@xemacs.org>',
+ "david" => 'David Summers <david@summersoft.fay.ar.us>',
+ "rassilon" => 'Bill Tutt <rassilon@lyra.org>',
+ "kbohling" => 'Kirby C. Bohling <kbohling@birddog.com>',
+ "breser" => 'Ben Reser <ben@reser.org>',
+ "bliss" => 'Tobias Ringstrom <tobias@ringstrom.mine.nu>',
+ "dionisos" => 'Erik Huelsmann <e.huelsmann@gmx.net>',
+ "josander" => 'Jostein Andersen <jostein@josander.net>',
+ "julianfoad" => 'Julian Foad <julianfoad@btopenworld.com>',
+ "clkao" => 'Chia-Liang Kao <clkao@clkao.org>',
+ "xsteve" => 'Stefan Reichör <reichoer@web.de>',
+ "mbk" => 'Mark Benedetto King <mbk@lowlatency.com>',
+ "patrick" => 'Patrick Mayweg <mayweg@qint.de>',
+ "jrepenning" => 'Jack Repenning <jrepenning@collab.net>',
+ "epg" => 'Eric Gillespie <epg@pretzelnet.org>',
+ "dwhedon" => 'David Kimdon <David_Kimdon@alumni.hmc.edu>',
+ "djh" => 'D.J. Heap <dj@shadyvale.net>',
+ "mprice" => 'Michael Price <mprice@atl.lmco.com>',
+ "jszakmeister" => 'John Szakmeister <john@szakmeister.net>',
+ "bdenny" => 'Brian Denny <brian@briandenny.net>',
+ "rey4" => 'Russell Yanofsky <rey4@columbia.edu>',
+ "maxb" => 'Max Bowsher <maxb@ukf.net>',
+ "dlr" => 'Daniel Rall <dlr@finemaltcoding.com>',
+ "jaa" => 'Jani Averbach <jaa@iki.fi>',
+ "pll" => 'Paul Lussier <p.lussier@comcast.net>',
+ "shlomif" => 'Shlomi Fish <shlomif@vipe.technion.ac.il>',
+ "jpieper" => 'Josh Pieper <jpieper@andrew.cmu.edu>',
+ "dimentiy" => 'Dmitriy O. Popkov <dimentiy@dimentiy.info>',
+ "kellin" => 'Shamim Islam <files@poetryunlimited.com>',
+ "sergeyli" => 'Sergey A. Lipnevich <sergey@optimaltec.com>',
+ "kraai" => 'Matt Kraai <kraai@alumni.cmu.edu>',
+ "ballbach" => 'Michael Ballbach <ballbach@rten.net>',
+ "kon" => 'Kalle Olavi Niemitalo <kon@iki.fi>',
+ "knacke" => 'Kai Nacke <kai.nacke@redstar.de>',
+ "gthompson" => 'Glenn A. Thompson <gthompson@cdr.net>',
+ "jespersm" => 'Jesper Steen Møller <jesper@selskabet.org>',
+ "naked" => 'Nuutti Kotivuori <naked@iki.fi>',
+ "niemeyer" => 'Gustavo Niemeyer <niemeyer@conectiva.com>',
+ "trow" => 'Jon Trowbridge <trow@ximian.com>',
+ "mmacek" => 'Marko Macek <Marko.Macek@gmx.net>',
+ "zbrown" => 'Zack Brown <zbrown@tumblerings.org>',
+ "morten" => 'Morten Ludvigsen <morten@2ps.dk>',
+ "fmatias" => 'Féliciano Matias <feliciano.matias@free.fr>',
+ "nsd" => 'Nick Duffek <nick@duffek.com>',
+);
+
+my $parse_next_line = 0;
+my $last_line_empty = 0;
+my $last_rev = "";
+
+while (my $entry = <>) {
+
+ # Axe windows style line endings, since we should try to be consistent, and
+ # the repos has both styles in its log entries
+ $entry =~ s/\r\n$/\n/;
+
+ # Remove trailing whitespace
+ $entry =~ s/\s+$/\n/;
+
+ my $this_line_empty = $entry eq "\n";
+
+ # Avoid duplicate empty lines
+ next if $this_line_empty and $last_line_empty;
+
+ # Don't fail on valid dash-only lines
+ if ($entry =~ /^-+$/ and length($entry) >= 72) {
+
+ # We're at the start of a log entry, so we need to parse the next line
+ $parse_next_line = 1;
+
+ # Check to see if the final line of the commit message was blank,
+ # if not insert one
+ print "\n" if $last_rev ne "" and !$last_line_empty;
+
+ } elsif ($parse_next_line) {
+
+ # Transform from svn style to GNU style
+ $parse_next_line = 0;
+
+ my @parts = split (/ /, $entry);
+ $last_rev = $parts[0];
+ my $hacker = $parts[2];
+ my $tstamp = $parts[4];
+
+ # Use alias if we can't resolve to name, email
+ $hacker = $hackers{$hacker} if defined $hackers{$hacker};
+
+ printf "%s %s\n", $tstamp, $hacker;
+
+ } elsif ($this_line_empty) {
+
+ print "\n";
+
+ } else {
+
+ print "\t$entry";
+
+ }
+
+ $last_line_empty = $this_line_empty;
+}
+
+# As a HERE doc so it also sets the final changelog's coding
+print <<LOCAL;
+;; Local Variables:
+;; coding: utf-8
+;; End:
+LOCAL
+
+1;
diff --git a/tools/dev/graph-dav-servers.py b/tools/dev/graph-dav-servers.py
new file mode 100755
index 0000000..86ae475
--- /dev/null
+++ b/tools/dev/graph-dav-servers.py
@@ -0,0 +1,194 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# graph-svn-dav.py by Brian W. Fitzpatrick <fitz@red-bean.com>
+#
+# This was originally a quick hack to make a pretty picture of svn DAV servers.
+#
+# I've dropped it in Subversion's repository at the request of Karl Fogel.
+#
+# Be warned that this script has many dependencies that don't ship with Python.
+
+import sys
+import os
+import fileinput
+import datetime
+import time
+import datetime
+from matplotlib import dates
+import matplotlib
+matplotlib.use('Agg')
+from matplotlib import pylab
+import Image
+
+OUTPUT_FILE = '../../www/images/svn-dav-securityspace-survey.png'
+OUTPUT_IMAGE_WIDTH = 800
+
+STATS = [
+ ('1/1/2003', 70),
+ ('2/1/2003', 158),
+ ('3/1/2003', 222),
+ ('4/1/2003', 250),
+ ('5/1/2003', 308),
+ ('6/1/2003', 369),
+ ('7/1/2003', 448),
+ ('8/1/2003', 522),
+ ('9/1/2003', 665),
+ ('10/1/2003', 782),
+ ('11/1/2003', 969),
+ ('12/1/2003', 1009),
+ ('1/1/2004', 1162),
+ ('2/1/2004', 1307),
+ ('3/1/2004', 1424),
+ ('4/1/2004', 1792),
+ ('5/1/2004', 2113),
+ ('6/1/2004', 2502),
+ ('7/1/2004', 2941),
+ ('8/1/2004', 3863),
+ ('9/1/2004', 4174),
+ ('10/1/2004', 4187),
+ ('11/1/2004', 4783),
+ ('12/1/2004', 4995),
+ ('1/1/2005', 5565),
+ ('2/1/2005', 6505),
+ ('3/1/2005', 7897),
+ ('4/1/2005', 8751),
+ ('5/1/2005', 9793),
+ ('6/1/2005', 11534),
+ ('7/1/2005', 12808),
+ ('8/1/2005', 13545),
+ ('9/1/2005', 15233),
+ ('10/1/2005', 17588),
+ ('11/1/2005', 18893),
+ ('12/1/2005', 20278),
+ ('1/1/2006', 21084),
+ ('2/1/2006', 23861),
+ ('3/1/2006', 26540),
+ ('4/1/2006', 29396),
+ ('5/1/2006', 33001),
+ ('6/1/2006', 35082),
+ ('7/1/2006', 38939),
+ ('8/1/2006', 40672),
+ ('9/1/2006', 46525),
+ ('10/1/2006', 54247),
+ ('11/1/2006', 63145),
+ ('12/1/2006', 68988),
+ ('1/1/2007', 77027),
+ ('2/1/2007', 84813),
+ ('3/1/2007', 95679),
+ ('4/1/2007', 103852),
+ ('5/1/2007', 117267),
+ ('6/1/2007', 133665),
+ ('7/1/2007', 137575),
+ ('8/1/2007', 155426),
+ ('9/1/2007', 159055),
+ ('10/1/2007', 169939),
+ ('11/1/2007', 180831),
+ ('12/1/2007', 187093),
+ ('1/1/2008', 199432),
+ ('2/1/2008', 221547),
+ ('3/1/2008', 240794),
+ ('4/1/2008', 255520),
+ ('5/1/2008', 269478),
+ ('6/1/2008', 286614),
+ ('7/1/2008', 294579),
+ ('8/1/2008', 307923),
+ ('9/1/2008', 254757),
+ ('10/1/2008', 268081),
+ ('11/1/2008', 299071),
+ ('12/1/2008', 330884),
+ ('1/1/2009', 369719),
+ ('2/1/2009', 378434),
+ ('3/1/2009', 390502),
+ ('4/1/2009', 408658),
+ ('5/1/2009', 407044),
+ ('6/1/2009', 406520),
+ ('7/1/2009', 334276),
+ ]
+
+
+def get_date(raw_date):
+ month, day, year = map(int, raw_date.split('/'))
+ return datetime.datetime(year, month, day)
+
+
+def get_ordinal_date(date):
+ # This is the only way I can get matplotlib to do the dates right.
+ return int(dates.date2num(get_date(date)))
+
+
+def load_stats():
+ dates = [get_ordinal_date(date) for date, value in STATS]
+ counts = [x[1] for x in STATS]
+
+ return dates, counts
+
+
+def draw_graph(dates, counts):
+ ###########################################################
+ # Drawing takes place here.
+ pylab.figure(1)
+
+ ax = pylab.subplot(111)
+ pylab.plot_date(dates, counts,
+ color='r', linestyle='-', marker='o', markersize=3)
+
+ ax.xaxis.set_major_formatter( pylab.DateFormatter('%Y') )
+ ax.xaxis.set_major_locator( pylab.YearLocator() )
+ ax.xaxis.set_minor_locator( pylab.MonthLocator() )
+ ax.set_xlim( (dates[0] - 92, dates[len(dates) - 1] + 92) )
+
+ ax.yaxis.set_major_formatter( pylab.FormatStrFormatter('%d') )
+
+ pylab.ylabel('Total # of Public DAV Servers')
+
+ lastdate = datetime.datetime.fromordinal(dates[len(dates) - 1]).strftime("%B %Y")
+ pylab.xlabel("Data as of " + lastdate)
+ pylab.title('Security Space Survey of\nPublic Subversion DAV Servers')
+ # End drawing
+ ###########################################################
+ png = open(OUTPUT_FILE, 'wb')  # PNG is binary; text mode corrupts output on some platforms
+ pylab.savefig(png)
+ png.close()
+ os.rename(OUTPUT_FILE, OUTPUT_FILE + ".tmp.png")
+ try:
+ im = Image.open(OUTPUT_FILE + ".tmp.png", 'r')
+ (width, height) = im.size
+ print("Original size: %d x %d pixels" % (width, height))
+ scale = float(OUTPUT_IMAGE_WIDTH) / float(width)
+ width = OUTPUT_IMAGE_WIDTH
+ height = int(float(height) * scale)
+ print("Final size: %d x %d pixels" % (width, height))
+ im = im.resize((width, height), Image.ANTIALIAS)
+ im.save(OUTPUT_FILE, im.format)
+ os.unlink(OUTPUT_FILE + ".tmp.png")
+ except Exception as e:
+ sys.stderr.write("Error attempting to resize the graphic: %s\n" % (str(e)))
+ os.rename(OUTPUT_FILE + ".tmp.png", OUTPUT_FILE)
+ raise
+ pylab.close()
+
+
+if __name__ == '__main__':
+ dates, counts = load_stats()
+ draw_graph(dates, counts)
+ print("Don't forget to update ../../www/svn-dav-securityspace-survey.html!")
diff --git a/tools/dev/histogram.py b/tools/dev/histogram.py
new file mode 100755
index 0000000..1923c78
--- /dev/null
+++ b/tools/dev/histogram.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+import sys
+import operator
+
+
+def count(infile):
+ # infile should be a simple file with author names on each line
+ counts = {}
+ for line in infile:
+ author = line.strip()
+ counts[author] = counts.get(author, 0) + 1
+
+ return counts
+
+
+def histogram(counts, width):
+ max_len = max([len(author) for author in counts.keys()])
+ max_count = max(counts.values())
+
+ adjustor = float(max_count) / (width - max_len - 3)
+
+ for author, count in sorted(counts.items(),
+ key=operator.itemgetter(1), # sort on count
+ reverse=True):
+ print("%-*s | %s" % (max_len, author, "X"*int(count/adjustor)))
+
+
+if __name__ == '__main__':
+ if len(sys.argv) < 2:
+ ### TODO: Automagically determine terminal width
+ width = 80
+ else:
+ width = int(sys.argv[1])
+ histogram(count(sys.stdin), width)
diff --git a/tools/dev/iz/defect.dem b/tools/dev/iz/defect.dem
new file mode 100644
index 0000000..7756b7c
--- /dev/null
+++ b/tools/dev/iz/defect.dem
@@ -0,0 +1,6 @@
+set title "Subversion DEFECT Activity"
+set boxwidth 0.5
+set data style lines
+set key 10, 60
+plot "/tmp/points.found.DEFECT" title "found" with boxes, "/tmp/points.fixed.DEFECT" title "fixed" with boxes, "/tmp/points.avg.DEFECT" title "moving avg", "/tmp/points.open.DEFECT" title "open"
+pause -1 "Hit return to continue"
diff --git a/tools/dev/iz/ff2csv.command b/tools/dev/iz/ff2csv.command
new file mode 100755
index 0000000..6826e34
--- /dev/null
+++ b/tools/dev/iz/ff2csv.command
@@ -0,0 +1,27 @@
+#!/bin/sh
+
+# MacOS X do-hickie to run ff2csv.py, with parameters, by double-click.
+
+
+flags="hq"
+Usage () {
+ args="$*"
+ if [[ -n "$args" ]] ; then
+ echo >&2 "$args"
+ fi
+ echo >&2 "Usage: $0 [-$flags] [querysetfile [csvfile]]
+Run ff2csv.py, fetching and summarizing SVN bug status."
+}
+while getopts $flags flag; do
+ case "$flag" in
+ h|q) Usage; exit 0;;
+ esac
+done
+
+# we want to run in the same folder as this script, not
+# the users home folder
+cd `dirname $0`
+
+
+date=`date +%m%d`
+./ff2csv.py ${1:-query-set-1-$date.tsv} ${2:-core-history-$date.csv}
diff --git a/tools/dev/iz/ff2csv.py b/tools/dev/iz/ff2csv.py
new file mode 100755
index 0000000..dca127e
--- /dev/null
+++ b/tools/dev/iz/ff2csv.py
@@ -0,0 +1,189 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+# -*- Python -*-
+"""Transform find-fix.py output into Excellable csv."""
+
+__date__ = "Time-stamp: <2003-10-16 13:26:27 jrepenning>"[13:30]
+__author__ = "Jack Repenning <jrepenning@collab.net>"
+
+import getopt
+try:
+ my_getopt = getopt.gnu_getopt
+except AttributeError:
+ my_getopt = getopt.getopt
+import inspect
+import os
+import os.path
+import pydoc
+import re
+import shutil
+import string
+import sys
+import time
+
+# Long options and their usage strings; "=" means it takes an argument.
+# To get a list suitable for getopt, just do
+#
+# [x[0] for x in long_opts]
+#
+# Make sure to sacrifice a lamb to Guido for each element of the list.
+long_opts = [
+ ["doc", """Optional, print pydocs."""],
+ ["help", """Optional, print usage (this text)."""],
+ ["verbose", """Optional, print more progress messages."""],
+ ]
+
+help = 0
+verbose = 0
+me = os.path.basename(sys.argv[0])
+
+DATA_FILE = "http://subversion.tigris.org/iz-data/query-set-1.tsv"
+
+def main():
+ """Run find-fix.py with arguments du jour for drawing pretty
+manager-speak pictures."""
+
+ global verbose
+
+ try:
+ opts, args = my_getopt(sys.argv[1:], "", [x[0] for x in long_opts])
+ except getopt.GetoptError as e:
+ print("Error: %s" % e.msg)
+ shortusage()
+ print(me + " --help for options.")
+ sys.exit(1)
+
+ for opt, arg in opts:
+ if opt == "--help":
+ usage()
+ sys.exit(0)
+ elif opt == "--verbose":
+ verbose = 1
+ elif opt == "--doc":
+ pydoc.doc(pydoc.importfile(sys.argv[0]))
+ sys.exit(0)
+
+ # do something fruitful with your life
+ if len(args) == 0:
+ args = ["query-set-1.tsv", "core-history.csv"]
+ print(("ff2csv %s %s" % tuple(args)))  # '%' unpacks tuples only; a list raises TypeError
+
+ if len(args) != 2:
+ print("%s: Wrong number of args." % me)
+ shortusage()
+ sys.exit(1)
+
+ if os.system("curl " + DATA_FILE + "> " + args[0]):
+ os.system("wget " + DATA_FILE)
+
+ outfile = open(args[1], "w")
+ outfile.write("Date,found,fixed,inval,dup,other,remain\n")
+
+ totalsre = re.compile("totals:.*found= +([0-9]+) +"
+ "fixed= +([0-9]+) +"
+ "inval= +([0-9]+) +"
+ "dup= +([0-9]+) +"
+ "other= +([0-9]+) +"
+ "remain= *([0-9]+)")
+ for year in ("2001", "2002", "2003", "2004"):
+ for month in ("01", "02", "03", "04", "05", "06", "07", "08",
+ "09", "10", "11", "12"):
+ for dayrange in (("01", "08"),
+ ("08", "15"),
+ ("15", "22"),
+ ("22", "28")):
+ if verbose:
+ print("searching %s-%s-%s to %s" % (year,
+ month,
+ dayrange[0],
+ dayrange[1]))
+ ffpy = os.popen("python ./find-fix.py --m=beta "
+ "%s %s-%s-%s %s-%s-%s"
+ % (args[0],
+ year, month, dayrange[0],
+ year, month, dayrange[1]))
+ if verbose:
+ print("ffpy: %s" % ffpy)
+
+ line = ffpy.readline()
+ if verbose:
+ print("initial line is: %s" % line)
+ matches = totalsre.search(line)
+ if verbose:
+ print("initial match is: %s" % matches)
+ while line and not matches:
+ line = ffpy.readline()
+ if verbose:
+ print("%s: read line '%s'" % (me, line))
+ matches = totalsre.search(line)
+ if verbose:
+ print("subsequent line is: %s" % line)
+
+ ffpy.close()
+
+ if verbose:
+ print("line is %s" % line)
+
+ if matches.group(1) != "0" \
+ or matches.group(2) != "0" \
+ or matches.group(3) != "0" \
+ or matches.group(4) != "0" \
+ or matches.group(5) != "0":
+
+ outfile.write("%s-%s-%s,%s,%s,%s,%s,%s,%s\n"
+ % (year, month, dayrange[1],
+ matches.group(1),
+ matches.group(2),
+ matches.group(3),
+ matches.group(4),
+ matches.group(5),
+ matches.group(6),
+ ))
+ elif matches.group(6) != "0":
+ # quit at first nothing-done week
+ # allows slop in loop controls
+ break
+ outfile.close()
+
+
+def shortusage():
+ "Print one-line usage summary."
+ print("%s - %s" % (me, pydoc.synopsis(sys.argv[0])))
+
+def usage():
+ "Print multi-line usage tome."
+ shortusage()
+ print('''%s [opts] [queryfile [outfile]]
+Option keywords may be abbreviated to any unique prefix.
+Option order is not important.
+Most options require "=xxx" arguments:''' % me)
+ for x in long_opts:
+ padding_limit = 18
+ if x[0][-1:] == '=':
+ sys.stdout.write(" --%s " % x[0][:-1])
+ padding_limit = 19
+ else:
+ sys.stdout.write(" --%s " % x[0])
+ print("%s %s" % ((' ' * (padding_limit - len(x[0]))), x[1]))
+
+if __name__ == "__main__":
+ main()
diff --git a/tools/dev/iz/find-fix.py b/tools/dev/iz/find-fix.py
new file mode 100755
index 0000000..8761b8e
--- /dev/null
+++ b/tools/dev/iz/find-fix.py
@@ -0,0 +1,454 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+# -*- Python -*-
+"""find-fix.py: produce a find/fix report for Subversion's IZ database
+
+For simple text summary:
+ find-fix.py query-set-1.tsv YYYY-MM-DD YYYY-MM-DD
+Statistics will be printed for bugs found or fixed within the
+time frame.
+
+For gnuplot presentation:
+ find-fix.py query-set-1.tsv outfile
+Gnuplot provides its own way to select date ranges.
+
+Either way, get a query-set-1.tsv from:
+ http://subversion.tigris.org/iz-data/query-set-1.tsv (updated nightly)
+See http://subversion.tigris.org/iz-data/README for more info on that file.
+
+For more usage info on this script:
+ find-fix.py --help
+"""
+
+_version = "$Revision:"
+
+#
+# This can be run over the data file found at:
+# http://subversion.tigris.org/iz-data/query-set-1.tsv
+#
+
+import getopt
+try:
+ my_getopt = getopt.gnu_getopt
+except AttributeError:
+ my_getopt = getopt.getopt
+import operator
+import os
+import os.path
+import pydoc
+import re
+try:
+ # Python >=2.6
+ from functools import reduce
+except ImportError:
+ # Python <2.6
+ pass
+import sys
+import time
+
+me = os.path.basename(sys.argv[0])
+
+# Long options and their usage strings; "=" means it takes an argument.
+# To get a list suitable for getopt, just do
+#
+# [x[0] for x in long_opts]
+#
+# Make sure to sacrifice a lamb to Guido for each element of the list.
+long_opts = [
+ ["milestones=", """Optional, milestones NOT to report on
+ (one or more of Beta, 1.0, Post-1.0, cvs2svn-1.0, cvs2svn-opt,
+ inapplicable)"""],
+ ["update", """Optional, update the statistics first."""],
+ ["doc", """Optional, print pydocs."""],
+ ["help", """Optional, print usage (this text)."""],
+ ["verbose", """Optional, print more progress messages."""],
+ ]
+
+help = 0
+verbose = 0
+update = 0
+
+DATA_FILE = "http://subversion.tigris.org/iz-data/query-set-1.tsv"
+ONE_WEEK = 7 * 24 * 60 * 60
+
+_types = []
+_milestone_filter = []
+
+noncore_milestone_filter = [
+ 'Post-1.0',
+ '1.1',
+ 'cvs2svn-1.0',
+ 'cvs2svn-opt',
+ 'inapplicable',
+ 'no milestone',
+ ]
+
+one_point_oh_milestone_filter = noncore_milestone_filter + []
+
+beta_milestone_filter = one_point_oh_milestone_filter + ['1.0']
+
+
+_types = [
+ 'DEFECT',
+ 'TASK',
+ 'FEATURE',
+ 'ENHANCEMENT',
+ 'PATCH',
+ ]
+
+
+def main():
+ """Report bug find/fix rate statistics for Subversion."""
+
+ global verbose
+ global update
+ global _types
+ global _milestone_filter
+ global noncore_milestone_filter
+
+ try:
+ opts, args = my_getopt(sys.argv[1:], "", [x[0] for x in long_opts])
+ except getopt.GetoptError as e:
+ sys.stderr.write("Error: %s\n" % e.msg)
+ shortusage()
+ sys.stderr.write("%s --help for options.\n" % me)
+ sys.exit(1)
+
+ for opt, arg in opts:
+ if opt == "--help":
+ usage()
+ sys.exit(0)
+ elif opt == "--verbose":
+ verbose = 1
+ elif opt == "--milestones":
+ for mstone in arg.split(","):
+ if mstone == "noncore":
+ _milestone_filter = noncore_milestone_filter
+ elif mstone == "beta":
+ _milestone_filter = beta_milestone_filter
+ elif mstone == "one":
+ _milestone_filter = one_point_oh_milestone_filter
+ elif mstone[0] == '-':
+ if mstone[1:] in _milestone_filter:
+ spot = _milestone_filter.index(mstone[1:])
+ _milestone_filter = _milestone_filter[:spot] \
+ + _milestone_filter[(spot+1):]
+ else:
+ _milestone_filter += [mstone]
+
+ elif opt == "--update":
+ update = 1
+ elif opt == "--doc":
+ pydoc.doc(pydoc.importfile(sys.argv[0]))
+ sys.exit(0)
+
+ if len(_milestone_filter) == 0:
+ _milestone_filter = noncore_milestone_filter
+
+ if verbose:
+ sys.stderr.write("%s: Filtering out milestones %s.\n"
+ % (me, ", ".join(_milestone_filter)))
+
+ if len(args) == 2:
+ if verbose:
+ sys.stderr.write("%s: Generating gnuplot data.\n" % me)
+ if update:
+ if verbose:
+ sys.stderr.write("%s: Updating %s from %s.\n" % (me, args[0], DATA_FILE))
+ if os.system("curl " + DATA_FILE + "> " + args[0]):
+ os.system("wget " + DATA_FILE)
+ plot(args[0], args[1])
+
+ elif len(args) == 3:
+ if verbose:
+ sys.stderr.write("%s: Generating summary from %s to %s.\n"
+ % (me, args[1], args[2]))
+ if update:
+ if verbose:
+ sys.stderr.write("%s: Updating %s from %s.\n" % (me, args[0], DATA_FILE))
+ if os.system("curl " + DATA_FILE + "> " + args[0]):
+ os.system("wget " + DATA_FILE)
+
+ try:
+ t_start = parse_time(args[1] + " 00:00:00")
+ except ValueError:
+ sys.stderr.write('%s: ERROR: bad time value: %s\n' % (me, args[1]))
+ sys.exit(1)
+
+ try:
+ t_end = parse_time(args[2] + " 00:00:00")
+ except ValueError:
+ sys.stderr.write('%s: ERROR: bad time value: %s\n' % (me, args[2]))
+ sys.exit(1)
+
+ summary(args[0], t_start, t_end)
+ else:
+ usage()
+
+ sys.exit(0)
+
+
+def summary(datafile, d_start, d_end):
+ "Prints a summary of activity within a specified date range."
+
+ data = load_data(datafile)
+
+ # activity during the requested period
+ found, fixed, inval, dup, other = extract(data, 1, d_start, d_end)
+
+ # activity from the beginning of time to the end of the request
+ # used to compute remaining
+ # XXX It would be faster to change extract to collect this in one
+ # pass. But we don't presently have enough data, nor use this
+ # enough, to justify that rework.
+ fromzerofound, fromzerofixed, fromzeroinval, fromzerodup, fromzeroother \
+ = extract(data, 1, 0, d_end)
+
+ alltypes_found = alltypes_fixed = alltypes_inval = alltypes_dup \
+ = alltypes_other = alltypes_rem = 0
+ for t in _types:
+ fromzerorem_t = fromzerofound[t]\
+ - (fromzerofixed[t] + fromzeroinval[t] + fromzerodup[t]
+ + fromzeroother[t])
+ print('%12s: found=%3d fixed=%3d inval=%3d dup=%3d ' \
+ 'other=%3d remain=%3d' \
+ % (t, found[t], fixed[t], inval[t], dup[t], other[t], fromzerorem_t))
+ alltypes_found = alltypes_found + found[t]
+ alltypes_fixed = alltypes_fixed + fixed[t]
+ alltypes_inval = alltypes_inval + inval[t]
+ alltypes_dup = alltypes_dup + dup[t]
+ alltypes_other = alltypes_other + other[t]
+ alltypes_rem = alltypes_rem + fromzerorem_t
+
+ print('-' * 77)
+ print('%12s: found=%3d fixed=%3d inval=%3d dup=%3d ' \
+ 'other=%3d remain=%3d' \
+ % ('totals', alltypes_found, alltypes_fixed, alltypes_inval,
+ alltypes_dup, alltypes_other, alltypes_rem))
+ # print '%12s find/fix ratio: %g%%' \
+ # % (" "*12, (alltypes_found*100.0/(alltypes_fixed
+ # + alltypes_inval + alltypes_dup + alltypes_other)))
+
+
+def plot(datafile, outbase):
+ "Generates data files intended for use by gnuplot."
+
+ global _types
+
+ data = load_data(datafile)
+
+ t_min = 1L<<32
+ for issue in data:
+ if issue.created < t_min:
+ t_min = issue.created
+
+ # break the time up into a tuple, then back up to Sunday
+ t_start = time.localtime(t_min)
+ t_start = time.mktime((t_start[0], t_start[1], t_start[2] - t_start[6] - 1,
+ 0, 0, 0, 0, 0, 0))
+
+ plots = { }
+ for t in _types:
+ # for each issue type, we will record per-week stats, compute a moving
+ # average of the find/fix delta, and track the number of open issues
+ plots[t] = [ [ ], MovingAverage(), 0 ]
+
+ week = 0
+ for date in range(t_start, time.time(), ONE_WEEK):
+ ### this is quite inefficient, as we could just sort by date, but
+ ### I'm being lazy
+ found, fixed = extract(data, None, date, date + ONE_WEEK - 1)
+
+ for t in _types:
+ per_week, avg, open_issues = plots[t]
+ delta = found[t] - fixed[t]
+ per_week.append((week, date,
+ found[t], -fixed[t], avg.add(delta), open_issues))
+ plots[t][2] = open_issues + delta
+
+ week = week + 1
+
+ for t in _types:
+ week_data = plots[t][0]
+ write_file(week_data, outbase, t, 'found', 2)
+ write_file(week_data, outbase, t, 'fixed', 3)
+ write_file(week_data, outbase, t, 'avg', 4)
+ write_file(week_data, outbase, t, 'open', 5)
+
+def write_file(week_data, base, type, tag, idx):
+ # 'with' guarantees the handle is flushed and closed (was leaked before)
+ with open('%s.%s.%s' % (base, tag, type), 'w') as f:
+ for info in week_data:
+ f.write('%s %s # %s\n' % (info[0], info[idx], time.ctime(info[1])))
+
+
+class MovingAverage:
+ "Helper class to compute moving averages."
+ def __init__(self, n=4):
+ self.n = n
+ self.data = [ 0 ] * n
+ def add(self, value):
+ self.data.pop(0)
+ self.data.append(float(value) / self.n)
+ return self.avg()
+ def avg(self):
+ return reduce(operator.add, self.data)
+
+
+def extract(data, details, d_start, d_end):
+ """Extract found/fixed counts for each issue type within the data range.
+
+ If DETAILS is false, then return two dictionaries:
+
+ found, fixed
+
+ ...each mapping issue types to the number of issues of that type
+ found or fixed respectively.
+
+ If DETAILS is true, return five dictionaries:
+
+ found, fixed, invalid, duplicate, other
+
+ The first is still the found issues, but the other four break down
+ the resolution into 'FIXED', 'INVALID', 'DUPLICATE', and a grab-bag
+ category for 'WORKSFORME', 'LATER', 'REMIND', and 'WONTFIX'."""
+
+ global _types
+ global _milestone_filter
+
+ found = { }
+ fixed = { }
+ invalid = { }
+ duplicate = { }
+ other = { } # "WORKSFORME", "LATER", "REMIND", and "WONTFIX"
+
+ for t in _types:
+ found[t] = fixed[t] = invalid[t] = duplicate[t] = other[t] = 0
+
+ for issue in data:
+ # filter out disrespected milestones
+ if issue.milestone in _milestone_filter:
+ continue
+
+ # record the found/fixed counts
+ if d_start <= issue.created <= d_end:
+ found[issue.type] = found[issue.type] + 1
+ if d_start <= issue.resolved <= d_end:
+ if details:
+ if issue.resolution == "FIXED":
+ fixed[issue.type] = fixed[issue.type] + 1
+ elif issue.resolution == "INVALID":
+ invalid[issue.type] = invalid[issue.type] + 1
+ elif issue.resolution == "DUPLICATE":
+ duplicate[issue.type] = duplicate[issue.type] + 1
+ else:
+ other[issue.type] = other[issue.type] + 1
+ else:
+ fixed[issue.type] = fixed[issue.type] + 1
+
+ if details:
+ return found, fixed, invalid, duplicate, other
+ else:
+ return found, fixed
+
+
+def load_data(datafile):
+ "Return a list of Issue objects for the specified data."
+ return list(map(Issue, open(datafile).readlines()))
+
+
+class Issue:
+ "Represents a single issue from the exported IssueZilla data."
+
+ def __init__(self, line):
+ row = line.strip().split('\t')
+
+ self.id = int(row[0])
+ self.type = row[1]
+ self.reporter = row[2]
+ if row[3] == 'NULL':
+ self.assigned = None
+ else:
+ self.assigned = row[3]
+ self.milestone = row[4]
+ self.created = parse_time(row[5])
+ self.resolution = row[7]
+ if not self.resolution:
+ # If the resolution is empty, then force the resolved date to None.
+ # When an issue is reopened, there will still be activity showing
+ # a "RESOLVED", thus we get a resolved date. But we simply want to
+ # ignore that date.
+ self.resolved = None
+ else:
+ self.resolved = parse_time(row[6])
+ self.summary = row[8]
+
+
+parse_time_re = re.compile('([0-9]{4})-([0-9]{2})-([0-9]{2}) '
+ '([0-9]{2}):([0-9]{2}):([0-9]{2})')
+
+def parse_time(t):
+ "Convert an exported MySQL timestamp into seconds since the epoch."
+
+ global parse_time_re
+
+ if t == 'NULL':
+ return None
+ try:
+ matches = parse_time_re.match(t)
+ return time.mktime((int(matches.group(1)),
+ int(matches.group(2)),
+ int(matches.group(3)),
+ int(matches.group(4)),
+ int(matches.group(5)),
+ int(matches.group(6)),
+ 0, 0, -1))
+ except ValueError:
+ sys.stderr.write('ERROR: bad time value: %s\n'% t)
+ sys.exit(1)
+
+def shortusage():
+ print(pydoc.synopsis(sys.argv[0]))
+ print("""
+For simple text summary:
+ find-fix.py [options] query-set-1.tsv YYYY-MM-DD YYYY-MM-DD
+
+For gnuplot presentation:
+ find-fix.py [options] query-set-1.tsv outfile
+""")
+
+def usage():
+ shortusage()
+ for x in long_opts:
+ padding_limit = 18
+ if x[0][-1:] == '=':
+ sys.stdout.write(" --%s " % x[0][:-1])
+ padding_limit = 19
+ else:
+ sys.stdout.write(" --%s " % x[0])
+ print("%s %s" % ((' ' * (padding_limit - len(x[0]))), x[1]))
+ print('''
+Option keywords may be abbreviated to any unique prefix.
+Most options require "=xxx" arguments.
+Option order is not important.''')
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/dev/iz/run-queries.sh b/tools/dev/iz/run-queries.sh
new file mode 100755
index 0000000..990caf5
--- /dev/null
+++ b/tools/dev/iz/run-queries.sh
@@ -0,0 +1,62 @@
+#!/bin/sh
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
# Require exactly three arguments: DB user, DB password, database name.
if test $# != 3; then
  echo "USAGE: $0 DATABASE_USER DATABASE_PASSWORD MYSQL_DATABASE"
  exit 1
fi

dbuser="$1"
dbpass="$2"
dbdb="$3"

# q1: one row per issue, joining reporter/assignee login names and the
# latest "RESOLVED" activity timestamp.
q1='select issues.issue_id, issue_type, user1.LOGIN_NAME "reporter",
  user2.LOGIN_NAME "assigned_to", target_milestone, creation_ts,
  max(issue_when) "resolved_ts", resolution, short_desc
  from issues left join issues_activity
  on issues.issue_id=issues_activity.issue_id and newvalue="RESOLVED",
  profiles prof1,
  profiles prof2 left join tigris.HELM_USER user1
  on user1.USER_ID=prof1.helm_user_id
  left join tigris.HELM_USER user2
  on user2.USER_ID=prof2.helm_user_id
  where prof1.userid=reporter and prof2.userid=assigned_to
  group by issues.issue_id
  order by issues.issue_id'

# q2: same as q1 plus the priority column.
q2='select issues.issue_id, issue_type, user1.LOGIN_NAME "reporter",
  user2.LOGIN_NAME "assigned_to", target_milestone, creation_ts,
  max(issue_when) "resolved_ts", resolution, short_desc,
  priority
  from issues left join issues_activity
  on issues.issue_id=issues_activity.issue_id and newvalue="RESOLVED",
  profiles prof1,
  profiles prof2 left join tigris.HELM_USER user1
  on user1.USER_ID=prof1.helm_user_id
  left join tigris.HELM_USER user2
  on user2.USER_ID=prof2.helm_user_id
  where prof1.userid=reporter and prof2.userid=assigned_to
  group by issues.issue_id
  order by issues.issue_id'

# Quote user-supplied values so whitespace/shell metacharacters survive.
# NOTE(review): --password on the command line is visible to other local
# users via 'ps'; consider an option file (~/.my.cnf) instead.
mysql --batch -e "use $dbdb; $q1" --user="$dbuser" --password="$dbpass" --silent > iz-data/query-set-1.tsv
mysql --batch -e "use $dbdb; $q2" --user="$dbuser" --password="$dbpass" --silent > iz-data/query-set-2.tsv
diff --git a/tools/dev/lock-check.py b/tools/dev/lock-check.py
new file mode 100755
index 0000000..710bf48
--- /dev/null
+++ b/tools/dev/lock-check.py
@@ -0,0 +1,114 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+### Repository lock checker. Gets an exclusive lock on the provided
+### repository, then runs db_stat to see if the lock counts have been
+### reset to 0. If not, prints the timestamp of the run and a message
+### about accumulation.
+
# Name of the Berkeley DB statistics utility invoked below; assumed to be
# found on PATH.
DB_STAT = 'db_stat'
+
+
+import sys
+import os
+import os.path
+import time
+import fcntl
+import getopt
+try:
+ my_getopt = getopt.gnu_getopt
+except AttributeError:
+ my_getopt = getopt.getopt
+
def usage_and_exit(retval):
  """Print the usage message, then exit with RETVAL.

  The message goes to stdout for a zero (success) RETVAL and to stderr
  otherwise.
  """
  stream = sys.stderr if retval else sys.stdout
  stream.write("""Usage: %s [OPTIONS] REPOS-PATH

Options:
   --help (-h)     : Show this usage message
   --non-blocking  : Don't wait for a lock that can't be immediately obtained

Obtain an exclusive lock (waiting for one unless --non-blocking is
passed) on REPOS-PATH, then check its lock usage counts.  If there is
any accumulation present, report that accumulation to stdout.
""" % (os.path.basename(sys.argv[0])))
  sys.exit(retval)
+
def main():
  """Exclusively lock the repository named on the command line, then run
  db_stat over its 'db' directory and report any non-zero "current lock"
  counters to stdout (timestamped)."""
  now_time = time.asctime()
  repos_path = None
  nonblocking = 0

  # Parse the options.
  optlist, args = my_getopt(sys.argv[1:], "h", ['non-blocking', 'help'])
  for opt, arg in optlist:
    if opt == '--help' or opt == '-h':
      usage_and_exit(0)
    if opt == '--non-blocking':
      nonblocking = 1
    else:
      # Help options exit above, so this only fires for anything else
      # getopt returned.
      usage_and_exit(1)

  # We need at least a path to work with, here.
  argc = len(args)
  if argc < 1 or argc > 1:
    # i.e. exactly one positional argument is required.
    usage_and_exit(1)
  repos_path = args[0]

  # Open in append mode so the repository's shared lock file is never
  # truncated by this tool.
  fd = open(os.path.join(repos_path, 'locks', 'db.lock'), 'a')
  try:
    # Get an exclusive lock on the repository lock file, but maybe
    # don't wait for it.
    try:
      mode = fcntl.LOCK_EX
      if nonblocking:
        mode = mode | fcntl.LOCK_NB
      fcntl.lockf(fd, mode)
    except IOError:
      sys.stderr.write("Error obtaining exclusive lock.\n")
      sys.exit(1)

    # Grab the db_stat results.
    # NOTE(review): repos_path is interpolated unquoted into a shell
    # command; paths containing spaces or metacharacters will break.
    lines = os.popen('%s -ch %s' % (DB_STAT, os.path.join(repos_path, 'db')))
    log_lines = []
    for line in lines:
      # Assumes db_stat emits "<count>\t<description>" lines — TODO confirm
      # against the installed Berkeley DB version.
      pieces = line.split('\t')
      if (pieces[1].find('current lock') != -1) and (int(pieces[0]) > 0):
        log = ''
        if not len(log_lines):
          # First accumulation found: emit a timestamped header.
          log = log + "[%s] Lock accumulation for '%s'\n" \
                % (now_time, repos_path)
        log = log + ' ' * 27
        log = log + "%s\t%s" % (pieces[0], pieces[1])
        log_lines.append(log)
    if len(log_lines):
      sys.stdout.write(''.join(log_lines))
  finally:
    # Unlock the lockfile
    fcntl.lockf(fd, fcntl.LOCK_UN)
    fd.close()
+
# Script entry point.
if __name__ == "__main__":
  main()
diff --git a/tools/dev/log_revnum_change_asf.py b/tools/dev/log_revnum_change_asf.py
new file mode 100755
index 0000000..30f5507
--- /dev/null
+++ b/tools/dev/log_revnum_change_asf.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""
+Script to change old (svn.collab.net) revision numbers in subversion log
+messages to new ASF subversion repository revision numbers.
+"""
+
# Usage text handed to OptionParser below (shown by --help).
USAGE = """python log_revnum_change_asf.py [OPTION]... URL

Change the revision numbers relatively in the log messages of new ASF
subversion repository.
"""
+
+from csvn.repos import RemoteRepository
+from csvn.auth import User
+import csvn.core
+from optparse import OptionParser
+import sys
+import re
+
def repl_newrev(matchobj):
  """re.sub() callback: map an old svn.collab.net revision token ('rNNN')
  to its ASF-repository equivalent.

  Revisions up to 45000 are shifted by 840074; later ones (already ASF
  revisions) are returned unchanged.
  """
  token = matchobj.group(0)
  if token:
    old_rev = int(token[1:])
    if old_rev <= 45000:
      return 'r%d' % (old_rev + 840074)
    return 'r%d' % old_rev
+
def main():
  """
  Script execution starts here.

  Parse -u/-p/-r options plus a repository URL, then rewrite every rNNN
  token in each revision's svn:log message via the csvn bindings.
  """

  parser = OptionParser(usage=USAGE)
  parser.add_option("-u", "", dest="username",
                    help="commit the changes as USERNAME")
  parser.add_option("-p", "", dest="password",
                    help="commit the changes with PASSWORD")
  parser.add_option("-r", "", dest="rev",
                    help="revision range")

  (options, args) = parser.parse_args()

  if len(args) != 1:
    parser.print_help()
    sys.exit(1)

  csvn.core.svn_cmdline_init("", csvn.core.stderr)
  repos_url = args[0]
  # Accept '-r N:M', '-r N', or (no -r) the full range 1:HEAD.
  # NOTE(review): a value with two colons would raise ValueError on the
  # two-element unpack below.
  revs = options.rev
  if revs and ":" in revs:
    [start_rev, end_rev] = revs.split(":")
  elif revs:
    start_rev = revs
    end_rev = revs
  else:
    start_rev = 1
    end_rev = "HEAD"

  session = RemoteRepository(repos_url, user=User(options.username,
                                                  options.password))

  # Resolve symbolic HEAD to a concrete revision number.
  if end_rev == "HEAD":
    end_rev = session.latest_revnum()
  if start_rev == "HEAD":
    start_rev = session.latest_revnum()
  start_rev = int(start_rev)
  end_rev = int(end_rev)

  for entry in session.log(start_rev, end_rev):
    new_log = re.sub(r'(r\d+)', repl_newrev, entry.message)
    # force=True presumably bypasses the usual revprop-change safeguard
    # for svn:log — confirm against the csvn API.
    session.revprop_set(propname='svn:log',
                        propval=new_log,
                        revnum=entry.revision,
                        force=True)
+
# Script entry point.
if __name__ == "__main__":
  main()
diff --git a/tools/dev/merge-graph.py b/tools/dev/merge-graph.py
new file mode 100755
index 0000000..f587de8
--- /dev/null
+++ b/tools/dev/merge-graph.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+
# Command-line synopsis and description used in the usage/error output below.
args_message = '[-f png|svg|gif|dia... [-f ...]] GRAPH_CONFIG_FILE...'
help_message = """Produce pretty graphs representing branches and merging.
For each config file specified, construct a graph and write it as a PNG file
(or other graphical file formats)."""
+
+import sys
+import getopt
+from mergegraph import MergeDot
+
+
# If run as a program, process each input filename as a graph config file.
if __name__ == '__main__':
  # 'format=' declares that the long form of -f takes a required value;
  # previously it was declared as 'format' (no value) and silently ignored.
  optlist, args = getopt.getopt(sys.argv[1:], 'f:', ['format='])

  prog_name = sys.argv[0]
  if not args:
    usage = '%s: usage: "%s %s"\n' % (prog_name, prog_name, args_message)
    sys.stderr.write(usage)
    sys.exit(1)

  # Collect every requested output format; default to PNG.
  formats = []
  for opt, opt_arg in optlist:
    if opt in ('-f', '--format'):
      formats.append(opt_arg)

  if not formats:
    formats.append('png')

  for config_filename in args:
    sys.stdout.write("%s: reading '%s', " % (prog_name, config_filename))
    graph = MergeDot(config_filename, rankdir='LR', dpi='72')
    for format in formats:
      filename = '%s.%s' % (graph.basename, format)
      sys.stdout.write("writing '%s' " % filename)
      graph.save(format=format, filename=filename)
    # End the progress line.  A bare 'print' (the original code) is a no-op
    # expression under Python 3; it must be called to emit the newline.
    print()
diff --git a/tools/dev/mergegraph/__init__.py b/tools/dev/mergegraph/__init__.py
new file mode 100644
index 0000000..6dfb7d9
--- /dev/null
+++ b/tools/dev/mergegraph/__init__.py
@@ -0,0 +1,20 @@
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+
+from mergegraph import MergeDot, MergeGraph, MergeSubgraph
diff --git a/tools/dev/mergegraph/mergegraph.py b/tools/dev/mergegraph/mergegraph.py
new file mode 100644
index 0000000..c2e5523
--- /dev/null
+++ b/tools/dev/mergegraph/mergegraph.py
@@ -0,0 +1,313 @@
+#!/usr/bin/env python
+
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+
+# Config file format:
# Inline sample of the config-file syntax parsed by MergeDot.read_config();
# kept purely as documentation data (never executed).
example = """
  [graph]
  filename = merge-sync-1.png
  title = Sync Merge: CC vs SVN
  # Branches: (branch name, branched from node, first rev, last rev).
  branches = [
      ('A', 'O0', 1, 4),
      ('O', None, 0, 0),
      ('B', 'O0', 1, 5)
    ]
  # Changes: nodes in which a change was committed; merge targets need not
  # be listed here.
  changes = [
      'A1', 'A2', 'A3', 'A4',
      'B1', 'B2', 'B3', 'B4', 'B5'
    ]
  # Merges: (base node, source-right node, target node, label).
  # Base is also known as source-left.
  merges = [
      ('O0', 'A:1', 'B3', 'sync'),
      ('A2', 'A:3', 'B5', 'sync'),
    ]
  # Annotations for nodes: (node, annotation text).
  annotations = [
      ('A2', 'cc:YCA')
    ]
"""
+
+# Notes about different kinds of merge.
+#
+# A basic 3-way merge is ...
+#
+# The ClearCase style of merge is a 3-way merge.
+#
+# The Subversion style of merge (that is, one phase of a Subversion merge)
+# is a three-way merge with its base (typically the YCA) on the source branch.
+
+
+import sys
+import pydot
+from pydot import Node, Edge
+
+
def mergeinfo_to_node_list(mi):
  """Convert a mergeinfo string such as '/foo:1,3-5*' into a list of
  node names such as ['foo1', 'foo3', 'foo4', 'foo5'].

  MI may be None or '' (returns []).  A leading slash on the path is
  stripped (previously a known deficiency), so slash-prefixed mergeinfo
  paths and bare branch names ('A:1') both yield plain node names.
  """
  l = []
  if mi:
    for mi_str in mi.split(' '):
      path, ranges = mi_str.split(':')
      if path.startswith('/'):
        path = path[1:]
      for r in ranges.split(','):
        if r.endswith('*'):
          # TODO: store & use this 'non-inheritable' flag
          # Remove the flag
          r = r[:-1]
        # A range is either 'N' or 'N-M'.
        rlist = r.split('-')
        r1 = int(rlist[0])
        if len(rlist) == 2:
          r2 = int(rlist[1])
        else:
          r2 = r1
        for rev in range(r1, r2 + 1):
          l.append(path + str(rev))
  return l
+
+
class MergeGraph(pydot.Graph):
  """Base class, not intended for direct use.  Use MergeDot for the main
  graph and MergeSubgraph for a subgraph.

  Note: methods use 'graph' where 'self' is conventional.
  """

  def mk_origin_node(graph, name, label):
    """Add a branch-origin node (plain text, no box) to the graph."""
    graph.add_node(Node(name, label=label, shape='plaintext'))

  def mk_invis_node(graph, name):
    """Add an invisible placeholder node (keeps branch rows aligned)."""
    graph.add_node(Node(name, style='invis'))

  def mk_node(graph, name, label=None):
    """Add a revision node to the graph, if not already present.  Nodes
    listed in graph.changes get a label; others are drawn grey, unlabelled."""
    if not graph.get_node(name):
      if not label:
        label = name
      if name in graph.changes:
        graph.add_node(Node(name, label=label))
      else:
        graph.add_node(Node(name, color='grey', label=''))

  def mk_merge_target(graph, target_node, important):
    """Add a merge target node to the graph; red when IMPORTANT."""
    if important:
      color = 'red'
    else:
      color = 'black'
    graph.add_node(Node(target_node, color=color, fontcolor=color, style='bold'))

  def mk_edge(graph, name1, name2, **attrs):
    """Add an ordinary (same-branch succession) edge to the graph."""
    graph.add_edge(Edge(name1, name2, dir='none', style='dotted', color='grey', **attrs))

  def mk_br_edge(graph, name1, name2):
    """Add a branch-creation edge to the graph"""
    # Constraint=false to avoid the Y-shape skewing the nice parallel branch lines
    graph.mk_edge(name1, name2, constraint='false')

  def mk_merge_edge(graph, src_node, tgt_node, kind, label, important):
    """Add a merge edge (source-right -> target); dashed for cherry-picks,
    red when IMPORTANT."""
    if important:
      color = 'red'
    else:
      color = 'grey'
    e = Edge(src_node, tgt_node, constraint='false',
             label='"' + label + '"',
             color=color, fontcolor=color,
             style='bold')
    if kind.startswith('cherry'):
      e.set_style('dashed')
    graph.add_edge(e)

  def mk_mergeinfo_edge(graph, base_node, src_node, important):
    """Add an edge spanning the merged range: odot marks BASE_NODE
    (source-left), tee marks SRC_NODE (source-right)."""
    if important:
      color = 'red'
    else:
      color = 'grey'
    graph.add_edge(Edge(base_node, src_node,
                        dir='both', arrowtail='odot', arrowhead='tee',
                        color=color, constraint='false'))

  def mk_invis_edge(graph, name1, name2):
    """Add an invisible edge to the graph"""
    graph.add_edge(Edge(name1, name2, style='invis'))

  def add_merge(graph, merge, important):
    """Add one merge tuple (base, source-right, target, kind) to the graph."""
    base_node, src_node, tgt_node, kind = merge

    if base_node and src_node: # and not kind.startswith('cherry'):
      graph.mk_mergeinfo_edge(base_node, src_node, important)

    # Merge target node
    graph.mk_merge_target(tgt_node, important)

    # Merge edge
    graph.mk_merge_edge(src_node, tgt_node, kind, kind, important)

  def add_annotation(graph, node, label, color='lightblue'):
    """Add a graph node that serves as an annotation to a normal node.
    More than one annotation can be added to the same normal node.
    """
    subg_name = node + '_annotations'

    def get_subgraph(graph, name):
      """Equivalent to pydot.Graph.get_subgraph() when there is no more than
      one subgraph of the given name, but working around a bug in
      pydot.Graph.get_subgraph().
      """
      for subg in graph.get_subgraph_list():
        if subg.get_name() == name:
          return subg
      return None

    g = get_subgraph(graph, subg_name)
    if not g:
      g = pydot.Subgraph(subg_name, rank='same')
      graph.add_subgraph(g)

    # Make a unique node name by appending '_' until unused.
    ann_node = node + '_'
    while g.get_node(ann_node):
      ann_node = ann_node + '_'
    g.add_node(Node(ann_node, shape='box', style='filled', color=color,
                    label='"' + label + '"'))
    g.add_edge(Edge(ann_node, node, style='solid', color=color,
                    dir='none', constraint='false'))
+
class MergeSubgraph(MergeGraph, pydot.Subgraph):
  """A MergeGraph rendered as a pydot Subgraph; used for the per-branch
  rows built in MergeDot.construct()."""
  def __init__(graph, **attrs):
    """Initialize both bases; ATTRS are passed through to pydot.Subgraph."""
    MergeGraph.__init__(graph)
    pydot.Subgraph.__init__(graph, **attrs)
+
class MergeDot(MergeGraph, pydot.Dot):
  """Top-level drawable merge graph: reads its description from a config
  file (or keyword arguments) and immediately constructs all nodes/edges.

  # TODO: In the 'merges' input, find the predecessor automatically.
  """
  def __init__(graph, config_filename=None,
               filename=None, title=None, branches=None, changes=None,
               merges=[], annotations=[], **attrs):
    """Return a new MergeDot graph generated from a config file or args."""
    # NOTE(review): the [] defaults are shared mutable objects; harmless
    # only while never mutated in place — confirm before changing that.
    MergeGraph.__init__(graph)
    pydot.Dot.__init__(graph, **attrs)

    if config_filename:
      graph.read_config(config_filename)
    else:
      graph.filename = filename
      graph.title = title
      graph.branches = branches
      graph.changes = changes
      graph.merges = merges
      graph.annotations = annotations

    graph.construct()

  def read_config(graph, config_filename):
    """Initialize a MergeDot graph's input data from a config file."""
    import ConfigParser  # Python 2 module name ('configparser' in Python 3)
    # Default output basename: the config filename, minus any '.txt'.
    if config_filename.endswith('.txt'):
      default_basename = config_filename[:-4]
    else:
      default_basename = config_filename

    config = ConfigParser.SafeConfigParser({ 'basename': default_basename,
                                             'title': None,
                                             'merges': '[]',
                                             'annotations': '[]' })
    files_read = config.read(config_filename)
    if len(files_read) == 0:
      sys.stderr.write('graph: unable to read graph config from "' + config_filename + '"\n')
      sys.exit(1)
    graph.basename = config.get('graph', 'basename')
    graph.title = config.get('graph', 'title')
    # SECURITY: eval() executes arbitrary Python from the config file;
    # only run this on trusted, locally-authored configs.
    graph.branches = eval(config.get('graph', 'branches'))
    graph.changes = eval(config.get('graph', 'changes'))
    graph.merges = eval(config.get('graph', 'merges'))
    graph.annotations = eval(config.get('graph', 'annotations'))

  def construct(graph):
    """Build all nodes, edges and annotations from the parsed input lists."""
    # Origin nodes (done first, in an attempt to set the order)
    for br, orig, r1, head in graph.branches:
      name = br + '0'
      if r1 > 0:
        graph.mk_origin_node(name, br)
      else:
        graph.mk_node(name, label=br)

    # Edges and target nodes for merges
    for merge in graph.merges:
      # Emphasize the last merge, as it's the important one
      important = (merge == graph.merges[-1])
      graph.add_merge(merge, important)

    # Parallel edges for basic lines of descent
    for br, orig, r1, head in graph.branches:
      sub_g = MergeSubgraph(ordering='out')
      for i in range(1, head + 1):
        prev_n = br + str(i - 1)
        this_n = br + str(i)

        # Normal edges and nodes; revisions before r1 are placeholders so
        # all branch rows line up.
        if i < r1:
          graph.mk_invis_node(this_n)
        else:
          graph.mk_node(this_n)
        if i <= r1:
          graph.mk_invis_edge(prev_n, this_n)
        else:
          graph.mk_edge(prev_n, this_n)

      # Branch creation edges
      if orig:
        sub_g.mk_br_edge(orig, br + str(r1))

      graph.add_subgraph(sub_g)

    # Annotations
    for node, label in graph.annotations:
      graph.add_annotation(node, label)

    # A title for the graph (added last so it goes at the top)
    if graph.title:
      graph.add_node(Node('title', shape='plaintext', label='"' + graph.title + '"'))

  def save(graph, format='png', filename=None):
    """Save this merge graph to the given file format. If filename is None,
    construct a filename from the basename of the original file (as passed
    to the constructor and then stored in graph.basename) and the suffix
    according to the given format.
    """
    if not filename:
      filename = graph.basename + '.' + format
    if format == 'sh':
      # Special case: emit a shell script replaying the graph with svn.
      import save_as_sh
      save_as_sh.write_sh_file(graph, filename)
    else:
      pydot.Dot.write(graph, filename, format=format)
diff --git a/tools/dev/mergegraph/save_as_sh.py b/tools/dev/mergegraph/save_as_sh.py
new file mode 100644
index 0000000..c5124ca
--- /dev/null
+++ b/tools/dev/mergegraph/save_as_sh.py
@@ -0,0 +1,137 @@
+#!/usr/bin/env python
+
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+
+
+# This module writes a sequence of 'svn' commands to a file, that when
+# run will perform the branching and merging described by a given MergeDot
+# graph description object.
+
+
def shebang_line(out):
  """Emit the '#!/bin/sh' interpreter line to the file-like object OUT."""
  out.write('#!/bin/sh\n')
+
def command(out, cmd, *args):
  """Write the shell command CMD with the arguments ARGS to the file-like
  object OUT, space-separated and newline-terminated.
  """
  words = (cmd,) + args
  out.write(' '.join(words) + '\n')
+
def svn(out, subcmd, *args):
  """Write an 'svn SUBCMD ARGS...' command line to the file-like object
  OUT, delegating formatting to command().
  """
  command(out, 'svn', subcmd, *args)
+
def comment(out, text):
  """Write TEXT as a shell comment line to the file-like object OUT."""
  out.write('# %s\n' % (text,))
+
def node_branch(node_name):
  """Return the branch name encoded in NODE_NAME (its first character).
  ### TODO: multi-char names.
  """
  return node_name[0:1]
+
def node_url(node_name):
  """Return NODE_NAME's branch as a command-line repo-relative URL
  ('^/BRANCH').
  """
  return '^/' + node_branch(node_name)
+
def node_rev(node_name):
  """Return the revision number (an int) for NODE_NAME: its numeric
  suffix plus one.
  ### TODO: multi-char names.
  """
  return 1 + int(node_name[1:])
+
def add(revs, node_name, action, *args):
  """Record (ACTION, ARGS) under NODE_NAME's revision number in the dict
  REVS, creating the per-revision list on first use.
  """
  revnum = node_rev(node_name)
  revs.setdefault(revnum, []).append((action, args))
+
def write_recipe(graph, out):
  """Write out a sequence of svn commands that will execute the branching
  and merging shown in GRAPH.  Write to the file-like object OUT.

  First builds a revnum -> [(action, args)] table from the graph's
  branches, merges and changes, then replays it in revision order with
  one commit per revision.
  """
  revs = {} # keyed by revnum

  # Branch roots: a copy from the origin branch, or a fresh project.
  for br, orig, r1, head in graph.branches:
    if orig:
      add(revs, br + str(r1), 'copy', orig, br)
    else:
      add(revs, br + str(r1), 'mkproj', br)

  for base_node, src_node, tgt_node, kind in graph.merges:
    add(revs, tgt_node, 'merge', src_node, tgt_node, kind)

  for node_name in graph.changes:
    # Originally the 'changes' list could have entries that overlapped with
    # merges. We must either disallow that or filter out such changes here.
    #if not node_name in revs:
    add(revs, node_name, 'modify', node_name)

  # Execute the actions for each revision in turn.
  for r in sorted(revs.keys()):
    comment(out, 'start r' + str(r))
    for action, params in revs[r]:
      #comment(out, '(' + action + ' ' + params + ')')
      if action == 'mkproj':
        (br,) = params
        svn(out, 'mkdir', br, br + '/created_in_' + br)
      elif action == 'copy':
        (orig, br) = params
        svn(out, 'copy', '-r' + str(node_rev(orig)), node_branch(orig), br)
      elif action == 'modify':
        # A "change" is simulated as a new directory inside the branch.
        (node_name,) = params
        svn(out, 'mkdir', node_branch(node_name) + '/new_in_' + node_name)
      elif action == 'merge':
        (src_node, tgt_node, kind) = params
        assert node_rev(tgt_node) == r
        svn(out, 'update')
        if kind == 'cherry':
          # Cherry-pick: merge only the single source revision.
          svn(out, 'merge',
              '-c' + str(node_rev(src_node)), node_url(src_node),
              node_branch(tgt_node))
        elif kind.startswith('reint'):
          svn(out, 'merge', '--reintegrate',
              node_url(src_node) + '@' + str(node_rev(src_node)),
              node_branch(tgt_node))
        else:
          # Plain (sync) merge up to the source revision.
          svn(out, 'merge',
              node_url(src_node) + '@' + str(node_rev(src_node)),
              node_branch(tgt_node))
      else:
        raise Exception('unknown action: %s' % action)
    svn(out, 'commit', '-m', 'r' + str(r))
+
def write_sh_file(graph, filename):
  """Write a file containing a sequence of 'svn' commands that when run will
  perform the branching and merging described by the MergeDot object
  GRAPH.  Write to a new file named FILENAME.
  """
  # 'with' guarantees the file is closed even if write_recipe() raises
  # (the original open()/close() pair leaked the handle on error).
  with open(filename, 'w') as out_stream:
    shebang_line(out_stream)
    write_recipe(graph, out_stream)
diff --git a/tools/dev/min-includes.sh b/tools/dev/min-includes.sh
new file mode 100755
index 0000000..53dfb84
--- /dev/null
+++ b/tools/dev/min-includes.sh
@@ -0,0 +1,80 @@
+#!/bin/sh
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# Attempt to figure out the minimum set of includes for our header files.
+#
+# ### this is incomplete. it merely lists the header files in order from
+# ### "no dependencies on other svn headers" to the larger header files
+# ### which have dependencies. manually working through the headers in
+# ### this order will minimize includes.
+#
+# Each header file is test-compiled to ensure that it has enough headers.
+# Of course, this could be false-positive because another header that
+# has been included has further included something to enable compilation
+# of the header in question. More sophisticated testing (e.g. filtering
+# includes out of the included header) would be necessary for detection.
+#
+
files="*.h private/*.h"
deps="deps.$$"

INCLUDES="-I. -I.. -I/usr/include/apr-1 -I/usr/include/apache2"

# Build the dependency list: one '<file> <included-svn-header>' pair per
# line, ignoring svn_private_config.h.
rm -f "$deps"
for f in $files ; do
  sed -n "s%#include \"\(svn_[a-z0-9_]*\.h\)\".*%$f \1%p" $f | fgrep -v svn_private_config.h >> "$deps"
done


# Test-compile $header on its own to check it includes everything it needs.
# POSIX function syntax: the previous 'function process_file ()' form is a
# bashism that fails under /bin/sh where sh is dash or ksh.
process_file ()
{
  echo "Processing $header"

  echo "#include \"$header\"" > "$deps".c
  gcc -o /dev/null -S $INCLUDES "$deps".c

  ### monkey the includes and recompile to find the minimal set
}

# Repeatedly pick a header that no remaining header depends on, process it,
# then remove it from both the dependency list and the work list.
while test -s "$deps" ; do
#wc -l $deps

  for header in $files ; do

    if grep -q "^$header" "$deps" ; then
      continue
    fi

    process_file

    fgrep -v "$header" "$deps" > "$deps".new
    mv "$deps".new "$deps"

    files="`echo $files | sed s%$header%%`"
    break
  done

done

# Whatever is left has no recorded dependencies; process those too.
for header in $files ; do
  process_file
done
diff --git a/tools/dev/mklog.py b/tools/dev/mklog.py
new file mode 100755
index 0000000..a60712e
--- /dev/null
+++ b/tools/dev/mklog.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# Read a diff from stdin, and output a log message template to stdout.
+# Hint: It helps if the diff was generated using 'svn diff -x -p'
+#
+# Note: Don't completely trust the generated log message. This script
+# depends on the correct output of 'diff -x -p', which can sometimes get
+# confused.
+
+import sys, re
+
# Captures the function name from a hunk header produced by 'diff -p',
# e.g. '@@ -10,6 +10,8 @@ my_func(int x)'.  Raw string so '\(' stays a
# literal backslash (invalid-escape warning on modern Pythons otherwise).
rm = re.compile(r'@@.*@@ (.*)\(.*$')

def main():
  """Read a unified diff on stdin; print a log-message template to stdout."""
  # Fix: initialize before the loop.  A '@@' hunk line appearing before any
  # 'Index:' line used to raise NameError on the comparison below.
  prev_funcname = ''
  for line in sys.stdin:
    if line[0:6] == 'Index:':
      print('\n* %s' % line[7:-1])
      prev_funcname = ''
      continue
    match = rm.search(line[:-1])
    if match:
      # Collapse consecutive hunks that touch the same function.
      if prev_funcname == match.group(1):
        continue
      print('  (%s):' % match.group(1))
      prev_funcname = match.group(1)
+
+
# Script entry point.
if __name__ == '__main__':
  main()
diff --git a/tools/dev/mlpatch.py b/tools/dev/mlpatch.py
new file mode 100755
index 0000000..d74d820
--- /dev/null
+++ b/tools/dev/mlpatch.py
@@ -0,0 +1,167 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+# mlpatch.py: Run with no arguments for usage
+
+import sys, os
+import sgmllib
+try:
+ # Python >=3.0
+ from html.entities import entitydefs
+ from urllib.request import urlopen as urllib_request_urlopen
+except ImportError:
+ # Python <3.0
+ from htmlentitydefs import entitydefs
+ from urllib2 import urlopen as urllib_request_urlopen
+import fileinput
+
+CHUNKSIZE = 8 * 1024
+
+class MyParser(sgmllib.SGMLParser):
+ def __init__(self):
+ self.baseclass = sgmllib.SGMLParser
+ self.baseclass.__init__(self)
+ self.entitydefs = entitydefs
+ self.entitydefs["nbsp"] = " "
+ self.inbody = False
+ self.complete_line = False
+ self.discard_gathered()
+
+ def discard_gathered(self):
+ self.gather_data = False
+ self.gathered_data = ""
+
+ def noop(self):
+ pass
+
+ def out(self, data):
+ sys.stdout.write(data)
+
+ def handle_starttag(self, tag, method, attrs):
+ if not self.inbody: return
+ self.baseclass.handle_starttag(self, tag, method, attrs)
+
+ def handle_endtag(self, tag, method):
+ if not self.inbody: return
+ self.baseclass.handle_endtag(self, tag, method)
+
+ def handle_data(self, data):
+ if not self.inbody: return
+ data = data.replace('\n','')
+ if len(data) == 0: return
+ if self.gather_data:
+ self.gathered_data += data
+ else:
+ if self.complete_line:
+ if data[0] in ('+', '-', ' ', '#') \
+ or data.startswith("Index:") \
+ or data.startswith("@@ ") \
+ or data.startswith("======"):
+ # Real new line
+ self.out('\n')
+ else:
+ # Presume that we are wrapped
+ self.out(' ')
+ self.complete_line = False
+ self.out(data)
+
+ def handle_charref(self, ref):
+ if not self.inbody: return
+ self.baseclass.handle_charref(self, ref)
+
+ def handle_entityref(self, ref):
+ if not self.inbody: return
+ self.baseclass.handle_entityref(self, ref)
+
+ def handle_comment(self, comment):
+ if comment == ' body="start" ':
+ self.inbody = True
+ elif comment == ' body="end" ':
+ self.inbody = False
+
+ def handle_decl(self, data):
+ if not self.inbody: return
+ print("DECL: " + data)
+
+ def unknown_starttag(self, tag, attrs):
+ if not self.inbody: return
+ print("UNKTAG: %s %s" % (tag, attrs))
+
+ def unknown_endtag(self, tag):
+ if not self.inbody: return
+ print("UNKTAG: /%s" % (tag))
+
+ def do_br(self, attrs):
+ self.complete_line = True
+
+ def do_p(self, attrs):
+ if self.complete_line:
+ self.out('\n')
+ self.out(' ')
+ self.complete_line = True
+
+ def start_a(self, attrs):
+ self.gather_data = True
+
+ def end_a(self):
+ self.out(self.gathered_data.replace('_at_', '@'))
+ self.discard_gathered()
+
+ def close(self):
+ if self.complete_line:
+ self.out('\n')
+ self.baseclass.close(self)
+
+
+def main():
+ if len(sys.argv) == 1:
+ sys.stderr.write(
+ "usage: mlpatch.py dev|users year month msgno > foobar.patch\n" +
+ "example: mlpatch.py dev 2005 01 0001 > issue-XXXX.patch\n" +
+ """
+ Very annoyingly, the http://svn.haxx.se/ subversion mailing list archives
+ mangle inline patches, and provide no raw message download facility
+ (other than for an entire month's email as an mbox).
+
+ So, I wrote this script, to demangle them. It's not perfect, as it has to
+ guess about whitespace, but it does an acceptable job.\n""")
+ sys.exit(0)
+ elif len(sys.argv) != 5:
+ sys.stderr.write("error: mlpatch.py: Bad parameters - run with no "
+ + "parameters for usage\n")
+ sys.exit(1)
+ else:
+ list, year, month, msgno = sys.argv[1:]
+ url = "http://svn.haxx.se/" \
+ + "%(list)s/archive-%(year)s-%(month)s/%(msgno)s.shtml" % locals()
+ print("MsgUrl: " + url)
+ msgfile = urllib_request_urlopen(url)
+ p = MyParser()
+ buffer = msgfile.read(CHUNKSIZE)
+ while buffer:
+ p.feed(buffer)
+ buffer = msgfile.read(CHUNKSIZE)
+ p.close()
+ msgfile.close()
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/dev/normalize-dump.py b/tools/dev/normalize-dump.py
new file mode 100755
index 0000000..10cde4e
--- /dev/null
+++ b/tools/dev/normalize-dump.py
@@ -0,0 +1,137 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+import sys
+import re
+
+header_re = re.compile(r'^([^:]*): ?(.*)$')
+
+class NodePath:
+ def __init__(self, path, headers):
+ self.path = path
+ self.headers = headers
+
+ def dump(self):
+ print((' ' * 3) + self.path)
+ headers = sorted(self.headers.keys())
+ for header in headers:
+ print((' ' * 6) + header + ': ' + self.headers[header])
+
+
+def dump_revision(rev, nodepaths):
+ sys.stderr.write('* Normalizing revision ' + rev + '...')
+ print('Revision ' + rev)
+ paths = sorted(nodepaths.keys())
+ for path in paths:
+ nodepath = nodepaths[path]
+ nodepath.dump()
+ sys.stderr.write('done\n')
+
+
+
+def parse_header_block(fp):
+ headers = {}
+ while True:
+ line = fp.readline()
+ if line == '':
+ return headers, 1
+ line = line.strip()
+ if line == '':
+ return headers, 0
+ matches = header_re.match(line)
+ if not matches:
+ raise Exception('Malformed header block')
+ headers[matches.group(1)] = matches.group(2)
+
+
+def parse_file(fp):
+ nodepaths = {}
+ current_rev = None
+
+ while True:
+ # Parse a block of headers
+ headers, eof = parse_header_block(fp)
+
+ # This is a revision header block
+ if 'Revision-number' in headers:
+
+ # If there was a previous revision, dump it
+ if current_rev:
+ dump_revision(current_rev, nodepaths)
+
+ # Reset the data for this revision
+ current_rev = headers['Revision-number']
+ nodepaths = {}
+
+ # Skip the contents
+ prop_len = headers.get('Prop-content-length', 0)
+ fp.read(int(prop_len))
+
+ # This is a node header block
+ elif 'Node-path' in headers:
+
+ # Make a new NodePath object, and add it to the
+ # dictionary thereof
+ path = headers['Node-path']
+ node = NodePath(path, headers)
+ nodepaths[path] = node
+
+ # Skip the content
+ text_len = headers.get('Text-content-length', 0)
+ prop_len = headers.get('Prop-content-length', 0)
+ fp.read(int(text_len) + int(prop_len))
+
+ # Not a revision, not a node -- if we've already seen at least
+ # one revision block, we are in an errorful state.
+ elif current_rev and len(headers.keys()):
+ raise Exception('Header block from outta nowhere')
+
+ if eof:
+ if current_rev:
+ dump_revision(current_rev, nodepaths)
+ break
+
+def usage():
+ print('Usage: ' + sys.argv[0] + ' [DUMPFILE]')
+ print('')
+ print('Reads a Subversion dumpfile from DUMPFILE (or, if not provided,')
+ print('from stdin) and normalizes the metadata contained therein,')
+ print('printing summarized and sorted information. This is useful for')
+ print('generating data about dumpfiles in a diffable fashion.')
+ sys.exit(0)
+
+def main():
+ if len(sys.argv) > 1:
+ if sys.argv[1] == '--help':
+ usage()
+ fp = open(sys.argv[1], 'rb')
+ else:
+ fp = sys.stdin
+ parse_file(fp)
+
+
+if __name__ == '__main__':
+ main()
+
+
+
+
diff --git a/tools/dev/po-merge.py b/tools/dev/po-merge.py
new file mode 100755
index 0000000..e63a739
--- /dev/null
+++ b/tools/dev/po-merge.py
@@ -0,0 +1,197 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+import os, re, sys
+
+msgstr_re = re.compile('msgstr\[\d+\] "')
+
+def parse_translation(f):
+ """Read a single translation entry from the file F and return a
+ tuple with the comments, msgid, msgid_plural and msgstr. The comments is
+ returned as a list of lines which do not end in new-lines. The msgid is
+ string. The msgid_plural is string or None. The msgstr is a list of
+ strings. The msgid, msgid_plural and msgstr strings can contain embedded
+ newlines"""
+ line = f.readline()
+
+ # Parse comments
+ comments = []
+ while True:
+ if line.strip() == '' or line[:2] == '#~':
+ return comments, None, None, None
+ elif line[0] == '#':
+ comments.append(line[:-1])
+ else:
+ break
+ line = f.readline()
+
+ # Parse msgid
+ if line[:7] != 'msgid "' or line[-2] != '"':
+ raise RuntimeError("parse error")
+ msgid = line[6:-1]
+ while True:
+ line = f.readline()
+ if line[0] != '"':
+ break
+ msgid = msgid[:-1] + line[1:-1]
+
+ # Parse optional msgid_plural
+ msgid_plural = None
+ if line[:14] == 'msgid_plural "':
+ if line[-2] != '"':
+ raise RuntimeError("parse error")
+ msgid_plural = line[13:-1]
+ while True:
+ line = f.readline()
+ if line[0] != '"':
+ break
+ msgid_plural = msgid_plural[:-1] + line[1:-1]
+
+ # Parse msgstr
+ msgstr = []
+ if not msgid_plural:
+ if line[:8] != 'msgstr "' or line[-2] != '"':
+ raise RuntimeError("parse error")
+ msgstr.append(line[7:-1])
+ while True:
+ line = f.readline()
+ if len(line) == 0 or line[0] != '"':
+ break
+ msgstr[0] += '\n' + line[:-1]
+ else:
+ if line[:7] != 'msgstr[' or line[-2] != '"':
+ raise RuntimeError("parse error")
+ i = 0
+ while True:
+ matched_msgstr = msgstr_re.match(line)
+ if matched_msgstr:
+ matched_msgstr_len = len(matched_msgstr.group(0))
+ msgstr.append(line[matched_msgstr_len-1:-1])
+ else:
+ break
+ while True:
+ line = f.readline()
+ if len(line) == 0 or line[0] != '"':
+ break
+ msgstr[i] += '\n' + line[:-1]
+ i += 1
+
+ if line.strip() != '':
+ raise RuntimeError("parse error")
+
+ return comments, msgid, msgid_plural, msgstr
+
+def split_comments(comments):
+ """Split COMMENTS into flag comments and other comments. Flag
+ comments are those that begin with '#,', e.g. '#,fuzzy'."""
+ flags = []
+ other = []
+ for c in comments:
+ if len(c) > 1 and c[1] == ',':
+ flags.append(c)
+ else:
+ other.append(c)
+ return flags, other
+
+def main(argv):
+ if len(argv) != 2:
+ argv0 = os.path.basename(argv[0])
+ sys.exit('Usage: %s <lang.po>\n'
+ '\n'
+ 'This script will replace the translations and flags in lang.po (LF line endings)\n'
+ 'with the translations and flags in the source po file read from standard input.\n'
+ 'Strings that are not found in the source file are left untouched.\n'
+ 'A backup copy of lang.po is saved as lang.po.bak.\n'
+ '\n'
+ 'Example:\n'
+ ' svn cat http://svn.apache.org/repos/asf/subversion/trunk/subversion/po/sv.po | \\\n'
+ ' %s sv.po' % (argv0, argv0))
+
+ # Read the source po file into a hash
+ source = {}
+ while True:
+ comments, msgid, msgid_plural, msgstr = parse_translation(sys.stdin)
+ if not comments and msgid is None:
+ break
+ if msgid is not None:
+ source[msgid] = msgstr, split_comments(comments)[0]
+
+ # Make a backup of the output file, open the copy for reading
+ # and the original for writing.
+ os.rename(argv[1], argv[1] + '.bak')
+ infile = open(argv[1] + '.bak')
+ outfile = open(argv[1], 'w')
+
+ # Loop through the original and replace stuff as we go
+ first = 1
+ string_count = 0
+ update_count = 0
+ untranslated = 0
+ fuzzy = 0
+ while True:
+ comments, msgid, msgid_plural, msgstr = parse_translation(infile)
+ if not comments and msgid is None:
+ break
+ if not first:
+ outfile.write('\n')
+ first = 0
+ if msgid is None:
+ outfile.write('\n'.join(comments) + '\n')
+ else:
+ string_count += 1
+ # Do not update the header, and only update if the source
+ # has a non-empty translation.
+ if msgid != '""' and source.get(msgid, ['""', []])[0] != '""':
+ other = split_comments(comments)[1]
+ new_msgstr, new_flags = source[msgid]
+ new_comments = other + new_flags
+ if new_msgstr != msgstr or new_comments != comments:
+ update_count += 1
+ msgstr = new_msgstr
+ comments = new_comments
+ outfile.write('\n'.join(comments) + '\n')
+ outfile.write('msgid ' + msgid + '\n')
+ if not msgid_plural:
+ outfile.write('msgstr ' + msgstr[0] + '\n')
+ else:
+ outfile.write('msgid_plural ' + msgid_plural + '\n')
+ n = 0
+ for i in msgstr:
+ outfile.write('msgstr[%s] %s\n' % (n, msgstr[n]))
+ n += 1
+ if msgstr is not None:
+ for m in msgstr:
+ if m == '""':
+ untranslated += 1
+ for c in comments:
+ if c.startswith('#,') and 'fuzzy' in c.split(', '):
+ fuzzy += 1
+
+ # We're done. Tell the user what we did.
+ print(('%d strings updated. '
+ '%d fuzzy strings. '
+ '%d of %d strings are still untranslated (%.0f%%).' %
+ (update_count, fuzzy, untranslated, string_count,
+ 100.0 * untranslated / string_count)))
+
+if __name__ == '__main__':
+ main(sys.argv)
diff --git a/tools/dev/prebuild-cleanup.sh b/tools/dev/prebuild-cleanup.sh
new file mode 100755
index 0000000..2ef6b80
--- /dev/null
+++ b/tools/dev/prebuild-cleanup.sh
@@ -0,0 +1,45 @@
+#!/bin/sh
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+### Purify a system, to simulate building Subversion on a "clean" box.
+###
+### You'll probably need to run this as `root', and may need to change
+### some paths for your system.
+
+# Clean out old apr, apr-util config scripts.
+rm /usr/local/bin/apr-config
+rm /usr/local/bin/apu-config
+
+# Clean out libs.
+rm -f /usr/local/lib/APRVARS
+rm -f /usr/local/lib/libapr*
+rm -f /usr/local/lib/libexpat*
+rm -f /usr/local/lib/libneon*
+rm -f /usr/local/lib/libsvn*
+
+# Clean out headers.
+rm -f /usr/local/include/apr*
+rm -f /usr/local/include/svn*
+rm -f /usr/local/include/neon/*
+
+### Not sure this would be useful:
+# rm -f /usr/local/apache2/lib/*
diff --git a/tools/dev/random-commits.py b/tools/dev/random-commits.py
new file mode 100755
index 0000000..a243990
--- /dev/null
+++ b/tools/dev/random-commits.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# USAGE: random-commits.py
+#
+# Using the FILELIST (see config below), a series of COUNT commits will be
+# constructed, each changing up to MAXFILES files per commit. The commands
+# will be sent to stdout (formatted as a shell script).
+#
+# The FILELIST can be constructed using the find-textfiles script.
+#
+
+import random
+
+FILELIST = 'textfiles'
+COUNT = 1000 # this many commits
+MAXFILES = 10 # up to 10 files at a time
+
+files = open(FILELIST).readlines()
+
+print('#!/bin/sh')
+
+for i in range(COUNT):
+ n = random.randrange(1, MAXFILES+1)
+ l = [ ]
+ print("echo '--- begin commit #%d -----------------------------------'" % (i+1,))
+ for j in range(n):
+ fname = random.choice(files)[:-1] # strip trailing newline
+ print("echo 'part of change #%d' >> %s" % (i+1, fname))
+ l.append(fname)
+ print("svn commit -m 'commit #%d' %s" % (i+1, ' '.join(l)))
diff --git a/tools/dev/remove-trailing-whitespace.sh b/tools/dev/remove-trailing-whitespace.sh
new file mode 100755
index 0000000..1dbde0c
--- /dev/null
+++ b/tools/dev/remove-trailing-whitespace.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+ for ext in c h cpp java py pl rb hpp cmd bat sql sh; do
+ find . -name "*.$ext" -not -type l -exec \
+ perl -pi -e 's/[ \t]*$//' {} + ;
+ # don't use \s to not strip ^L pagebreaks
+ done
diff --git a/tools/dev/sbox-ospath.py b/tools/dev/sbox-ospath.py
new file mode 100755
index 0000000..9e38755
--- /dev/null
+++ b/tools/dev/sbox-ospath.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+# USAGE:
+# $ ./sbox-ospath.py FILENAME
+#
+# This script will look for all lines in the file that use an expression
+# that looks like:
+# os.path.join(wc_dir, 'A', 'B')
+#
+# and rewrite that to:
+# sbox.ospath('A/B')
+#
+# Obviously, this relies heavily on standard naming for the variables in
+# our testing code. Visual inspection (and execution!) should be performed.
+#
+# The file is rewritten in place.
+#
+
+import sys
+import os
+import re
+
+RE_FIND_JOIN = re.compile(r'os\.path\.join\((?:sbox\.)?wc_dir, '
+ r'(["\'][^"\']*["\'](?:, ["\'][^"\']*["\'])*)\)')
+
+
+def rewrite_file(fname):
+ count = 0
+ lines = open(fname).readlines()
+ for i in range(len(lines)):
+ line = lines[i]
+ match = RE_FIND_JOIN.search(line)
+ if match:
+ start, end = match.span()
+ parts = match.group(1).replace('"', "'").replace("', '", '/')
+ lines[i] = line[:start] + 'sbox.ospath(' + parts + ')' + line[end:]
+ count += 1
+ if count == 0:
+ print('No changes.')
+ else:
+ open(fname, 'w').writelines(lines)
+ print('%s rewrites performed.' % (count,))
+
+
+if __name__ == '__main__':
+ rewrite_file(sys.argv[1])
diff --git a/tools/dev/scramble-tree.py b/tools/dev/scramble-tree.py
new file mode 100755
index 0000000..7857aad
--- /dev/null
+++ b/tools/dev/scramble-tree.py
@@ -0,0 +1,304 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# scramble-tree.py: (See scramble-tree.py --help.)
+#
+# Makes multiple random file changes to a directory tree, for testing.
+#
+# This script will add some new files, remove some existing files, add
+# text to some existing files, and delete text from some existing
+# files. It will also leave some files completely untouched.
+#
+# The exact set of changes made is always the same for identical trees,
+# where "identical" means the names of files and directories are the
+# same, and they are arranged in the same tree structure (the actual
+# contents of files may differ). If two are not identical, the sets of
+# changes scramble-tree.py will make may differ arbitrarily.
+#
+# Directories named .svn/ and CVS/ are ignored.
+#
+# Example scenario, starting with a pristine Subversion working copy:
+#
+# $ ls
+# foo/
+# $ svn st foo
+# $ cp -r foo bar
+# $ svn st bar
+# $ scramble-tree.py foo
+# $ svn st foo
+# [... see lots of scary status output ...]
+# $ scramble-tree.py bar
+# [... see the exact same scary status output ...]
+# $ scramble-tree.py foo
+# [... see a new bunch of scary status output ...]
+# $
+
+import os
+import sys
+import getopt
+try:
+ my_getopt = getopt.gnu_getopt
+except AttributeError:
+ my_getopt = getopt.getopt
+import random
+from hashlib import md5 as hashlib_md5
+import base64
+
+
+class VCActions:
+ def __init__(self):
+ pass
+ def add_file(self, path):
+ """Add an existing file to version control."""
+ pass
+ def remove_file(self, path):
+ """Remove an existing file from version control, and delete it."""
+ pass
+
+
+class NoVCActions(VCActions):
+ def remove_file(self, path):
+ os.unlink(path)
+
+
+class CVSActions(VCActions):
+ def add_file(self, path):
+ cwd = os.getcwd()
+ try:
+ dirname, basename = os.path.split(path)
+ os.chdir(os.path.join(cwd, dirname))
+ os.system('cvs -Q add -m "Adding file to repository" "%s"' % (basename))
+ finally:
+ os.chdir(cwd)
+ def remove_file(self, path):
+ cwd = os.getcwd()
+ try:
+ dirname, basename = os.path.split(path)
+ os.chdir(os.path.join(cwd, dirname))
+ os.system('cvs -Q rm -f "%s"' % (basename))
+ finally:
+ os.chdir(cwd)
+
+
+class SVNActions(VCActions):
+ def add_file(self, path):
+ os.system('svn add --quiet "%s"' % (path))
+ def remove_file(self, path):
+ os.remove(path)
+ os.system('svn rm --quiet --force "%s"' % (path))
+
+
+class hashDir:
+ """Given a directory, creates a string containing all directories
+ and files under that directory (sorted alphanumerically) and makes a
+ base64-encoded md5 hash of the resulting string. Call
+ hashDir.gen_seed() to generate a seed value for this tree."""
+
+ def __init__(self, rootdir):
+ self.allfiles = []
+ for dirpath, dirs, files in os.walk(rootdir):
+ self.walker_callback(len(rootdir), dirpath, dirs + files)
+
+ def gen_seed(self):
+ # Return a base64-encoded (kinda ... strip the '==\n' from the
+ # end) MD5 hash of sorted tree listing.
+ self.allfiles.sort()
+ return base64.encodestring(hashlib_md5(''.join(self.allfiles)).digest())[:-3]
+
+ def walker_callback(self, baselen, dirname, fnames):
+ if ((dirname == '.svn') or (dirname == 'CVS')):
+ return
+ self.allfiles.append(dirname[baselen:])
+ for filename in fnames:
+ path = os.path.join(dirname, filename)
+ if not os.path.isdir(path):
+ self.allfiles.append(path[baselen:])
+
+
+class Scrambler:
+ def __init__(self, seed, vc_actions, dry_run, quiet):
+ if not quiet:
+ print('SEED: ' + seed)
+
+ self.rand = random.Random(seed)
+ self.vc_actions = vc_actions
+ self.dry_run = dry_run
+ self.quiet = quiet
+ self.ops = [] ### ["add" | "munge", path]
+ self.greeking = """
+======================================================================
+This is some text that was inserted into this file by the lovely and
+talented scramble-tree.py script.
+======================================================================
+"""
+
+ ### Helpers
+ def shrink_list(self, list, remove_count):
+ if len(list) <= remove_count:
+ return []
+ for i in range(remove_count):
+ j = self.rand.randrange(len(list) - 1)
+ del list[j]
+ return list
+
+ def _make_new_file(self, dir):
+ i = 0
+ path = None
+ for i in range(99999):
+ path = os.path.join(dir, "newfile.%05d.txt" % i)
+ if not os.path.exists(path):
+ open(path, 'w').write(self.greeking)
+ return path
+ raise Exception("Ran out of unique new filenames in directory '%s'" % dir)
+
+ ### File Mungers
+ def _mod_append_to_file(self, path):
+ if not self.quiet:
+ print('append_to_file: %s' % path)
+ if self.dry_run:
+ return
+ fh = open(path, "a")
+ fh.write(self.greeking)
+ fh.close()
+
+ def _mod_remove_from_file(self, path):
+ if not self.quiet:
+ print('remove_from_file: %s' % path)
+ if self.dry_run:
+ return
+ lines = self.shrink_list(open(path, "r").readlines(), 5)
+ open(path, "w").writelines(lines)
+
+ def _mod_delete_file(self, path):
+ if not self.quiet:
+ print('delete_file: %s' % path)
+ if self.dry_run:
+ return
+ self.vc_actions.remove_file(path)
+
+ ### Public Interfaces
+ def get_randomizer(self):
+ return self.rand
+
+ def schedule_munge(self, path):
+ self.ops.append(tuple(["munge", path]))
+
+ def schedule_addition(self, dir):
+ self.ops.append(tuple(["add", dir]))
+
+ def enact(self, limit):
+ num_ops = len(self.ops)
+ if limit == 0:
+ return
+ elif limit > 0 and limit <= num_ops:
+ self.ops = self.shrink_list(self.ops, num_ops - limit)
+ for op, path in self.ops:
+ if op == "add":
+ path = self._make_new_file(path)
+ if not self.quiet:
+ print("add_file: %s" % path)
+ if self.dry_run:
+ return
+ self.vc_actions.add_file(path)
+ elif op == "munge":
+ file_mungers = [self._mod_append_to_file,
+ self._mod_append_to_file,
+ self._mod_append_to_file,
+ self._mod_remove_from_file,
+ self._mod_remove_from_file,
+ self._mod_remove_from_file,
+ self._mod_delete_file,
+ ]
+ self.rand.choice(file_mungers)(path)
+
+
+def usage(retcode=255):
+ print('Usage: %s [OPTIONS] DIRECTORY' % (sys.argv[0]))
+ print('')
+ print('Options:')
+ print(' --help, -h : Show this usage message.')
+ print(' --seed ARG : Use seed ARG to scramble the tree.')
+ print(' --use-svn : Use Subversion (as "svn") to perform file additions')
+ print(' and removals.')
+ print(' --use-cvs : Use CVS (as "cvs") to perform file additions')
+ print(' and removals.')
+ print(' --dry-run : Don\'t actually change the disk.')
+ print(' --limit N : Limit the scrambling to a maximum of N operations.')
+ print(' --quiet, -q : Run in stealth mode!')
+ sys.exit(retcode)
+
+
+def walker_callback(scrambler, dirname, fnames):
+ if ((dirname.find('.svn') != -1) or dirname.find('CVS') != -1):
+ return
+ rand = scrambler.get_randomizer()
+ if rand.randrange(5) == 1:
+ scrambler.schedule_addition(dirname)
+ for filename in fnames:
+ path = os.path.join(dirname, filename)
+ if not os.path.isdir(path) and rand.randrange(3) == 1:
+ scrambler.schedule_munge(path)
+
+
+def main():
+ seed = None
+ vc_actions = NoVCActions()
+ dry_run = 0
+ quiet = 0
+ limit = None
+
+ # Mm... option parsing.
+ optlist, args = my_getopt(sys.argv[1:], "hq",
+ ['seed=', 'use-svn', 'use-cvs',
+ 'help', 'quiet', 'dry-run', 'limit='])
+ for opt, arg in optlist:
+ if opt == '--help' or opt == '-h':
+ usage(0)
+ if opt == '--seed':
+ seed = arg
+ if opt == '--use-svn':
+ vc_actions = SVNActions()
+ if opt == '--use-cvs':
+ vc_actions = CVSActions()
+ if opt == '--dry-run':
+ dry_run = 1
+ if opt == '--limit':
+ limit = int(arg)
+ if opt == '--quiet' or opt == '-q':
+ quiet = 1
+
+ # We need at least a path to work with, here.
+ argc = len(args)
+ if argc < 1 or argc > 1:
+ usage()
+ rootdir = args[0]
+
+ # If a seed wasn't provided, calculate one.
+ if seed is None:
+ seed = hashDir(rootdir).gen_seed()
+ scrambler = Scrambler(seed, vc_actions, dry_run, quiet)
+ for dirpath, dirs, files in os.walk(rootdir):
+ walker_callback(scrambler, dirpath, dirs + files)
+ scrambler.enact(limit)
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/dev/stress.pl b/tools/dev/stress.pl
new file mode 100755
index 0000000..5b76be3
--- /dev/null
+++ b/tools/dev/stress.pl
@@ -0,0 +1,498 @@
+#!/usr/bin/perl -w
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+
+# A script that allows some simple testing of Subversion, in
+# particular concurrent read, write and read-write access by the 'svn'
+# client. It can also create working copy trees containing a large
+# number of files and directories. All repository access is via the
+# 'svnadmin' and 'svn' commands.
+#
+# This script constructs a repository, and populates it with
+# files. Then it loops making changes to a subset of the files and
+# committing the tree. Thus when two, or more, instances are run in
+# parallel there is concurrent read and write access. Sometimes a
+# commit will fail due to a commit conflict. This is expected, and is
+# automatically resolved by updating the working copy.
+#
+# Each file starts off containing:
+# A0
+# 0
+# A1
+# 1
+# A2
+# .
+# .
+# A9
+# 9
+#
+# The script runs with an ID in the range 0-9, and when it modifies a
+# file it modifies the line that starts with its ID. Thus scripts with
+# different IDs will make changes that can be merged automatically.
+#
+# The main loop is then:
+#
+# step 1: modify a random selection of files
+#
+# step 2: optional sleep or wait for RETURN keypress
+#
+# step 3: update the working copy automatically merging out-of-date files
+#
+# step 4: try to commit, if not successful go to step 3 otherwise go to step 1
+#
+# To allow break-out of potentially infinite loops, the script will
+# terminate if it detects the presence of a "stop file", the path to
+# which is specified with the -S option (default ./stop). This allows
+# the script to be stopped without any danger of interrupting an 'svn'
+# command, which experiment shows may require Berkeley db_recover to
+# be used on the repository.
+#
+# Running the Script
+# ==================
+#
+# Use three xterms all with shells on the same directory. In the
+# first xterm run (note, this will remove anything called repostress
+# in the current directory)
+#
+# % stress.pl -c -s1
+#
+# When the message "Committed revision 1." scrolls past, use the second
+# xterm to run
+#
+# % stress.pl -s1
+#
+# Both xterms will modify, update and commit separate working copies to
+# the same repository.
+#
+# Use the third xterm to touch a file 'stop' to cause the scripts to
+# exit cleanly, i.e. without interrupting an svn command.
+#
+# To run a third, fourth, etc. instance of the script use -i
+#
+# % stress.pl -s1 -i2
+# % stress.pl -s1 -i3
+#
+# Running several instances at once will cause a *lot* of disk
+# activity. I have run ten instances simultaneously on a Linux tmpfs
+# (RAM based) filesystem -- watching ten xterms scroll irregularly
+# can be quite hypnotic!
+
+use strict;
+use IPC::Open3;
+use Getopt::Std;
+use File::Find;
+use File::Path;
+use File::Spec::Functions;
+use Cwd;
+
+# The name of this script, for error messages.
+my $stress = 'stress.pl';
+
+# When testing BDB 4.4 and later with DB_RECOVER enabled, the criteria
+# for a failed update and commit are a bit looser than otherwise.
+my $dbrecover = undef;
+
# Repository check/create.  With $create true, destroy any existing
# repository at $repo and make a fresh one (BDB by default, FSFS when
# $fsfs is true).  Returns the absolute path to the repository.
sub init_repo
  {
    my ( $repo, $create, $no_sync, $fsfs ) = @_;
    if ( $create )
      {
        # Start from scratch: remove any previous repository first.
        rmtree([$repo]) if -e $repo;
        my $svnadmin_cmd = "svnadmin create $repo";
        $svnadmin_cmd .= " --fs-type bdb" if not $fsfs;
        $svnadmin_cmd .= " --bdb-txn-nosync" if $no_sync;
        system( $svnadmin_cmd) and die "$stress: $svnadmin_cmd: failed: $?\n";
        # Allow anonymous write access so svnserve clients need no auth.
        open ( CONF, ">>$repo/conf/svnserve.conf")
          or die "$stress: open svnserve.conf: $!\n";
        print CONF "[general]\nanon-access = write\n";
        close CONF or die "$stress: close svnserve.conf: $!\n";
      }
    # An absolute path is needed later to build the file:// URL.
    $repo = getcwd . "/$repo" if not file_name_is_absolute $repo;
    # BDB 4.4+ with DB_RECOVER registers itself in this file; when
    # present, the update/commit failure criteria are loosened (see
    # the comment on $dbrecover at the top of the script).
    $dbrecover = 1 if -e "$repo/db/__db.register";
    print "$stress: BDB automatic database recovery enabled\n" if $dbrecover;
    return $repo;
  }
+
# Check out a working copy of $url into a directory whose name embeds
# this process's PID, and return that directory's name.
sub check_out
  {
    my ( $url, $options ) = @_;
    my $wc_dir = "wcstress.$$";
    mkdir "$wc_dir", 0755 or die "$stress: mkdir wcstress.$$: $!\n";
    my $checkout_cmd = "svn co $url $wc_dir $options";
    system( $checkout_cmd ) and die "$stress: $checkout_cmd: failed: $?\n";
    return $wc_dir;
  }
+
# Print status and update. The update is to do any required merges.
# Dies on any error that is not an "acceptable" BDB recovery error.
sub status_update
  {
    my ( $options, $wc_dir, $wait_for_key, $disable_status,
         $resolve_conflicts ) = @_;
    my $svn_cmd = "svn st -u $options $wc_dir";
    if ( not $disable_status ) {
      print "Status:\n";
      system( $svn_cmd ) and die "$stress: $svn_cmd: failed: $?\n";
    }
    print "Press return to update/commit\n" if $wait_for_key;
    # The keypress is deliberately read into (and clobbers)
    # $wait_for_key; its value is not needed after this point.
    read STDIN, $wait_for_key, 1 if $wait_for_key;
    print "Updating:\n";
    $svn_cmd = "svn up --non-interactive $options $wc_dir";

    # Check for conflicts during the update. If any exist, we resolve them.
    # Run the update through open3 so both stdout and stderr can be
    # parsed as well as echoed.
    my $pid = open3(\*UPDATE_WRITE, \*UPDATE_READ, \*UPDATE_ERR_READ,
                    $svn_cmd);
    my @conflicts = ();
    while ( <UPDATE_READ> )
      {
        print;
        s/\r*$//; # [Windows compat] Remove trailing \r's
        # A "C <path>" line in update output marks a conflicted path.
        if ( /^C (.*)$/ )
          {
            push(@conflicts, ($1))
          }
      }

    # Print any errors.
    my $acceptable_error = 0;
    while ( <UPDATE_ERR_READ> )
      {
        print;
        if ($dbrecover)
          {
            s/\r*$//; # [Windows compat] Remove trailing \r's
            # With BDB auto-recovery, PANIC/DB_RUNRECOVERY errors are
            # expected occasionally and are not fatal to the test.
            $acceptable_error = 1 if ( /^svn:[ ]
                                       (
                                        bdb:[ ]PANIC
                                        |
                                        DB_RUNRECOVERY
                                       )
                                       /x );
          }
      }

    # Close up the streams.
    close UPDATE_ERR_READ or die "$stress: close UPDATE_ERR_READ: $!\n";
    close UPDATE_WRITE or die "$stress: close UPDATE_WRITE: $!\n";
    close UPDATE_READ or die "$stress: close UPDATE_READ: $!\n";

    # Get commit subprocess exit status
    die "$stress: waitpid: $!\n" if $pid != waitpid $pid, 0;
    # Exit status 256 (exit code 1) is tolerated only when it was one
    # of the acceptable recovery errors above.
    die "$stress: unexpected update fail: exit status: $?\n"
      unless $? == 0 or ( $? == 256 and $acceptable_error );

    if ($resolve_conflicts)
      {
        foreach my $conflict (@conflicts)
          {
            $svn_cmd = "svn resolved $conflict";
            system( $svn_cmd ) and die "$stress: $svn_cmd: failed: $?\n";
          }
      }
  }
+
# Print status, update and commit. The update is to do any required
# merges. Returns 0 if the commit succeeds and 1 if it fails due to a
# conflict.
sub status_update_commit
  {
    my ( $options, $wc_dir, $wait_for_key, $disable_status,
         $resolve_conflicts ) = @_;
    # Pass the flag itself.  The original code had a stray "\" before
    # the wrapped $resolve_conflicts argument -- Perl needs no
    # shell-style line continuation, so "\<newline>$resolve_conflicts"
    # passed a scalar *reference*, which is always true and silently
    # forced update-time conflict resolution on regardless of -r.
    status_update $options, $wc_dir, $wait_for_key, $disable_status,
      $resolve_conflicts;
    print "Committing:\n";
    # Use current time as log message
    my $now_time = localtime;
    # [Windows compat] Must use double quotes for the log message.
    my $svn_cmd = "svn ci $options $wc_dir -m \"$now_time\"";

    # Need to handle the commit carefully. It could fail for all sorts
    # of reasons, but errors that indicate a conflict are "acceptable"
    # while other errors are not. Thus there is a need to check the
    # return value and parse the error text.
    my $pid = open3(\*COMMIT_WRITE, \*COMMIT_READ, \*COMMIT_ERR_READ,
                    $svn_cmd);
    print while ( <COMMIT_READ> );

    # Look for acceptable errors, ones we expect to occur due to conflicts.
    # (The original pattern ended with an empty "|" alternative, which
    # matches the empty string and therefore accepted *every* "svn: "
    # error line; the trailing "|" has been removed.)
    my $acceptable_error = 0;
    while ( <COMMIT_ERR_READ> )
      {
        print;
        s/\r*$//; # [Windows compat] Remove trailing \r's
        $acceptable_error = 1 if ( /^svn:[ ]
                                   (
                                    .*out[ ]of[ ]date
                                    |
                                    Conflict[ ]at
                                    |
                                    Baseline[ ]incorrect
                                   )
                                   /ix )
          or ( $dbrecover and ( /^svn:[ ]
                                (
                                 bdb:[ ]PANIC
                                 |
                                 DB_RUNRECOVERY
                                )
                                /x ));
      }
    close COMMIT_ERR_READ or die "$stress: close COMMIT_ERR_READ: $!\n";
    close COMMIT_WRITE or die "$stress: close COMMIT_WRITE: $!\n";
    close COMMIT_READ or die "$stress: close COMMIT_READ: $!\n";

    # Get commit subprocess exit status
    die "$stress: waitpid: $!\n" if $pid != waitpid $pid, 0;
    # Exit code 1 (status 256) is OK only for the conflict-ish errors
    # matched above; anything else is a genuine failure.
    die "$stress: unexpected commit fail: exit status: $?\n"
      if ( $? != 0 and $? != 256 ) or ( $? == 256 and $acceptable_error != 1 );

    # 1 => commit failed with a conflict; 0 => committed cleanly.
    return $? == 256 ? 1 : 0;
  }
+
# Get a list of all versioned files in the working copy
{
  # Shared accumulator for the File::Find callback below.
  my @get_list_of_files_helper_array;

  # File::Find 'wanted' callback: prune .svn administrative
  # directories (and everything beneath them), record every other
  # non-directory entry.
  sub GetListOfFilesHelper
    {
      # Match a literal ".svn" path component.  The dot is escaped:
      # the original "m[/.svn]" used an unescaped ".", which matches
      # any character and could prune names like "Xsvn".
      $File::Find::prune = 1 if $File::Find::name =~ m[/\.svn];
      return if $File::Find::prune or -d;
      push @get_list_of_files_helper_array, $File::Find::name;
    }

  # Return the list of files (not directories) under $wc_dir,
  # excluding Subversion administrative areas.
  sub GetListOfFiles
    {
      my ( $wc_dir ) = @_;
      @get_list_of_files_helper_array = ();
      find( \&GetListOfFilesHelper, $wc_dir);
      return @get_list_of_files_helper_array;
    }
}
+
# Populate a working copy.  Creates $file_width files named foo<N> in
# $dir, adds them to version control, then recurses into $dir_width
# subdirectories named bar<N> until $depth levels have been built.
sub populate
  {
    my ( $dir, $dir_width, $file_width, $depth, $pad, $props ) = @_;
    # $depth counts the remaining levels; post-decrement so the test
    # sees the incoming value and the recursion below sees depth-1.
    return if not $depth--;

    for my $nfile ( 1..$file_width )
      {
        my $filename = "$dir/foo$nfile";
        open( FOO, ">$filename" ) or die "$stress: open $filename: $!\n";

        # Ten "A<digit>" stanzas, one per possible script ID 0-9.
        # Each unit of $pad adds four 255-character filler lines per
        # stanza -- roughly 10K per unit over the whole file, matching
        # the -P option's description.
        for my $line ( 0..9 )
          {
            print FOO "A$line\n$line\n"
              or die "$stress: write to $filename: $!\n";
            map { print FOO $_ x 255, "\n"; } ("a", "b", "c", "d")
              foreach (1..$pad);
          }
        # With -p, embed a keyword anchor for svn:keywords expansion.
        print FOO "\$HeadURL: \$\n"
          or die "$stress: write to $filename: $!\n" if $props;
        close FOO or die "$stress: close $filename: $!\n";

        my $svn_cmd = "svn add $filename";
        system( $svn_cmd ) and die "$stress: $svn_cmd: failed: $?\n";

        if ( $props )
          {
            $svn_cmd = "svn propset svn:eol-style native $filename";
            system( $svn_cmd ) and die "$stress: $svn_cmd: failed: $?\n";

            $svn_cmd = "svn propset svn:keywords HeadURL $filename";
            system( $svn_cmd ) and die "$stress: $svn_cmd: failed: $?\n";
          }
      }

    if ( $depth )
      {
        for my $ndir ( 1..$dir_width )
          {
            my $dirname = "$dir/bar$ndir";
            my $svn_cmd = "svn mkdir $dirname";
            system( $svn_cmd ) and die "$stress: $svn_cmd: failed: $?\n";

            # Recurse with the already-decremented depth.
            populate( "$dirname", $dir_width, $file_width, $depth, $pad,
                      $props );
          }
      }
  }
+
# Modify a versioned file in the working copy: append ",$mod_number"
# to the line that begins with this instance's ID digit.
sub ModFile
  {
    my ( $filename, $mod_number, $id ) = @_;

    # Slurp the file, rewriting the one line that starts with our ID.
    open( FOO, "<$filename" ) or die "$stress: open $filename: $!\n";
    my @contents;
    for my $line ( <FOO> )
      {
        $line =~ s[(^$id.*)][$1,$mod_number];
        push @contents, $line;
      }
    close FOO or die "$stress: close $filename: $!\n";

    # Replace the file with the modified contents.
    open( FOO, ">$filename" ) or die "$stress: open $filename: $!\n";
    print FOO $_ or die "$stress: print $filename: $!\n" for @contents;
    close FOO or die "$stress: close $filename: $!\n";
  }
+
# Parse and validate the command line; return the options hash.
sub ParseCommandLine
  {
    my $usage = "
usage: stress.pl [-cdfhprW] [-i num] [-n num] [-s secs] [-x num] [-o options]
                 [-D num] [-F num] [-N num] [-P num] [-R path] [-S path]
                 [-U url]

where
  -c cause repository creation
  -d don't make the status calls
  -f use --fs-type fsfs during repository creation
  -h show this help information (other options will be ignored)
  -i the ID (valid IDs are 0 to 9, default is 0 if -c given, 1 otherwise)
  -n the number of sets of changes to commit
  -p add svn:eol-style and svn:keywords properties to the files
  -r perform update-time conflict resolution
  -s the sleep delay (-1 wait for key, 0 none)
  -x the number of files to modify in each commit
  -o options to pass for subversion client
  -D the number of sub-directories per directory in the tree
  -F the number of files per directory in the tree
  -N the depth of the tree
  -P the number of 10K blocks with which to pad the file
  -R the path to the repository
  -S the path to the file whose presence stops this script
  -U the URL to the repository (file:///<-R path> by default)
  -W use --bdb-txn-nosync during repository creation
";

    # Option defaults; getopts() overwrites any that were given.
    my %cmd_opts =
      (
       'D' => 2,             # number of subdirs per dir
       'F' => 2,             # number of files per dir
       'N' => 2,             # depth
       'P' => 0,             # padding blocks
       'R' => "repostress",  # repository name
       'S' => "stop",        # path of file to stop the script
       'U' => "none",        # URL
       'W' => 0,             # create with --bdb-txn-nosync
       'c' => 0,             # create repository
       'd' => 0,             # disable status
       'f' => 0,             # create with --fs-type fsfs
       'h' => 0,             # help
       'i' => 0,             # ID
       'n' => 200,           # sets of changes
       'p' => 0,             # add file properties
       'r' => 0,             # conflict resolution
       's' => -1,            # sleep interval
       'x' => 4,             # files to modify
       'o' => "",            # no options passed
      );

    getopts( 'cdfhi:n:prs:x:o:D:F:N:P:R:S:U:W', \%cmd_opts ) or die $usage;

    # Print help (and exit nicely) if requested.
    if ( $cmd_opts{'h'} )
      {
        print( $usage );
        exit 0;
      }

    # Default ID when not given: the creating instance gets 0, any
    # other instance gets 1.
    $cmd_opts{'i'} = 1 - $cmd_opts{'c'} if not $cmd_opts{'i'};
    die $usage if $cmd_opts{'i'} !~ /^[0-9]$/;

    return %cmd_opts;
  }
+
############################################################################
# Main

# Why the fixed seed? I use this script for more than stress testing,
# I also use it to create test repositories. When creating a test
# repository, while I don't care exactly which files get modified, I
# find it useful for the repositories to be reproducible, i.e. to have
# the same files modified each time. When using this script for
# stress testing one could remove this fixed seed and Perl will
# automatically use a pseudo-random seed. However it doesn't much
# matter, the stress testing really depends on the real-time timing
# differences between multiple instances of the script, rather than the
# randomness of the chosen files.
srand 123456789;

my %cmd_opts = ParseCommandLine();

my $repo = init_repo( $cmd_opts{'R'}, $cmd_opts{'c'}, $cmd_opts{'W'},
                      $cmd_opts{'f'} );

# [Windows compat]
# Replace backslashes in the path, and tweak the number of slashes
# in the scheme separator to make the URL always correct.
my $urlsep = ($repo =~ m/^\// ? '//' : '///');
$repo =~ s/\\/\//g;

# Make URL from path if URL not explicitly specified
$cmd_opts{'U'} = "file:$urlsep$repo" if $cmd_opts{'U'} eq "none";

my $wc_dir = check_out $cmd_opts{'U'}, $cmd_opts{'o'};

if ( $cmd_opts{'c'} )
  {
    # Fresh repository: build the initial tree and commit it.
    my $svn_cmd = "svn mkdir $wc_dir/trunk";
    system( $svn_cmd ) and die "$stress: $svn_cmd: failed: $?\n";
    populate( "$wc_dir/trunk", $cmd_opts{'D'}, $cmd_opts{'F'}, $cmd_opts{'N'},
              $cmd_opts{'P'}, $cmd_opts{'p'} );
    status_update_commit $cmd_opts{'o'}, $wc_dir, 0, 1
      and die "$stress: populate checkin failed\n";
  }

my @wc_files = GetListOfFiles $wc_dir;
die "$stress: not enough files in repository\n"
  if $#wc_files + 1 < $cmd_opts{'x'};

my $wait_for_key = $cmd_opts{'s'} < 0;

my $stop_file = $cmd_opts{'S'};

for my $mod_number ( 1..$cmd_opts{'n'} )
  {
    my @chosen;
    for ( 1..$cmd_opts{'x'} )
      {
        # Extract a random file from the list and modify it.  Use the
        # element count (@wc_files in scalar context) as the range:
        # the previous "int rand $#wc_files" used the highest *index*,
        # so the final element could never be selected.
        my $mod_file = splice @wc_files, int rand @wc_files, 1;
        ModFile $mod_file, $mod_number, $cmd_opts{'i'};
        push @chosen, $mod_file;
      }
    # Reinstate list of files, the order doesn't matter
    push @wc_files, @chosen;

    if ( $cmd_opts{'x'} > 0 ) {
      # Loop committing until successful or the stop file is created.
      # (No trailing "\" before the wrapped arguments: a backslash
      # there would pass always-true scalar *references* instead of
      # the flag values, breaking the -d and -r options.)
      1 while not -e $stop_file
        and status_update_commit $cmd_opts{'o'}, $wc_dir, $wait_for_key,
                                 $cmd_opts{'d'}, $cmd_opts{'r'};
    } else {
      status_update $cmd_opts{'o'}, $wc_dir, $wait_for_key, $cmd_opts{'d'},
                    $cmd_opts{'r'};
    }

    # Break out of loop, or sleep, if required
    print( "stop file '$stop_file' detected\n" ), last if -e $stop_file;
    sleep $cmd_opts{'s'} if $cmd_opts{'s'} > 0;
  }
+
diff --git a/tools/dev/svn-dev.el b/tools/dev/svn-dev.el
new file mode 100644
index 0000000..2fc32c3
--- /dev/null
+++ b/tools/dev/svn-dev.el
@@ -0,0 +1,566 @@
+;;;; Emacs Lisp help for writing Subversion code. ;;;;
+
+;; Licensed to the Apache Software Foundation (ASF) under one
+;; or more contributor license agreements. See the NOTICE file
+;; distributed with this work for additional information
+;; regarding copyright ownership. The ASF licenses this file
+;; to you under the Apache License, Version 2.0 (the
+;; "License"); you may not use this file except in compliance
+;; with the License. You may obtain a copy of the License at
+;;
+;; http://www.apache.org/licenses/LICENSE-2.0
+;;
+;; Unless required by applicable law or agreed to in writing,
+;; software distributed under the License is distributed on an
+;; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+;; KIND, either express or implied. See the License for the
+;; specific language governing permissions and limitations
+;; under the License.
+
+
+;; Later on, there will be auto-detection of svn files, modeline
+;; status, and a whole library of routines to interface with the
+;; command-line client. For now, there's this, at Ben's request.
+;;
+;; All this stuff should get folded into Emacs VC mode, really.
+
(defun svn-revert ()
  "Revert the current buffer and its file to its svn base revision."
  (interactive)
  (let ((obuf (current-buffer))
        (fname (buffer-file-name))
        (outbuf (get-buffer-create "*svn output*")))
    ;; Run `svn status' on the file first, to verify it is actually
    ;; under version control before reverting.
    (set-buffer outbuf)
    (delete-region (point-min) (point-max))
    (call-process "svn" nil outbuf nil "status" fname)
    (goto-char (point-min))
    (search-forward fname)
    (beginning-of-line)
    ;; A leading "?" in status output means "unversioned".
    (if (looking-at "^?")
        (error "\"%s\" is not a Subversion-controlled file" fname))
    (call-process "svn" nil outbuf nil "revert" fname)
    (set-buffer obuf)
    ;; todo: make a backup~ file?
    ;; Re-read the reverted file from disk and save, so buffer and
    ;; file agree.
    (save-excursion
      (revert-buffer nil t)
      (save-buffer))
    (message "Reverted \"%s\"." fname)))
+
(defun svn-resolved ()
  "Tell Subversion that conflicts in the current buffer and its file have
been resolved."
  (interactive)
  (let ((obuf (current-buffer))
        (fname (buffer-file-name))
        (outbuf (get-buffer-create "*svn output*")))
    ;; Same pattern as `svn-revert': check versioned-ness via `svn
    ;; status' output before acting.
    (set-buffer outbuf)
    (delete-region (point-min) (point-max))
    (call-process "svn" nil outbuf nil "status" fname)
    (goto-char (point-min))
    (search-forward fname)
    (beginning-of-line)
    ;; A leading "?" in status output means "unversioned".
    (if (looking-at "^?")
        (error "\"%s\" is not a Subversion-controlled file" fname))
    (call-process "svn" nil outbuf nil "resolved" fname)
    (set-buffer obuf)
    ;; todo: make a backup~ file?
    (save-excursion
      (revert-buffer nil t)
      (save-buffer))
    (message "Marked \"%s\" as conflict-free." fname)))
+
(defconst svn-adm-area ".svn"
  "The name of the Subversion administrative subdirectory.")

(defconst svn-adm-entries ".svn/entries"
  "The path from cwd to the Subversion entries file.")

(defun svn-controlled-path-p (path)
  "Return non-nil if PATH is under Subversion version control, else
return nil.  If PATH does not exist, return nil.

In the future, this will return an Emacs Lisp reflection of PATH's
entry, either an explicit svn-entry-struct, or a list of the form
\(LAST-COMMIT-REV CURRENT-REV LAST-COMMITTER ...\), so we can display
svn information in the mode line.  But that requires truly parsing the
entries file, instead of just detecting PATH among the entries."
  (interactive "f") ; any use for interactive, other than testing?
  (cond
   ((not (file-exists-p path))
    nil)
   ;; A directory is considered versioned iff it contains an .svn
   ;; administrative area.
   ((file-directory-p path)
    (let ((adm-area (concat path "/" svn-adm-area)))
      (if (file-directory-p adm-area)
          t
        nil)))
   ;; A file is considered versioned iff its name appears in the
   ;; sibling .svn/entries file (a textual search, not a real parse --
   ;; see the docstring above).
   (t
    (let ((entries (concat (file-name-directory path) svn-adm-entries))
          (basename (file-name-nondirectory path))
          (found nil))
      (save-excursion
        (if (file-directory-p (concat (file-name-directory path) svn-adm-area))
            (progn
              ;; Visit the entries file without running user hooks,
              ;; and kill the buffer when done.
              (let ((find-file-hooks nil))
                (set-buffer (find-file-noselect entries t)))
              (goto-char (point-min))
              (if (search-forward (format "name=\"%s\"" basename) nil t)
                  (setq found t)
                (setq found nil))
              (kill-buffer nil)))
        found)))))
+
+
(defun svn-text-base-path (file)
  "Return the path to the text base for FILE (a string).
If FILE is a directory or not under version control, return nil."
  ;; Only plain, versioned files have a text base.
  (if (or (not (svn-controlled-path-p file))
          (file-directory-p file))
      nil
    (let ((dir (or (file-name-directory file) ""))
          (leaf (file-name-nondirectory file)))
      (format "%s%s/text-base/%s.svn-base" dir svn-adm-area leaf))))
+
+
(defun svn-ediff (file)
  "Ediff FILE against its text base."
  (interactive "fsvn ediff: ")
  (let ((base (svn-text-base-path file)))
    (if base
        (ediff-files file base)
      (error "No text base for %s" file))))
+
+
(defun svn-find-file-hook ()
  "Function for find-file-hooks.
Inhibit backup files unless `vc-make-backup-files' is non-nil."
  (if (svn-controlled-path-p (buffer-file-name))
      (progn
        ;; Under XEmacs, load VC's hook variables first so that
        ;; `vc-make-backup-files' is defined before it is consulted.
        (if (string-match "XEMACS\\|XEmacs\\|xemacs" emacs-version)
            (vc-load-vc-hooks)) ; for `vc-make-backup-files'
        (unless vc-make-backup-files
          ;; Buffer-local, so only svn-controlled buffers are
          ;; affected; other buffers keep normal backup behavior.
          (make-local-variable 'backup-inhibited)
          (setq backup-inhibited t)))))

(add-hook 'find-file-hooks 'svn-find-file-hook)
+
+
+
;;; Dynamic generation of common Subversion URLs.
;;;
;;; (I have a version of this that actually fetches the stuff from the
;;; Net if you don't have a local copy, but it requires a very recent
;;; version of Emacs, so I didn't bother with it here. -kfogel)

(defvar svn-site-source-tree-top (expand-file-name "~/projects/svn/site/")
  "*Top directory of your Subversion site source tree of
repository \"http://svn.apache.org/repos/asf/subversion/site\".
You almost certainly want to set this in your .emacs, to override
the default; use `(setq svn-site-source-tree-top
\"/path/to/the/site/tree\")'.")

(defvar svn-faq-file (concat svn-site-source-tree-top "/publish/faq.html")
  "*A local copy of the Subversion FAQ.")

(defvar svn-hacking-file (concat svn-site-source-tree-top
                                 "/docs/community-guide/community-guide.html")
  "*A local copy of the Subversion hacking.html file.")

;; Helper for referring to issue numbers in a user-friendly way.
(defun svn-bug-url (n)
  "Insert the url for Subversion issue number N. Interactively, prompt for N."
  (interactive "nSubversion issue number: ")
  ;; NOTE(review): this URL still points at the old tigris.org issue
  ;; tracker, while `svn-url-base' below uses apache.org -- confirm
  ;; which tracker is current before relying on the generated link.
  (insert (format "http://subversion.tigris.org/issues/show_bug.cgi?id=%d" n)))
+
;; Helper for referring to revisions in a browser-friendly way.
(defun svn-rev-url (rev &optional transform)
  "Insert the url for Subversion revision REV, or if TRANSFORM is not
nil, then transform the revision at or around point into an HTML link.

Interactively, if at or inside a revision number, transform it into
full HTML link; otherwise, prompt for revision number and insert just
the resulting URL."
  ;; Interactively: a word like "r12345" at point is transformed in
  ;; place; otherwise the revision is read from the minibuffer.
  (interactive (let ((thing (thing-at-point 'word)))
                 (if (and thing (string-match "r[0-9]+" thing))
                     (list thing t)
                   (list (read-string "Subversion revision number: ") nil))))
  ;; Strip a leading "r" so only digits go into the URL.
  (if (string-match "^r[0-9]+" rev)
      (setq rev (substring rev 1)))
  ;; When transforming, delete the word at point; the URL inserted
  ;; below takes its place.
  (if transform
      (let* ((bounds (bounds-of-thing-at-point 'word))
             (start (car bounds))
             (end (cdr bounds)))
        (delete-region start end)))
  (insert (format "http://svn.apache.org/viewcvs?view=revision&revision=%s"
                  rev)))
+
(defconst svn-url-base "http://subversion.apache.org/")
(defconst svn-faq-url (concat svn-url-base "faq.html"))
(defconst svn-hacking-url (concat svn-url-base
                                  "docs/community-guide/community-guide.html"))

(defun svn-html-get-targets (file)
  "Build a list of targets for the Subversion web file FILE."
  (let* ((lst nil)
         (already-buffer (find-buffer-visiting file))
         (faq-buffer (or already-buffer (find-file-noselect file))))
    (save-excursion
      (set-buffer faq-buffer)
      (goto-char (point-min))
      ;; TODO: Ideally, this wouldn't depend on the presence of a
      ;; table of contents with "#" URLs, it would read the divs and
      ;; anchors themselves.
      (while (search-forward "href=\"#" nil t)
        ;; Collect the anchor name between the "#" and closing quote.
        (let ((b (point))
              (e (progn (search-forward "\"") (forward-char -1) (point))))
          (setq lst (cons (buffer-substring b e) lst))))
      ;; Only kill the buffer if we created it ourselves.
      (if (not already-buffer)
          (kill-buffer (current-buffer)))
      lst)))
+
(defun svn-url-completing-read (file prompt &optional hist-list)
  "Completingly read an HTML target for FILE, prompting with PROMPT.
If HIST-LIST is non-nil, it is a symbol: the completion history list to use."
  ;; Candidates are the "#" anchors scraped from the local FILE; the
  ;; result is wrapped in a list for use as an `interactive' spec.
  (let* ((targets (svn-html-get-targets file))
         (choice (completing-read prompt targets nil t nil hist-list)))
    (list choice)))
+
(defvar svn-faq-history-list nil
  "History list for the 'svn-faq-url' prompt.")

(defvar svn-hacking-history-list nil
  "History list for the 'svn-hacking-url' prompt.")

;; Completion candidates come from the local copies of the site files;
;; the inserted URLs point at the live website.
(defun svn-faq-url (target)
  "Prompt with completion for a targeted SVN FAQ item, then insert it.
If called non-interactively, TARGET is the target within the faq (an
HTML anchor name, that is, the part after the \"#\")."
  (interactive
   (svn-url-completing-read svn-faq-file "FAQ entry: "
                            'svn-faq-history-list))
  (insert svn-faq-url "#" target))

(defun svn-hacking-url (target)
  "Prompt with completion for a targeted hacking.html item, then insert it.
If called non-interactively, TARGET is the target within hacking.html
(an HTML anchor name, that is, the part after the \"#\")."
  (interactive
   (svn-url-completing-read svn-hacking-file "hacking.html entry: "
                            'svn-hacking-history-list))
  (insert svn-hacking-url "#" target))
+
+
+
;;; Subversion C conventions
;; Define a "svn" C style -- GNU style, but with no extra indentation
;; inside extern "C" blocks -- and select it.  `major-mode' is tested
;; at load time, so this only takes effect when the file is loaded
;; from a C buffer.
(if (eq major-mode 'c-mode)
    (progn
      (c-add-style "svn" '("gnu" (c-offsets-alist . ((inextern-lang . 0)))))
      (c-set-style "svn")))
;; NOTE(review): plain `setq' here affects whichever buffer is current
;; at load time (`indent-tabs-mode' is buffer-local); confirm whether
;; `setq-default' was intended.
(setq indent-tabs-mode nil)
(setq angry-mob-with-torches-and-pitchforks t)
+
+
+
;; Subversion Python conventions, plus some harmless helpers for
;; people who don't have python mode set up by default.
(autoload 'python-mode "python-mode" nil t)
;; Associate .py files with python-mode unless already associated.
(or (assoc "\\.py$" auto-mode-alist)
    (setq auto-mode-alist
          (cons '("\\.py$" . python-mode) auto-mode-alist)))

(defun svn-python-mode-hook ()
  "Set up the Subversion python conventions.  The effect of this is
local to the current buffer, which is presumably visiting a file in
the Subversion project.  Python setup in other buffers will not be
affected."
  ;; Only adjust buffers whose file path contains "/subversion/".
  (when (string-match "/subversion/" (buffer-file-name))
    (make-local-variable 'py-indent-offset)
    (setq indent-tabs-mode nil)
    (setq py-indent-offset 2)
    (make-local-variable 'py-smart-indentation)
    (setq py-smart-indentation nil)))

(add-hook 'python-mode-hook 'svn-python-mode-hook)
+
+
+
+;; Much of the APR documentation is embedded perldoc format. The
+;; perldoc program itself sucks, however. If you're the author of
+;; perldoc, I'm sorry, but what were you thinking? Don't you know
+;; that there are people in the world who don't work in vt100
+;; terminals? If I want to view a perldoc page in my Emacs shell
+;; buffer, I have to run the ridiculous command
+;;
+;; $ PAGER=cat perldoc -t target_file
+;;
+;; (Not that this was documented anywhere, I had to figure it out for
+;; myself by reading /usr/bin/perldoc).
+;;
+;; Non-paging behavior should be a standard command-line option. No
+;; program that can output text should *ever* insist on invoking the
+;; pager.
+;;
+;; Anyway, these Emacs commands will solve the problem for us.
+;;
+;; Acknowledgements:
+;; Much of this code is copied from man.el in the FSF Emacs 21.x
+;; sources.
+
;; Faces used by the perldoc fontifier below; customizable so users
;; can match their color theme.
(defcustom svn-perldoc-overstrike-face 'bold
  "*Face to use when fontifying overstrike."
  :type 'face
  :group 'svn-dev)

(defcustom svn-perldoc-underline-face 'underline
  "*Face to use when fontifying underlining."
  :type 'face
  :group 'svn-dev)
+
+
(defun svn-perldoc-softhyphen-to-minus ()
  ;; \255 is some kind of dash in Latin-N. Versions of Debian man, at
  ;; least, emit it even when not in a Latin-N locale.
  ;; Outside Latin-N language environments, rewrite each \255 byte in
  ;; the buffer as a plain "-" so it displays correctly.
  (unless (eq t (compare-strings "latin-" 0 nil
                                 current-language-environment 0 6 t))
    (goto-char (point-min))
    (let ((str "\255"))
      (if enable-multibyte-characters
          (setq str (string-as-multibyte str)))
      (while (search-forward str nil t) (replace-match "-")))))
+
+
(defun svn-perldoc-fontify-buffer ()
  "Convert overstriking and underlining to the correct fonts.
Same for the ANSI bold and normal escape sequences."
  (interactive)
  (message "Please wait, making up the page...")
  ;; ANSI bold: text between "\e[1m" and "\e[0m" gets the overstrike
  ;; face, and the escape sequences themselves are deleted.
  (goto-char (point-min))
  (while (search-forward "\e[1m" nil t)
    (delete-backward-char 4)
    (put-text-property (point)
                       (progn (if (search-forward "\e[0m" nil 'move)
                                  (delete-backward-char 4))
                              (point))
                       'face svn-perldoc-overstrike-face))
  ;; Underlining rendered as "_<backspace>c" or "c<backspace>_".
  (goto-char (point-min))
  (while (search-forward "_\b" nil t)
    (backward-delete-char 2)
    (put-text-property (point) (1+ (point)) 'face svn-perldoc-underline-face))
  (goto-char (point-min))
  (while (search-forward "\b_" nil t)
    (backward-delete-char 2)
    (put-text-property (1- (point)) (point) 'face svn-perldoc-underline-face))
  ;; Overstrike rendered as a character backspaced over itself.
  (goto-char (point-min))
  (while (re-search-forward "\\(.\\)\\(\b\\1\\)+" nil t)
    (replace-match "\\1")
    (put-text-property (1- (point)) (point) 'face svn-perldoc-overstrike-face))
  ;; Bullets: "o" overstruck with "+", in either order.
  (goto-char (point-min))
  (while (re-search-forward "o\b\\+\\|\\+\bo" nil t)
    (replace-match "o")
    (put-text-property (1- (point)) (point) 'face 'bold))
  ;; Box-drawing overstrikes of "-" and "|" become "+".
  (goto-char (point-min))
  (while (re-search-forward "[-|]\\(\b[-|]\\)+" nil t)
    (replace-match "+")
    (put-text-property (1- (point)) (point) 'face 'bold))
  (svn-perldoc-softhyphen-to-minus)
  (message "Please wait, making up the page...done"))
+
+
(defun svn-perldoc-cleanup-buffer ()
  "Remove overstriking and underlining from the current buffer."
  (interactive)
  (message "Please wait, cleaning up the page...")
  (progn
    ;; Drop underline and overstrike backspace sequences outright.
    (goto-char (point-min))
    (while (search-forward "_\b" nil t) (backward-delete-char 2))
    (goto-char (point-min))
    (while (search-forward "\b_" nil t) (backward-delete-char 2))
    (goto-char (point-min))
    (while (re-search-forward "\\(.\\)\\(\b\\1\\)+" nil t)
      (replace-match "\\1"))
    ;; Strip ANSI attribute escape sequences entirely.
    (goto-char (point-min))
    (while (re-search-forward "\e\\[[0-9]+m" nil t) (replace-match ""))
    (goto-char (point-min))
    (while (re-search-forward "o\b\\+\\|\\+\bo" nil t) (replace-match "o"))
    ;; NOTE(review): the search string on the next line appears empty
    ;; here -- it presumably contains a literal control character that
    ;; does not survive display; confirm against the original file.
    (goto-char (point-min))
    (while (re-search-forward "" nil t) (replace-match " ")))
  ;; The last two cleanup steps sit outside the `progn' above; a defun
  ;; body is already sequential, so behavior is unaffected.
  (goto-char (point-min))
  (while (re-search-forward "[-|]\\(\b[-|]\\)+" nil t) (replace-match "+"))
  (svn-perldoc-softhyphen-to-minus)
  (message "Please wait, cleaning up the page...done"))
+
+
;; Entry point to svn-perldoc functionality.
(defun svn-perldoc (file)
  "Run perldoc on FILE, display the output in a buffer."
  (interactive "fRun perldoc on file: ")
  (let ((outbuf (get-buffer-create
                 (format "*%s PerlDoc*" (file-name-nondirectory file))))
        (savepg (getenv "PAGER")))
    ;; Force perldoc to write straight to stdout instead of paging
    ;; (see the long commentary above this defun).
    (setenv "PAGER" "cat") ;; for perldoc
    (save-excursion
      (set-buffer outbuf)
      (delete-region (point-min) (point-max))
      (call-process "perldoc" nil outbuf nil (expand-file-name file))
      (svn-perldoc-fontify-buffer)
      (svn-perldoc-cleanup-buffer)
      ;; Clean out the inevitable leading dead space.
      ;; NOTE(review): "\i" in a Lisp string is just "i", so this
      ;; character class excludes space, the letter i, and newline --
      ;; "\t" (TAB) looks like what was intended; confirm upstream.
      (goto-char (point-min))
      (re-search-forward "[^ \i\n]")
      (beginning-of-line)
      (delete-region (point-min) (point)))
    ;; Restore the user's original pager setting.
    (setenv "PAGER" savepg)
    (display-buffer outbuf)))
+
+
+
+;;; Help developers write log messages.
+
+;; How to use this: just run `svn-log-message'. You might want to
+;; bind it to a key, for example,
+;;
+;; (define-key "\C-cl" 'svn-log-message)
+;;
+;; The log message will accumulate in a file. Later, you can use
+;; that file when you commit:
+;;
+;; $ svn ci -F msg ...
+
(defun svn-log-path-derive (path)
  "Derive a relative directory path for absolute PATH, for a log entry."
  (save-match-data
    ;; NOTE(review): `base' is computed but never used below.
    (let ((base (file-name-nondirectory path))
          (chop-spot (string-match
                      "\\(code/\\)\\|\\(src/\\)\\|\\(projects/\\)"
                      path)))
      (if chop-spot
          (progn
            (setq path (substring path (match-end 0)))
            ;; Kluge for Subversion developers.
            ;; Also drop everything through "subversion/" (11 chars)
            ;; so log paths start inside the source tree.
            (if (string-match "subversion/" path)
                (substring path (+ (match-beginning 0) 11))
              path))
        ;; No known project marker: chop the home-directory prefix.
        ;; NOTE(review): if PATH is not under $HOME, `string-match'
        ;; returns nil and `match-end' refers to stale data --
        ;; presumably PATH is always under the home directory here.
        (string-match (expand-file-name "~/") path)
        (substring path (match-end 0))))))
+
+
(defun svn-log-message-file ()
  "Return the name of the appropriate log message accumulation file.
Usually this is just the file `msg' in the current directory, but
certain areas are treated specially, for example, the Subversion
source tree."
  (save-match-data
    ;; Inside any path containing "subversion", anchor the msg file at
    ;; the top of that tree so all entries accumulate in one place.
    (if (string-match "subversion" default-directory)
        (concat (substring default-directory 0 (match-end 0)) "/msg")
      "msg")))
+
+
(defun svn-log-message (short-file-names)
  "Add to an in-progress log message, based on context around point.
If prefix arg SHORT-FILE-NAMES is non-nil, then use basenames only in
log messages, otherwise use full paths.  The current defun name is
always used.

If the log message already contains material about this defun, then put
point there, so adding to that material is easy.

Else if the log message already contains material about this file, put
point there, and push onto the kill ring the defun name with log
message dressing around it, plus the raw defun name, so yank and
yank-next are both useful.

Else if there is no material about this defun nor file anywhere in the
log message, then put point at the end of the message and insert a new
entry for file with defun.

See also the function `svn-log-message-file'."
  (interactive "P")
  (let ((this-file (if short-file-names
                       (file-name-nondirectory buffer-file-name)
                     (svn-log-path-derive buffer-file-name)))
        ;; Fall back to a hand-rolled C defun finder when add-log
        ;; cannot figure out the current defun.
        (this-defun (or (add-log-current-defun)
                        (save-excursion
                          (save-match-data
                            (if (eq major-mode 'c-mode)
                                (progn
                                  (if (fboundp 'c-beginning-of-statement-1)
                                      (c-beginning-of-statement-1)
                                    (c-beginning-of-statement))
                                  ;; Grab the identifier just before the
                                  ;; parameter list's open paren.
                                  (search-forward "(" nil t)
                                  (forward-char -1)
                                  (forward-sexp -1)
                                  (buffer-substring
                                   (point)
                                   (progn (forward-sexp 1) (point)))))))))
        (log-file (svn-log-message-file)))
    (find-file log-file)
    (goto-char (point-min))
    ;; Strip text properties from strings
    (set-text-properties 0 (length this-file) nil this-file)
    ;; NOTE(review): when no defun was derived this-defun is nil here;
    ;; (length nil) is 0 but the nil OBJECT arg means "current buffer"
    ;; -- confirm this call is harmless in that case.
    (set-text-properties 0 (length this-defun) nil this-defun)
    ;; If log message for defun already in progress, add to it
    (if (and
         this-defun                           ;; we have a defun to work with
         (search-forward this-defun nil t)    ;; it's in the log msg already
         (save-excursion                      ;; and it's about the same file
           (save-match-data
             (if (re-search-backward ; Ick, I want a real filename regexp!
                  "^\\*\\s-+\\([a-zA-Z0-9-_.@=+^$/%!?(){}<>]+\\)" nil t)
                 (string-equal (match-string 1) this-file)
               t))))
        ;; Move just past the "(defun): " intro so typing continues it.
        (if (re-search-forward ":" nil t)
            (if (looking-at " ") (forward-char 1)))
      ;; Else no log message for this defun in progress...
      (goto-char (point-min))
      ;; But if log message for file already in progress, add to it.
      (if (search-forward this-file nil t)
          (progn
            (if this-defun (progn
                             (kill-new (format "(%s): " this-defun))
                             (kill-new this-defun)))
            (search-forward ")" nil t)
            (if (looking-at " ") (forward-char 1)))
        ;; Found neither defun nor its file, so create new entry.
        (goto-char (point-max))
        (if (not (bolp)) (insert "\n"))
        (insert (format "\n* %s (%s): " this-file (or this-defun "")))
        ;; Finally, if no derived defun, put point where the user can
        ;; type it themselves.
        (if (not this-defun) (forward-char -3))))))
+
+
+
+;;; Log message helpers.
+
;; Matches the separator exactly as printed by `svn log'.
(defconst svn-log-msg-sep-line
  "------------------------------------------------------------------------"
  "The line of dashes that separates log messages in 'svn log' output.")

(defconst svn-log-msg-boundary-regexp
  (concat "^" svn-log-msg-sep-line "\n" "r[0-9]+ | ")
  "Regular expression matching the start of a log msg.  The start is
the beginning of the separator line, not the rev/author/date line that
follows the separator line.")
+
(defun svn-narrow-to-log-msg ()
  "Narrow to the current Subversion log message.
This meant to be used while browsing the output of 'svn log'.
If point is not in such output, error."
  (interactive)
  (let ((start nil) (end nil))
    (save-excursion
      ;; Find the separator line preceding this message; the message
      ;; region starts on the following rev/author/date header line.
      (re-search-backward svn-log-msg-boundary-regexp)
      (forward-line 1)
      (setq start (point))
      (end-of-line)
      ;; The header line contains "| N lines"; capture N and skip that
      ;; many lines beyond the blank line that ends the header.
      (re-search-backward "| \\([0-9]+\\) ")
      (let ((num (match-string 1)))
        (re-search-forward "^\n")
        (forward-line (string-to-number num)))
      (setq end (point)))
    (narrow-to-region start end)))
+
+
+
+(message "loaded svn-dev.el")
diff --git a/tools/dev/svn-dev.vim b/tools/dev/svn-dev.vim
new file mode 100644
index 0000000..cf2c50d
--- /dev/null
+++ b/tools/dev/svn-dev.vim
@@ -0,0 +1,76 @@
+" This file sets vim up to use subversion's coding style. It can be applied on
+" a per buffer basis with :source svn-dev.vim, or can be sourced from ~/.vimrc
+" to apply settings to all files vim uses. For other variations try :help autocmd.
+"
+" Licensed to the Apache Software Foundation (ASF) under one
+" or more contributor license agreements. See the NOTICE file
+" distributed with this work for additional information
+" regarding copyright ownership. The ASF licenses this file
+" to you under the Apache License, Version 2.0 (the
+" "License"); you may not use this file except in compliance
+" with the License. You may obtain a copy of the License at
+"
+" http://www.apache.org/licenses/LICENSE-2.0
+"
+" Unless required by applicable law or agreed to in writing,
+" software distributed under the License is distributed on an
+" "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+" KIND, either express or implied. See the License for the
+" specific language governing permissions and limitations
+" under the License.
+"
+" TODO: Try to find a way to wrap comments without putting a * on the next line,
+" since most of subversion doesn't use that style. (Note that taking cro out of
+" formatoptions won't quite work, because then comments won't be wrapped by
+" default).
+"
+" Expand tab characters to spaces
+set expandtab
+
+" Tab key moves 8 spaces
+set tabstop=8
+
+" '>>' moves 4 spaces
+set shiftwidth=4
+
+" Wrap lines at 78 columns.
+" 78 so that vim won't swap over to the right before it wraps a line.
+set textwidth=78
+
+" What counts as part of a word (used for tag matching, and motion commands)
+set iskeyword=a-z,A-Z,48-57,_,.,-,>
+
+" How to wrap lines
+" t=wrap lines, c=wrap comments, inserting comment leader, r=insert comment
+" leader after an <ENTER>, o=Insert comment leader after an 'o', q=Allow
+" formatting of comments with 'gq'
+set formatoptions=tcroq
+
+" Use C style indenting
+set cindent
+
+" Use the following rules to do C style indenting
+" (Note that an s means number*shiftwidth)
+" >=normal indent,
+" e=indent inside braces(brace at end of line),
+" n=Added to normal indent if no braces,
+" f=opening brace of function,
+" {=opening braces,
+" }=close braces (from opening),
+" ^s=indent after brace, if brace is on column 0,
+" := case labels from switch, ==statements after case,
+" t=function return type,
+" +=continuation line,
+" c=comment lines from opener,
+" (=unclosed parens (0 means match),
+" u=same as ( but for second set of parens
+"
+" Try :help cinoptions-values
+set cinoptions=>1s,e0,n-2,f0,{.5s,}0,^-.5s,=.5s,t0,+1s,c3,(0,u0,\:2
+
+" The following modelines can also be used to set the same options.
+"/*
+" * vim:ts=8:sw=4:expandtab:tw=78:fo=tcroq cindent
+" * vim:isk=a-z,A-Z,48-57,_,.,-,>
+" * vim:cino=>1s,e0,n-2,f0,{.5s,}0,^-.5s,=.5s,t0,+1s,c3,(0,u0,\:2
+" */
diff --git a/tools/dev/svn-entries.el b/tools/dev/svn-entries.el
new file mode 100644
index 0000000..fff322a
--- /dev/null
+++ b/tools/dev/svn-entries.el
@@ -0,0 +1,156 @@
+;;; svn-entries.el --- Display .svn/entries field names to the left
+
+;; Copyright (C) 2007 David Glasser
+
+;; Licensed under the same license as Subversion.
+
+;; Licensed to the Apache Software Foundation (ASF) under one
+;; or more contributor license agreements. See the NOTICE file
+;; distributed with this work for additional information
+;; regarding copyright ownership. The ASF licenses this file
+;; to you under the Apache License, Version 2.0 (the
+;; "License"); you may not use this file except in compliance
+;; with the License. You may obtain a copy of the License at
+;;
+;; http://www.apache.org/licenses/LICENSE-2.0
+;;
+;; Unless required by applicable law or agreed to in writing,
+;; software distributed under the License is distributed on an
+;; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+;; KIND, either express or implied. See the License for the
+;; specific language governing permissions and limitations
+;; under the License.
+
+;;; Commentary:
+
+;; Display field names to the left of the lines in a .svn/entries
+;; buffer. Copy svn-entries.el to your load-path and add to your
+;; .emacs:
+
+;; (require 'svn-entries)
+
+;; After opening or editing an entries file, run
+
+;; M-x svn-entries-show
+
+;; To hide:
+
+;; M-x svn-entries-hide
+
+;; (I tried doing this as a minor mode but setting margins during
+;; alist initialization didn't work...)
+
+;; Tested on FSF Emacs 22.
+
+
;; Per-buffer list of the margin-label overlays created by
;; `svn-entries-show'; emptied again by `svn-entries-hide'.
(defvar svn-entries-overlays nil "Overlays used in this buffer.")
(make-variable-buffer-local 'svn-entries-overlays)

(defgroup svn-entries nil
  "Show labels to the left of .svn/entries buffers"
  :group 'convenience)

(defface svn-entries
  '((t :inherit shadow))
  "Face for displaying line numbers in the display margin."
  :group 'svn-entries)
+
(defun svn-entries-set-margins (buf margin)
  "Give every window currently showing BUF a left margin of MARGIN columns."
  (mapc (lambda (win) (set-window-margins win margin))
        (get-buffer-window-list buf nil t)))
+
(defun svn-entries-hide ()
  "Delete all overlays displaying labels for this buffer."
  (interactive)
  (mapc #'delete-overlay svn-entries-overlays)
  (setq svn-entries-overlays nil)
  ;; Collapse the margin again and stop tracking window changes.
  (svn-entries-set-margins (current-buffer) 0)
  (remove-hook 'window-configuration-change-hook
               'svn-entries-after-config t))
+
(defun svn-entries-show ()
  "Update labels for the current buffer."
  (interactive)
  (svn-entries-update (current-buffer))
  ;; Track window-configuration changes buffer-locally so the margin
  ;; is re-applied after splits, deletions, etc.
  (add-hook 'window-configuration-change-hook
            'svn-entries-after-config nil t))
+
;; Field names of one .svn/entries record, in on-disk order: line N of
;; a record is labeled with element N of this vector (the very first
;; line of the file, the format number, is labeled separately).
(defconst svn-entries-labels
  ["name"
   "kind"
   "revision"
   "url"
   "repos"
   "schedule"
   "text-time"
   "checksum"
   "committed-date"
   "committed-rev"
   "last-author"
   "has-props"
   "has-prop-mods"
   "cachable-props"
   "present-props"
   "conflict-old"
   "conflict-new"
   "conflict-wrk"
   "prop-reject-file"
   "copied"
   "copyfrom-url"
   "copyfrom-rev"
   "deleted"
   "absent"
   "incomplete"
   "uuid"
   "lock-token"
   "lock-owner"
   "lock-comment"
   "lock-creation-date"
   "changelist"
   "keep-local"
   "working-size"
   "depth"])
+
+(defconst svn-entries-margin-width (length "lock-creation-date"))
+
(defun svn-entries-update (buffer)
  "Update labels for all windows displaying BUFFER."
  (with-current-buffer buffer
    (svn-entries-hide)
    (save-excursion
      (save-restriction
        (widen)
        ;; `last-line' is computed but not used below; kept as-is.
        (let ((last-line (line-number-at-pos (point-max)))
              (field 0)
              (done nil))
          (goto-char (point-min))
          (while (not done)
            (cond ((= (point) 1)
                   ;; The first line of the file is the format number.
                   (svn-entries-overlay-here "format"))
                  ((= (following-char) 12) ; ^L
                   ;; Form-feed separates records; restart field count.
                   (setq field 0))
                  ((not (eobp))
                   (svn-entries-overlay-here (elt svn-entries-labels field))
                   (setq field (1+ field))))
            (setq done (> (forward-line) 0))))))
    (svn-entries-set-margins buffer svn-entries-margin-width)))
+
(defun svn-entries-overlay-here (label)
  "Display LABEL in the left margin next to the current line."
  (let* ((fmt-label (propertize label 'face 'svn-entries))
         ;; A `display' property on a space renders FMT-LABEL inside
         ;; the left margin instead of in the text area.
         (left-label (propertize " " 'display `((margin left-margin)
                                                ,fmt-label)))
         (ov (make-overlay (point) (point))))
    (push ov svn-entries-overlays)
    (overlay-put ov 'before-string left-label)))
+
(defun svn-entries-after-config ()
  "Re-apply label margins to every visible window showing an overlaid buffer.
Runs from `window-configuration-change-hook'."
  ;; Pass each window's own buffer to the margin check.  The original
  ;; called (window-buffer) with no argument, which always returned the
  ;; selected window's buffer and ignored the window being walked.
  (walk-windows (lambda (w)
                  (svn-entries-set-margins-if-overlaid (window-buffer w)))
                nil 'visible))
+
(defun svn-entries-set-margins-if-overlaid (b)
  "Restore the label margin for buffer B iff it has label overlays."
  (with-current-buffer b
    (when svn-entries-overlays
      (svn-entries-set-margins b svn-entries-margin-width))))
+
+(provide 'svn-entries)
+;;; svn-entries.el ends here
diff --git a/tools/dev/svn-merge-revs.py b/tools/dev/svn-merge-revs.py
new file mode 100755
index 0000000..f67dae4
--- /dev/null
+++ b/tools/dev/svn-merge-revs.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+import sys
+import os
+
+progname = os.path.basename(sys.argv[0])
+
def usage():
    """Print a one-line usage summary and a pointer to --help."""
    for line in ("Usage: %s SOURCEURL WCPATH [r]REVNUM[,] [...]" % progname,
                 "Try '%s --help' for more information" % progname):
        print(line)
+
def help():
    """Print the full help text, substituting this script's name."""
    template = """This script is meant to ease the pain of merging and
reviewing revision(s) on a release branch (although it can be used to
merge and review revisions from any line of development to another).

To allow cutting and pasting from the STATUS file, revision numbers
can be space or comma-separated, and may also include the prefix
'r'.

Lastly, a file (named 'rev1-rev2-rev3.log') is created for you.
This file contains each merge command that was run, the log of the
revision that was merged, and the diff from the previous revision.

Examples:

  %s http://svn.apache.org/repos/asf/subversion/trunk svn-1.2.x-branch \
  r14041, r14149, r14186, r14194, r14238, r14273

  %s http://svn.apache.org/repos/asf/subversion/trunk svn-1.2.x-branch \
  14041 14149 14186 14194 14238 14273"""
    print(template % (progname, progname))
+
+
# ---- Module-level script body: parse arguments, merge each revision,
# ---- and accumulate an easily reviewable log file.
if len(sys.argv) > 1 and sys.argv[1] == '--help':
    help()
    sys.exit(0)

if len(sys.argv) < 4:
    usage()
    sys.exit(255)

src_url = sys.argv[1]
wc_path = sys.argv[2]

# Tolerate comma separated lists of revs (e.g. "r234, r245, r251")
revs = []
for rev in sys.argv[3:]:
    orig_rev = rev
    # Strip a trailing comma and a leading 'r', if present.
    if rev[-1:] == ',':
        rev = rev[:-1]
    if rev[:1] == 'r':
        rev = rev[1:]
    try:
        rev = int(rev)
    except ValueError:
        print("Encountered non integer revision '%s'" % orig_rev)
        usage()
        sys.exit(254)
    revs.append(rev)

# Make an easily reviewable logfile
logfile = "-".join([str(x) for x in revs]) + ".log"
log = open(logfile, 'w')

for rev in revs:
    merge_cmd = ("svn merge -r%i:%i %s %s" % (rev - 1, rev, src_url, wc_path))
    log_cmd = 'svn log -v -r%i %s' % (rev, src_url)
    diff_cmd = 'svn diff -r%i:%i %s' % (rev - 1, rev, src_url)

    # Do the merge
    os.system(merge_cmd)

    # Write our header
    log.write("=" * 72 + '\n')
    log.write(merge_cmd + '\n')

    # Copy the revision's log output into the logfile.
    fh = os.popen(log_cmd)
    for line in fh:
        log.write(line)
    fh.close()

    # Copy the revision's diff into the logfile.  The original left
    # this pipe unclosed (only the log pipe was closed).
    fh = os.popen(diff_cmd)
    for line in fh:
        log.write(line)
    fh.close()

    # Write our footer
    log.write("=" * 72 + '\n' * 10)


log.close()
print("\nYour logfile is '%s'" % logfile)
diff --git a/tools/dev/svnmover/linenoise/LICENSE b/tools/dev/svnmover/linenoise/LICENSE
new file mode 100644
index 0000000..18e8148
--- /dev/null
+++ b/tools/dev/svnmover/linenoise/LICENSE
@@ -0,0 +1,25 @@
+Copyright (c) 2010-2014, Salvatore Sanfilippo <antirez at gmail dot com>
+Copyright (c) 2010-2013, Pieter Noordhuis <pcnoordhuis at gmail dot com>
+
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/tools/dev/svnmover/linenoise/README.markdown b/tools/dev/svnmover/linenoise/README.markdown
new file mode 100644
index 0000000..c845673
--- /dev/null
+++ b/tools/dev/svnmover/linenoise/README.markdown
@@ -0,0 +1,52 @@
+# Linenoise
+
+A minimal, zero-config, BSD licensed, readline replacement used in Redis,
+MongoDB, and Android.
+
+* Single and multi line editing mode with the usual key bindings implemented.
+* History handling.
+* Completion.
+* About 1,100 lines of BSD license source code.
+* Only uses a subset of VT100 escapes (ANSI.SYS compatible).
+
+## Can a line editing library be 20k lines of code?
+
+Line editing with some support for history is a really important feature for command line utilities. Instead of retyping almost the same stuff again and again it's just much better to hit the up arrow and edit on syntax errors, or in order to try a slightly different command. But apparently code dealing with terminals is some sort of Black Magic: readline is 30k lines of code, libedit 20k. Is it reasonable to link small utilities to huge libraries just to get a minimal support for line editing?
+
+So what usually happens is either:
+
+ * Large programs with configure scripts disabling line editing if readline is not present in the system, or not supporting it at all since readline is GPL licensed and libedit (the BSD clone) is not as known and available as readline is (Real world example of this problem: Tclsh).
+ * Smaller programs not using a configure script not supporting line editing at all (A problem we had with Redis-cli for instance).
+
+The result is a pollution of binaries without line editing support.
+
+So I spent more or less two hours doing a reality check resulting in this little library: is it *really* needed for a line editing library to be 20k lines of code? Apparently not, it is possible to get a very small, zero configuration, trivial to embed library, that solves the problem. Smaller programs will just include this, supporting line editing out of the box. Larger programs may use this little library or just checking with configure if readline/libedit is available and resorting to linenoise if not.
+
+## Terminals, in 2010.
+
+Apparently almost every terminal you can happen to use today has some kind of support for basic VT100 escape sequences. So I tried to write a lib using just very basic VT100 features. The resulting library appears to work everywhere I tried to use it, and now can work even on ANSI.SYS compatible terminals, since no
+VT220 specific sequences are used anymore.
+
+The library is currently about 1100 lines of code. In order to use it in your project just look at the *example.c* file in the source distribution, it is trivial. Linenoise is BSD code, so you can use both in free software and commercial software.
+
+## Tested with...
+
+ * Linux text only console ($TERM = linux)
+ * Linux KDE terminal application ($TERM = xterm)
+ * Linux xterm ($TERM = xterm)
+ * Linux Buildroot ($TERM = vt100)
+ * Mac OS X iTerm ($TERM = xterm)
+ * Mac OS X default Terminal.app ($TERM = xterm)
+ * OpenBSD 4.5 through an OSX Terminal.app ($TERM = screen)
+ * IBM AIX 6.1
+ * FreeBSD xterm ($TERM = xterm)
+ * ANSI.SYS
+
+Please test it everywhere you can and report back!
+
+## Let's push this forward!
+
+Patches should be provided in keeping with linenoise's sensibility for
+small, easy to understand code.
+
+Send feedback to antirez at gmail
diff --git a/tools/dev/svnmover/linenoise/linenoise.c b/tools/dev/svnmover/linenoise/linenoise.c
new file mode 100644
index 0000000..058f68e
--- /dev/null
+++ b/tools/dev/svnmover/linenoise/linenoise.c
@@ -0,0 +1,1112 @@
+/* linenoise.c -- guerrilla line editing library against the idea that a
+ * line editing lib needs to be 20,000 lines of C code.
+ *
+ * You can find the latest source code at:
+ *
+ * http://github.com/antirez/linenoise
+ *
+ * Does a number of crazy assumptions that happen to be true in 99.9999% of
+ * the 2010 UNIX computers around.
+ *
+ * ------------------------------------------------------------------------
+ *
+ * Copyright (c) 2010-2014, Salvatore Sanfilippo <antirez at gmail dot com>
+ * Copyright (c) 2010-2013, Pieter Noordhuis <pcnoordhuis at gmail dot com>
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * ------------------------------------------------------------------------
+ *
+ * References:
+ * - http://invisible-island.net/xterm/ctlseqs/ctlseqs.html
+ * - http://www.3waylabs.com/nw/WWW/products/wizcon/vt220.html
+ *
+ * Todo list:
+ * - Filter bogus Ctrl+<char> combinations.
+ * - Win32 support
+ *
+ * Bloat:
+ * - History search like Ctrl+r in readline?
+ *
+ * List of escape sequences used by this program, we do everything just
+ * with three sequences. In order to be so cheap we may have some
+ * flickering effect with some slow terminal, but the lesser sequences
+ * the more compatible.
+ *
+ * EL (Erase Line)
+ * Sequence: ESC [ n K
+ * Effect: if n is 0 or missing, clear from cursor to end of line
+ * Effect: if n is 1, clear from beginning of line to cursor
+ * Effect: if n is 2, clear entire line
+ *
+ * CUF (CUrsor Forward)
+ * Sequence: ESC [ n C
+ * Effect: moves cursor forward n chars
+ *
+ * CUB (CUrsor Backward)
+ * Sequence: ESC [ n D
+ * Effect: moves cursor backward n chars
+ *
+ * The following is used to get the terminal width if getting
+ * the width with the TIOCGWINSZ ioctl fails
+ *
+ * DSR (Device Status Report)
+ * Sequence: ESC [ 6 n
+ * Effect: reports the current cursor position as ESC [ n ; m R
+ * where n is the row and m is the column
+ *
+ * When multi line mode is enabled, we also use an additional escape
+ * sequence. However multi line editing is disabled by default.
+ *
+ * CUU (Cursor Up)
+ * Sequence: ESC [ n A
+ * Effect: moves cursor up of n chars.
+ *
+ * CUD (Cursor Down)
+ * Sequence: ESC [ n B
+ * Effect: moves cursor down of n chars.
+ *
+ * When linenoiseClearScreen() is called, two additional escape sequences
+ * are used in order to clear the screen and position the cursor at home
+ * position.
+ *
+ * CUP (Cursor position)
+ * Sequence: ESC [ H
+ * Effect: moves the cursor to upper left corner
+ *
+ * ED (Erase display)
+ * Sequence: ESC [ 2 J
+ * Effect: clear the whole screen
+ *
+ */
+
+/* Tell the compiler to be quiet about implicit conversions from
+ [s]size_t to int. */
+#if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 2)
+# if defined(__APPLE_CC__) || defined(__clang__)
+# pragma GCC diagnostic ignored "-Wshorten-64-to-32"
+# endif
+#endif
+
+#include <termios.h>
+#include <unistd.h>
+#include <stdlib.h>
+#include <stdio.h>
+#include <errno.h>
+#include <string.h>
+#include <stdlib.h>
+#include <ctype.h>
+#include <sys/types.h>
+#include <sys/ioctl.h>
+#include <unistd.h>
+#include "linenoise.h"
+
+#define LINENOISE_DEFAULT_HISTORY_MAX_LEN 100
+#define LINENOISE_MAX_LINE 4096
+static const char *unsupported_term[] = {"dumb","cons25","emacs",NULL};
+static linenoiseCompletionCallback *completionCallback = NULL;
+
+static struct termios orig_termios; /* In order to restore at exit.*/
+static int rawmode = 0; /* For atexit() function to check if restore is needed*/
+static int mlmode = 0; /* Multi line mode. Default is single line. */
+static int atexit_registered = 0; /* Register atexit just 1 time. */
+static int history_max_len = LINENOISE_DEFAULT_HISTORY_MAX_LEN;
+static int history_len = 0;
+static char **history = NULL;
+
+/* The linenoiseState structure represents the state during line editing.
+ * We pass this state to functions implementing specific editing
+ * functionalities. */
+struct linenoiseState {
+ int ifd; /* Terminal stdin file descriptor. */
+ int ofd; /* Terminal stdout file descriptor. */
+ char *buf; /* Edited line buffer. */
+ size_t buflen; /* Edited line buffer size. */
+ const char *prompt; /* Prompt to display. */
+ size_t plen; /* Prompt length. */
+ size_t pos; /* Current cursor position. */
+ size_t oldpos; /* Previous refresh cursor position. */
+ size_t len; /* Current edited line length. */
+ size_t cols; /* Number of columns in terminal. */
+ size_t maxrows; /* Maximum num of rows used so far (multiline mode) */
+ int history_index; /* The history index we are currently editing. */
+};
+
+enum KEY_ACTION{
+ KEY_NULL = 0, /* NULL */
+ CTRL_A = 1, /* Ctrl+a */
+ CTRL_B = 2, /* Ctrl-b */
+ CTRL_C = 3, /* Ctrl-c */
+ CTRL_D = 4, /* Ctrl-d */
+ CTRL_E = 5, /* Ctrl-e */
+ CTRL_F = 6, /* Ctrl-f */
+ CTRL_H = 8, /* Ctrl-h */
+ TAB = 9, /* Tab */
+ CTRL_K = 11, /* Ctrl+k */
+ CTRL_L = 12, /* Ctrl+l */
+ ENTER = 13, /* Enter */
+ CTRL_N = 14, /* Ctrl-n */
+ CTRL_P = 16, /* Ctrl-p */
+ CTRL_T = 20, /* Ctrl-t */
+ CTRL_U = 21, /* Ctrl+u */
+ CTRL_W = 23, /* Ctrl+w */
+ ESC = 27, /* Escape */
+ BACKSPACE = 127 /* Backspace */
+};
+
+static void linenoiseAtExit(void);
+static void refreshLine(struct linenoiseState *l);
+
+/* Debugging macro. */
+#if 0
+FILE *lndebug_fp = NULL;
+#define lndebug(...) \
+ do { \
+ if (lndebug_fp == NULL) { \
+ lndebug_fp = fopen("/tmp/lndebug.txt","a"); \
+ fprintf(lndebug_fp, \
+ "[%d %d %d] p: %d, rows: %d, rpos: %d, max: %d, oldmax: %d\n", \
+ (int)l->len,(int)l->pos,(int)l->oldpos,plen,rows,rpos, \
+ (int)l->maxrows,old_rows); \
+ } \
+ fprintf(lndebug_fp, ", " __VA_ARGS__); \
+ fflush(lndebug_fp); \
+ } while (0)
+#else
+static void lndebug(const char *fmt, ...) { }
+#endif
+
+/* ======================= Low level terminal handling ====================== */
+
/* Set whether to use multi line mode (non-zero ML) or the default
 * single line mode (zero ML). */
void linenoiseSetMultiLine(int ml) {
    mlmode = ml;
}
+
+/* Return true if the terminal name is in the list of terminals we know are
+ * not able to understand basic escape sequences. */
+static int isUnsupportedTerm(void) {
+ char *term = getenv("TERM");
+ int j;
+
+ if (term == NULL) return 0;
+ for (j = 0; unsupported_term[j]; j++)
+ if (!strcasecmp(term,unsupported_term[j])) return 1;
+ return 0;
+}
+
/* Raw mode: 1960 magic shit. */
/* Put the terminal on FD into raw mode so every keystroke is delivered
 * immediately and unmodified.  Saves the original attributes in
 * orig_termios for later restore.  Returns 0 on success, -1 with
 * errno = ENOTTY on any failure. */
static int enableRawMode(int fd) {
    struct termios raw;

    /* NOTE(review): tests STDIN_FILENO rather than fd -- presumably fd
     * is always stdin here; confirm before reusing with other fds. */
    if (!isatty(STDIN_FILENO)) goto fatal;
    if (!atexit_registered) {
        /* Restore the terminal on process exit; register only once. */
        atexit(linenoiseAtExit);
        atexit_registered = 1;
    }
    if (tcgetattr(fd,&orig_termios) == -1) goto fatal;

    raw = orig_termios;  /* modify the original mode */
    /* input modes: no break, no CR to NL, no parity check, no strip char,
     * no start/stop output control. */
    raw.c_iflag &= ~(BRKINT | ICRNL | INPCK | ISTRIP | IXON);
    /* output modes - disable post processing */
    raw.c_oflag &= ~(OPOST);
    /* control modes - set 8 bit chars */
    raw.c_cflag |= (CS8);
    /* local modes - echoing off, canonical off, no extended functions,
     * no signal chars (^Z,^C) */
    raw.c_lflag &= ~(ECHO | ICANON | IEXTEN | ISIG);
    /* control chars - set return condition: min number of bytes and timer.
     * We want read to return every single byte, without timeout. */
    raw.c_cc[VMIN] = 1; raw.c_cc[VTIME] = 0; /* 1 byte, no timer */

    /* put terminal in raw mode after flushing */
    if (tcsetattr(fd,TCSAFLUSH,&raw) < 0) goto fatal;
    rawmode = 1;
    return 0;

fatal:
    errno = ENOTTY;
    return -1;
}
+
/* Restore the terminal attributes saved by enableRawMode(), if raw
 * mode is currently active.  Safe to call unconditionally. */
static void disableRawMode(int fd) {
    /* Don't even check the return value as it's too late. */
    if (rawmode && tcsetattr(fd,TCSAFLUSH,&orig_termios) != -1)
        rawmode = 0;
}
+
/* Use the ESC [6n escape sequence to query the horizontal cursor position
 * and return it. On error -1 is returned, on success the position of the
 * cursor. */
static int getCursorPosition(int ifd, int ofd) {
    char buf[32];
    int cols, rows;
    unsigned int i = 0;

    /* Report cursor location */
    if (write(ofd, "\x1b[6n", 4) != 4) return -1;

    /* Read the response: ESC [ rows ; cols R */
    while (i < sizeof(buf)-1) {
        if (read(ifd,buf+i,1) != 1) break;
        if (buf[i] == 'R') break;
        i++;
    }
    buf[i] = '\0';  /* overwrites the 'R' terminator (or truncates) */

    /* Parse it.  Rows are parsed as well but only the column count is
     * returned to the caller. */
    if (buf[0] != ESC || buf[1] != '[') return -1;
    if (sscanf(buf+2,"%d;%d",&rows,&cols) != 2) return -1;
    return cols;
}
+
/* Try to get the number of columns in the current terminal, or assume 80
 * if it fails. */
static int getColumns(int ifd, int ofd) {
    struct winsize ws;

    /* NOTE(review): queries fd 1 (stdout) directly rather than ofd;
     * confirm before calling this with any other output descriptor. */
    if (ioctl(1, TIOCGWINSZ, &ws) == -1 || ws.ws_col == 0) {
        /* ioctl() failed. Try to query the terminal itself. */
        int start, cols;

        /* Get the initial position so we can restore it later. */
        start = getCursorPosition(ifd,ofd);
        if (start == -1) goto failed;

        /* Go to right margin and get position. */
        if (write(ofd,"\x1b[999C",6) != 6) goto failed;
        cols = getCursorPosition(ifd,ofd);
        if (cols == -1) goto failed;

        /* Restore position. */
        if (cols > start) {
            char seq[32];
            snprintf(seq,32,"\x1b[%dD",cols-start);
            if (write(ofd,seq,strlen(seq)) == -1) {
                /* Can't recover... */
            }
        }
        return cols;
    } else {
        return ws.ws_col;
    }

failed:
    return 80;
}
+
/* Clear the screen. Used to handle ctrl+l */
void linenoiseClearScreen(void) {
    /* CUP (cursor home) followed by ED 2 (erase whole display); the
     * write() result is tested only to silence unused-result warnings. */
    if (write(STDOUT_FILENO,"\x1b[H\x1b[2J",7) <= 0) {
        /* nothing to do, just to avoid warning. */
    }
}
+
/* Emit the terminal bell (BEL, 0x07) on stderr and flush it.  Used by
 * completion when there is nothing to complete or when every choice
 * has already been shown. */
static void linenoiseBeep(void) {
    fputs("\x7", stderr);
    fflush(stderr);
}
+
+/* ============================== Completion ================================ */
+
+/* Free a list of completion option populated by linenoiseAddCompletion(). */
+static void freeCompletions(linenoiseCompletions *lc) {
+ size_t i;
+ for (i = 0; i < lc->len; i++)
+ free(lc->cvec[i]);
+ if (lc->cvec != NULL)
+ free(lc->cvec);
+}
+
/* This is an helper function for linenoiseEdit() and is called when the
 * user types the <tab> key in order to complete the string currently in the
 * input.
 *
 * The state of the editing is encapsulated into the pointed linenoiseState
 * structure as described in the structure definition.
 *
 * Returns the last character read (so the caller can process it), or -1
 * if reading from the terminal failed. */
static int completeLine(struct linenoiseState *ls) {
    linenoiseCompletions lc = { 0, NULL };
    int nread, nwritten;
    char c = 0;

    completionCallback(ls->buf,&lc);
    if (lc.len == 0) {
        linenoiseBeep();
    } else {
        size_t stop = 0, i = 0;

        /* Cycle through the candidates on repeated <tab>; index lc.len
         * (one past the last candidate) shows the original buffer. */
        while(!stop) {
            /* Show completion or original buffer */
            if (i < lc.len) {
                /* Temporarily swap the candidate into the state just to
                 * render it, then restore the user's own buffer. */
                struct linenoiseState saved = *ls;

                ls->len = ls->pos = strlen(lc.cvec[i]);
                ls->buf = lc.cvec[i];
                refreshLine(ls);
                ls->len = saved.len;
                ls->pos = saved.pos;
                ls->buf = saved.buf;
            } else {
                refreshLine(ls);
            }

            nread = read(ls->ifd,&c,1);
            if (nread <= 0) {
                freeCompletions(&lc);
                return -1;
            }

            switch(c) {
                case 9: /* tab */
                    i = (i+1) % (lc.len+1);
                    if (i == lc.len) linenoiseBeep();
                    break;
                case 27: /* escape */
                    /* Re-show original buffer */
                    if (i < lc.len) refreshLine(ls);
                    stop = 1;
                    break;
                default:
                    /* Update buffer and return */
                    if (i < lc.len) {
                        nwritten = snprintf(ls->buf,ls->buflen,"%s",lc.cvec[i]);
                        ls->len = ls->pos = nwritten;
                    }
                    stop = 1;
                    break;
            }
        }
    }

    freeCompletions(&lc);
    return c; /* Return last read character */
}
+
/* Register a callback function to be called for tab-completion.
 * A NULL fn disables completion (linenoiseEdit() only invokes the
 * callback when it is non-NULL). */
void linenoiseSetCompletionCallback(linenoiseCompletionCallback *fn) {
    completionCallback = fn;
}
+
+/* This function is used by the callback function registered by the user
+ * in order to add completion options given the input string when the
+ * user typed <tab>. See the example.c source code for a very easy to
+ * understand example. */
+void linenoiseAddCompletion(linenoiseCompletions *lc, const char *str) {
+ size_t len = strlen(str);
+ char *copy, **cvec;
+
+ copy = malloc(len+1);
+ if (copy == NULL) return;
+ memcpy(copy,str,len+1);
+ cvec = realloc(lc->cvec,sizeof(char*)*(lc->len+1));
+ if (cvec == NULL) {
+ free(copy);
+ return;
+ }
+ lc->cvec = cvec;
+ lc->cvec[lc->len++] = copy;
+}
+
/* =========================== Line editing ================================= */

/* We define a very simple "append buffer" structure, that is an heap
 * allocated string where we can append to. This is useful in order to
 * write all the escape sequences in a buffer and flush them to the standard
 * output in a single call, to avoid flickering effects. */
struct abuf {
    char *b;   /* heap-allocated buffer (not NUL terminated) */
    int len;   /* number of bytes currently stored in b */
};
+
+static void abInit(struct abuf *ab) {
+ ab->b = NULL;
+ ab->len = 0;
+}
+
+static void abAppend(struct abuf *ab, const char *s, int len) {
+ char *new = realloc(ab->b,ab->len+len);
+
+ if (new == NULL) return;
+ memcpy(new+ab->len,s,len);
+ ab->b = new;
+ ab->len += len;
+}
+
+static void abFree(struct abuf *ab) {
+ free(ab->b);
+}
+
/* Single line low level line refresh.
 *
 * Rewrite the currently edited line accordingly to the buffer content,
 * cursor position, and number of columns of the terminal. */
static void refreshSingleLine(struct linenoiseState *l) {
    char seq[64];
    size_t plen = strlen(l->prompt);
    int fd = l->ofd;
    char *buf = l->buf;
    size_t len = l->len;
    size_t pos = l->pos;
    struct abuf ab;

    /* Scroll the viewport: drop leading characters until the cursor fits
     * on the single terminal row together with the prompt. */
    while((plen+pos) >= l->cols) {
        buf++;
        len--;
        pos--;
    }
    /* Truncate the tail so prompt+line never exceeds the row width. */
    while (plen+len > l->cols) {
        len--;
    }

    abInit(&ab);
    /* Cursor to left edge */
    snprintf(seq,64,"\r");
    abAppend(&ab,seq,strlen(seq));
    /* Write the prompt and the current buffer content */
    abAppend(&ab,l->prompt,strlen(l->prompt));
    abAppend(&ab,buf,len);
    /* Erase to right */
    snprintf(seq,64,"\x1b[0K");
    abAppend(&ab,seq,strlen(seq));
    /* Move cursor to original position. */
    snprintf(seq,64,"\r\x1b[%dC", (int)(pos+plen));
    abAppend(&ab,seq,strlen(seq));
    /* Flush the whole refresh in a single write() to avoid flicker. */
    if (write(fd,ab.b,ab.len) == -1) {} /* Can't recover from write error. */
    abFree(&ab);
}
+
/* Multi line low level line refresh.
 *
 * Rewrite the currently edited line accordingly to the buffer content,
 * cursor position, and number of columns of the terminal. */
static void refreshMultiLine(struct linenoiseState *l) {
    char seq[64];
    int plen = strlen(l->prompt);
    int rows = (plen+l->len+l->cols-1)/l->cols; /* rows used by current buf. */
    int rpos = (plen+l->oldpos+l->cols)/l->cols; /* cursor relative row. */
    int rpos2; /* rpos after refresh. */
    int col; /* column position, zero-based. */
    int old_rows = l->maxrows;
    int fd = l->ofd, j;
    struct abuf ab;

    /* Update maxrows if needed. */
    if (rows > (int)l->maxrows) l->maxrows = rows;

    /* First step: clear all the lines used before. To do so start by
     * going to the last row. */
    abInit(&ab);
    if (old_rows-rpos > 0) {
        lndebug("go down %d", old_rows-rpos);
        snprintf(seq,64,"\x1b[%dB", old_rows-rpos);
        abAppend(&ab,seq,strlen(seq));
    }

    /* Now for every row clear it, go up. */
    for (j = 0; j < old_rows-1; j++) {
        lndebug("clear+up");
        snprintf(seq,64,"\r\x1b[0K\x1b[1A");
        abAppend(&ab,seq,strlen(seq));
    }

    /* Clean the top line. */
    lndebug("clear");
    snprintf(seq,64,"\r\x1b[0K");
    abAppend(&ab,seq,strlen(seq));

    /* Write the prompt and the current buffer content */
    abAppend(&ab,l->prompt,strlen(l->prompt));
    abAppend(&ab,l->buf,l->len);

    /* If we are at the very end of the screen with our prompt, we need to
     * emit a newline and move the prompt to the first column. */
    if (l->pos &&
        l->pos == l->len &&
        (l->pos+plen) % l->cols == 0)
    {
        lndebug("<newline>");
        abAppend(&ab,"\n",1);
        snprintf(seq,64,"\r");
        abAppend(&ab,seq,strlen(seq));
        rows++;
        if (rows > (int)l->maxrows) l->maxrows = rows;
    }

    /* Move cursor to right position. */
    rpos2 = (plen+l->pos+l->cols)/l->cols; /* current cursor relative row. */
    lndebug("rpos2 %d", rpos2);

    /* Go up till we reach the expected position. */
    if (rows-rpos2 > 0) {
        lndebug("go-up %d", rows-rpos2);
        snprintf(seq,64,"\x1b[%dA", rows-rpos2);
        abAppend(&ab,seq,strlen(seq));
    }

    /* Set column. */
    col = (plen+(int)l->pos) % (int)l->cols;
    lndebug("set col %d", 1+col);
    if (col)
        snprintf(seq,64,"\r\x1b[%dC", col);
    else
        snprintf(seq,64,"\r");
    abAppend(&ab,seq,strlen(seq));

    lndebug("\n");
    l->oldpos = l->pos;

    /* Flush the whole refresh in a single write() to avoid flicker. */
    if (write(fd,ab.b,ab.len) == -1) {} /* Can't recover from write error. */
    abFree(&ab);
}
+
+/* Calls the two low level functions refreshSingleLine() or
+ * refreshMultiLine() according to the selected mode. */
+static void refreshLine(struct linenoiseState *l) {
+ if (mlmode)
+ refreshMultiLine(l);
+ else
+ refreshSingleLine(l);
+}
+
/* Insert the character 'c' at cursor current position.
 *
 * When the buffer is full the character is silently dropped.
 *
 * On error writing to the terminal -1 is returned, otherwise 0. */
static int linenoiseEditInsert(struct linenoiseState *l, char c) {
    if (l->len < l->buflen) {
        if (l->len == l->pos) {
            /* Appending at the end of the line. */
            l->buf[l->pos] = c;
            l->pos++;
            l->len++;
            l->buf[l->len] = '\0';
            if ((!mlmode && l->plen+l->len < l->cols) /* || mlmode */) {
                /* Avoid a full update of the line in the
                 * trivial case. */
                if (write(l->ofd,&c,1) == -1) return -1;
            } else {
                refreshLine(l);
            }
        } else {
            /* Inserting in the middle: shift the tail right first. */
            memmove(l->buf+l->pos+1,l->buf+l->pos,l->len-l->pos);
            l->buf[l->pos] = c;
            l->len++;
            l->pos++;
            l->buf[l->len] = '\0';
            refreshLine(l);
        }
    }
    return 0;
}
+
+/* Move cursor on the left. */
+static void linenoiseEditMoveLeft(struct linenoiseState *l) {
+ if (l->pos > 0) {
+ l->pos--;
+ refreshLine(l);
+ }
+}
+
+/* Move cursor on the right. */
+static void linenoiseEditMoveRight(struct linenoiseState *l) {
+ if (l->pos != l->len) {
+ l->pos++;
+ refreshLine(l);
+ }
+}
+
+/* Move cursor to the start of the line. */
+static void linenoiseEditMoveHome(struct linenoiseState *l) {
+ if (l->pos != 0) {
+ l->pos = 0;
+ refreshLine(l);
+ }
+}
+
+/* Move cursor to the end of the line. */
+static void linenoiseEditMoveEnd(struct linenoiseState *l) {
+ if (l->pos != l->len) {
+ l->pos = l->len;
+ refreshLine(l);
+ }
+}
+
/* Substitute the currently edited line with the next or previous history
 * entry as specified by 'dir'.
 *
 * history[] keeps the oldest entry first (linenoiseHistoryAdd appends);
 * history_index counts backwards from the newest entry, so index 0 is the
 * line currently being edited. */
#define LINENOISE_HISTORY_NEXT 0
#define LINENOISE_HISTORY_PREV 1
static void linenoiseEditHistoryNext(struct linenoiseState *l, int dir) {
    if (history_len > 1) {
        /* Update the current history entry before to
         * overwrite it with the next one. */
        free(history[history_len - 1 - l->history_index]);
        history[history_len - 1 - l->history_index] = strdup(l->buf);
        /* Show the new entry */
        l->history_index += (dir == LINENOISE_HISTORY_PREV) ? 1 : -1;
        if (l->history_index < 0) {
            l->history_index = 0;
            return;
        } else if (l->history_index >= history_len) {
            l->history_index = history_len-1;
            return;
        }
        /* Copy the entry into the edit buffer, truncating if needed. */
        strncpy(l->buf,history[history_len - 1 - l->history_index],l->buflen);
        l->buf[l->buflen-1] = '\0';
        l->len = l->pos = strlen(l->buf);
        refreshLine(l);
    }
}
+
+/* Delete the character at the right of the cursor without altering the cursor
+ * position. Basically this is what happens with the "Delete" keyboard key. */
+static void linenoiseEditDelete(struct linenoiseState *l) {
+ if (l->len > 0 && l->pos < l->len) {
+ memmove(l->buf+l->pos,l->buf+l->pos+1,l->len-l->pos-1);
+ l->len--;
+ l->buf[l->len] = '\0';
+ refreshLine(l);
+ }
+}
+
+/* Backspace implementation. */
+static void linenoiseEditBackspace(struct linenoiseState *l) {
+ if (l->pos > 0 && l->len > 0) {
+ memmove(l->buf+l->pos-1,l->buf+l->pos,l->len-l->pos);
+ l->pos--;
+ l->len--;
+ l->buf[l->len] = '\0';
+ refreshLine(l);
+ }
+}
+
+/* Delete the previosu word, maintaining the cursor at the start of the
+ * current word. */
+static void linenoiseEditDeletePrevWord(struct linenoiseState *l) {
+ size_t old_pos = l->pos;
+ size_t diff;
+
+ while (l->pos > 0 && l->buf[l->pos-1] == ' ')
+ l->pos--;
+ while (l->pos > 0 && l->buf[l->pos-1] != ' ')
+ l->pos--;
+ diff = old_pos - l->pos;
+ memmove(l->buf+l->pos,l->buf+old_pos,l->len-old_pos+1);
+ l->len -= diff;
+ refreshLine(l);
+}
+
/* This function is the core of the line editing capability of linenoise.
 * It expects 'fd' to be already in "raw mode" so that every key pressed
 * will be returned ASAP to read().
 *
 * The resulting string is put into 'buf' when the user type enter, or
 * when ctrl+d is typed.
 *
 * STDIN_FD/STDOUT_FD are the raw-mode terminal descriptors, BUF/BUFLEN
 * the caller-owned output buffer, and PROMPT the string shown before
 * the editable text.
 *
 * The function returns the length of the current buffer. */
static int linenoiseEdit(int stdin_fd, int stdout_fd, char *buf, size_t buflen, const char *prompt)
{
    struct linenoiseState l;

    /* Populate the linenoise state that we pass to functions implementing
     * specific editing functionalities. */
    l.ifd = stdin_fd;
    l.ofd = stdout_fd;
    l.buf = buf;
    l.buflen = buflen;
    l.prompt = prompt;
    l.plen = strlen(prompt);
    l.oldpos = l.pos = 0;
    l.len = 0;
    l.cols = getColumns(stdin_fd, stdout_fd);
    l.maxrows = 0;
    l.history_index = 0;

    /* Buffer starts empty. */
    l.buf[0] = '\0';
    l.buflen--; /* Make sure there is always space for the nulterm */

    /* The latest history entry is always our current buffer, that
     * initially is just an empty string. */
    linenoiseHistoryAdd("");

    if (write(l.ofd,prompt,l.plen) == -1) return -1;
    while(1) {
        char c;
        int nread;
        char seq[3];

        nread = read(l.ifd,&c,1);
        if (nread <= 0) return l.len;

        /* Only autocomplete when the callback is set. It returns < 0 when
         * there was an error reading from fd. Otherwise it will return the
         * character that should be handled next. */
        if (c == 9 && completionCallback != NULL) {
            c = completeLine(&l);
            /* Return on errors */
            /* NOTE(review): completeLine() returns int -1 on error; this
             * 'c < 0' test relies on plain char being signed — confirm
             * behavior on unsigned-char ABIs. */
            if (c < 0) return l.len;
            /* Read next character when 0 */
            if (c == 0) continue;
        }

        switch(c) {
        case ENTER: /* enter */
            /* Drop the provisional "current line" history entry. */
            history_len--;
            free(history[history_len]);
            if (mlmode) linenoiseEditMoveEnd(&l);
            return (int)l.len;
        case CTRL_C: /* ctrl-c */
            errno = EAGAIN;
            return -1;
        case BACKSPACE: /* backspace */
        case 8: /* ctrl-h */
            linenoiseEditBackspace(&l);
            break;
        case CTRL_D: /* ctrl-d, remove char at right of cursor, or if the
                        line is empty, act as end-of-file. */
            if (l.len > 0) {
                linenoiseEditDelete(&l);
            } else {
                history_len--;
                free(history[history_len]);
                return -1;
            }
            break;
        case CTRL_T: /* ctrl-t, swaps current character with previous. */
            if (l.pos > 0 && l.pos < l.len) {
                int aux = buf[l.pos-1];
                buf[l.pos-1] = buf[l.pos];
                buf[l.pos] = aux;
                if (l.pos != l.len-1) l.pos++;
                refreshLine(&l);
            }
            break;
        case CTRL_B: /* ctrl-b */
            linenoiseEditMoveLeft(&l);
            break;
        case CTRL_F: /* ctrl-f */
            linenoiseEditMoveRight(&l);
            break;
        case CTRL_P: /* ctrl-p */
            linenoiseEditHistoryNext(&l, LINENOISE_HISTORY_PREV);
            break;
        case CTRL_N: /* ctrl-n */
            linenoiseEditHistoryNext(&l, LINENOISE_HISTORY_NEXT);
            break;
        case ESC: /* escape sequence */
            /* Read the next two bytes representing the escape sequence.
             * Use two calls to handle slow terminals returning the two
             * chars at different times. */
            if (read(l.ifd,seq,1) == -1) break;
            if (read(l.ifd,seq+1,1) == -1) break;

            /* ESC [ sequences. */
            if (seq[0] == '[') {
                if (seq[1] >= '0' && seq[1] <= '9') {
                    /* Extended escape, read additional byte. */
                    if (read(l.ifd,seq+2,1) == -1) break;
                    if (seq[2] == '~') {
                        switch(seq[1]) {
                        case '3': /* Delete key. */
                            linenoiseEditDelete(&l);
                            break;
                        }
                    }
                } else {
                    switch(seq[1]) {
                    case 'A': /* Up */
                        linenoiseEditHistoryNext(&l, LINENOISE_HISTORY_PREV);
                        break;
                    case 'B': /* Down */
                        linenoiseEditHistoryNext(&l, LINENOISE_HISTORY_NEXT);
                        break;
                    case 'C': /* Right */
                        linenoiseEditMoveRight(&l);
                        break;
                    case 'D': /* Left */
                        linenoiseEditMoveLeft(&l);
                        break;
                    case 'H': /* Home */
                        linenoiseEditMoveHome(&l);
                        break;
                    case 'F': /* End*/
                        linenoiseEditMoveEnd(&l);
                        break;
                    }
                }
            }

            /* ESC O sequences. */
            else if (seq[0] == 'O') {
                switch(seq[1]) {
                case 'H': /* Home */
                    linenoiseEditMoveHome(&l);
                    break;
                case 'F': /* End*/
                    linenoiseEditMoveEnd(&l);
                    break;
                }
            }
            break;
        default:
            if (linenoiseEditInsert(&l,c)) return -1;
            break;
        /* Note: in C the position of the "default" label within the switch
         * is irrelevant; the ctrl-key cases below are still reachable. */
        case CTRL_U: /* Ctrl+u, delete the whole line. */
            buf[0] = '\0';
            l.pos = l.len = 0;
            refreshLine(&l);
            break;
        case CTRL_K: /* Ctrl+k, delete from current to end of line. */
            buf[l.pos] = '\0';
            l.len = l.pos;
            refreshLine(&l);
            break;
        case CTRL_A: /* Ctrl+a, go to the start of the line */
            linenoiseEditMoveHome(&l);
            break;
        case CTRL_E: /* ctrl+e, go to the end of the line */
            linenoiseEditMoveEnd(&l);
            break;
        case CTRL_L: /* ctrl+l, clear screen */
            linenoiseClearScreen();
            refreshLine(&l);
            break;
        case CTRL_W: /* ctrl+w, delete previous word */
            linenoiseEditDeletePrevWord(&l);
            break;
        }
    }
    return l.len;
}
+
/* This special mode is used by linenoise in order to print scan codes
 * on screen for debugging / development purposes. It is implemented
 * by the linenoise_example program using the --keycodes option. */
void linenoisePrintKeyCodes(void) {
    char window[4] = {' ', ' ', ' ', ' '};

    printf("Linenoise key codes debugging mode.\n"
            "Press keys to see scan codes. Type 'quit' at any time to exit.\n");
    if (enableRawMode(STDIN_FILENO) == -1) return;
    for (;;) {
        char c;
        int nread = read(STDIN_FILENO, &c, 1);

        if (nread <= 0) continue;
        /* Slide the last four characters left and append the new one, so
         * that typing the literal string "quit" can be detected. */
        memmove(window, window + 1, sizeof(window) - 1);
        window[sizeof(window) - 1] = c;
        if (memcmp(window, "quit", sizeof(window)) == 0) break;

        printf("'%c' %02x (%d) (type quit to exit)\n",
            isprint(c) ? c : '?', (int)c, (int)c);
        printf("\r"); /* Go left edge manually, we are in raw mode. */
        fflush(stdout);
    }
    disableRawMode(STDIN_FILENO);
}
+
/* This function calls the line editing function linenoiseEdit() using
 * the STDIN file descriptor set in raw mode. */
static int linenoiseRaw(char *buf, size_t buflen, const char *prompt) {
    int count;

    if (buflen == 0) {
        errno = EINVAL;
        return -1;
    }

    if (isatty(STDIN_FILENO)) {
        /* Interactive editing. */
        if (enableRawMode(STDIN_FILENO) == -1) return -1;
        count = linenoiseEdit(STDIN_FILENO, STDOUT_FILENO, buf, buflen, prompt);
        disableRawMode(STDIN_FILENO);
        printf("\n");
    } else {
        /* Not a tty: read from file / pipe. */
        if (fgets(buf, buflen, stdin) == NULL) return -1;
        count = strlen(buf);
        /* Strip a single trailing newline, if present. */
        if (count && buf[count-1] == '\n') {
            count--;
            buf[count] = '\0';
        }
    }
    return count;
}
+
+/* The high level function that is the main API of the linenoise library.
+ * This function checks if the terminal has basic capabilities, just checking
+ * for a blacklist of stupid terminals, and later either calls the line
+ * editing function or uses dummy fgets() so that you will be able to type
+ * something even in the most desperate of the conditions. */
+char *linenoise(const char *prompt) {
+ char buf[LINENOISE_MAX_LINE];
+ int count;
+
+ if (isUnsupportedTerm()) {
+ size_t len;
+
+ printf("%s",prompt);
+ fflush(stdout);
+ if (fgets(buf,LINENOISE_MAX_LINE,stdin) == NULL) return NULL;
+ len = strlen(buf);
+ while(len && (buf[len-1] == '\n' || buf[len-1] == '\r')) {
+ len--;
+ buf[len] = '\0';
+ }
+ return strdup(buf);
+ } else {
+ count = linenoiseRaw(buf,LINENOISE_MAX_LINE,prompt);
+ if (count == -1) return NULL;
+ return strdup(buf);
+ }
+}
+
+/* ================================ History ================================= */
+
+/* Free the history, but does not reset it. Only used when we have to
+ * exit() to avoid memory leaks are reported by valgrind & co. */
+static void freeHistory(void) {
+ if (history) {
+ int j;
+
+ for (j = 0; j < history_len; j++)
+ free(history[j]);
+ free(history);
+ }
+}
+
/* At exit we'll try to fix the terminal to the initial conditions. */
static void linenoiseAtExit(void) {
    /* Restore cooked mode and release the history memory. */
    disableRawMode(STDIN_FILENO);
    freeHistory();
}
+
+/* This is the API call to add a new entry in the linenoise history.
+ * It uses a fixed array of char pointers that are shifted (memmoved)
+ * when the history max length is reached in order to remove the older
+ * entry and make room for the new one, so it is not exactly suitable for huge
+ * histories, but will work well for a few hundred of entries.
+ *
+ * Using a circular buffer is smarter, but a bit more complex to handle. */
+int linenoiseHistoryAdd(const char *line) {
+ char *linecopy;
+
+ if (history_max_len == 0) return 0;
+
+ /* Initialization on first call. */
+ if (history == NULL) {
+ history = malloc(sizeof(char*)*history_max_len);
+ if (history == NULL) return 0;
+ memset(history,0,(sizeof(char*)*history_max_len));
+ }
+
+ /* Don't add duplicated lines. */
+ if (history_len && !strcmp(history[history_len-1], line)) return 0;
+
+ /* Add an heap allocated copy of the line in the history.
+ * If we reached the max length, remove the older line. */
+ linecopy = strdup(line);
+ if (!linecopy) return 0;
+ if (history_len == history_max_len) {
+ free(history[0]);
+ memmove(history,history+1,sizeof(char*)*(history_max_len-1));
+ history_len--;
+ }
+ history[history_len] = linecopy;
+ history_len++;
+ return 1;
+}
+
/* Set the maximum length for the history. This function can be called even
 * if there is already some history, the function will make sure to retain
 * just the latest 'len' elements if the new history length value is smaller
 * than the amount of items already inside the history.
 *
 * Returns 1 on success, 0 on invalid length or allocation failure. */
int linenoiseHistorySetMaxLen(int len) {
    char **new;

    if (len < 1) return 0;
    if (history) {
        int tocopy = history_len;

        new = malloc(sizeof(char*)*len);
        if (new == NULL) return 0;

        /* If we can't copy everything, free the elements we'll not use. */
        if (len < tocopy) {
            int j;

            /* The oldest entries live at the start of the array. */
            for (j = 0; j < tocopy-len; j++) free(history[j]);
            tocopy = len;
        }
        memset(new,0,sizeof(char*)*len);
        /* Keep the newest 'tocopy' entries, which sit at the array's end. */
        memcpy(new,history+(history_len-tocopy), sizeof(char*)*tocopy);
        free(history);
        history = new;
    }
    history_max_len = len;
    if (history_len > history_max_len)
        history_len = history_max_len;
    return 1;
}
+
+/* Save the history in the specified file. On success 0 is returned
+ * otherwise -1 is returned. */
+int linenoiseHistorySave(const char *filename) {
+ FILE *fp = fopen(filename,"w");
+ int j;
+
+ if (fp == NULL) return -1;
+ for (j = 0; j < history_len; j++)
+ fprintf(fp,"%s\n",history[j]);
+ fclose(fp);
+ return 0;
+}
+
+/* Load the history from the specified file. If the file does not exist
+ * zero is returned and no operation is performed.
+ *
+ * If the file exists and the operation succeeded 0 is returned, otherwise
+ * on error -1 is returned. */
+int linenoiseHistoryLoad(const char *filename) {
+ FILE *fp = fopen(filename,"r");
+ char buf[LINENOISE_MAX_LINE];
+
+ if (fp == NULL) return -1;
+
+ while (fgets(buf,LINENOISE_MAX_LINE,fp) != NULL) {
+ char *p;
+
+ p = strchr(buf,'\r');
+ if (!p) p = strchr(buf,'\n');
+ if (p) *p = '\0';
+ linenoiseHistoryAdd(buf);
+ }
+ fclose(fp);
+ return 0;
+}
diff --git a/tools/dev/svnmover/linenoise/linenoise.h b/tools/dev/svnmover/linenoise/linenoise.h
new file mode 100644
index 0000000..0e89179
--- /dev/null
+++ b/tools/dev/svnmover/linenoise/linenoise.h
@@ -0,0 +1,66 @@
+/* linenoise.h -- guerrilla line editing library against the idea that a
+ * line editing lib needs to be 20,000 lines of C code.
+ *
+ * See linenoise.c for more information.
+ *
+ * ------------------------------------------------------------------------
+ *
+ * Copyright (c) 2010-2014, Salvatore Sanfilippo <antirez at gmail dot com>
+ * Copyright (c) 2010-2013, Pieter Noordhuis <pcnoordhuis at gmail dot com>
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef __LINENOISE_H
+#define __LINENOISE_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
typedef struct linenoiseCompletions {
  size_t len;    /* number of strings in cvec */
  char **cvec;   /* heap-allocated array of heap-allocated strings */
} linenoiseCompletions;

/* Callback invoked on <tab>: it should call linenoiseAddCompletion() once
 * per candidate completion of the passed line. */
typedef void(linenoiseCompletionCallback)(const char *, linenoiseCompletions *);
void linenoiseSetCompletionCallback(linenoiseCompletionCallback *);
void linenoiseAddCompletion(linenoiseCompletions *, const char *);

/* Prompt for one line of input. Returns a malloc'd string the caller must
 * free, or NULL on EOF/error. */
char *linenoise(const char *prompt);
int linenoiseHistoryAdd(const char *line);
int linenoiseHistorySetMaxLen(int len);
int linenoiseHistorySave(const char *filename);
int linenoiseHistoryLoad(const char *filename);
void linenoiseClearScreen(void);
void linenoiseSetMultiLine(int ml);
void linenoisePrintKeyCodes(void);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* __LINENOISE_H */
diff --git a/tools/dev/svnmover/merge3.c b/tools/dev/svnmover/merge3.c
new file mode 100644
index 0000000..ba32fc5
--- /dev/null
+++ b/tools/dev/svnmover/merge3.c
@@ -0,0 +1,1399 @@
+/*
+ * merge3.c: 3-way merging
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <assert.h>
+
+#include <apr_lib.h>
+
+#include "svn_hash.h"
+#include "svn_iter.h"
+#include "svn_client.h"
+#include "svn_error.h"
+#include "svn_pools.h"
+#include "svn_props.h"
+#include "svn_string.h"
+#include "svn_dirent_uri.h"
+
+#include "private/svn_subr_private.h"
+#include "private/svn_branch_repos.h"
+#include "private/svn_branch_nested.h"
+#include "private/svn_branch_compat.h"
+#include "private/svn_sorts_private.h"
+#include "private/svn_client_private.h"
+
+#include "svnmover.h"
+
+#include "svn_private_config.h"
+
+
+/* ====================================================================== */
+
+#define is_branch_root_element(branch, eid) \
+ (svn_branch__root_eid(branch) == (eid))
+
+/* Return a string suitable for appending to a displayed element name or
+ * element id to indicate that it is a subbranch root element for SUBBRANCH.
+ * Return "" if SUBBRANCH is null.
+ */
+static const char *
+branch_str(svn_branch__state_t *subbranch,
+ apr_pool_t *result_pool)
+{
+ if (subbranch)
+ return apr_psprintf(result_pool,
+ " (branch %s)",
+ svn_branch__get_id(subbranch, result_pool));
+ return "";
+}
+
/* Return a string suitable for appending to a displayed element name or
 * element id to indicate that BRANCH:EID is a subbranch root element.
 * Return "" if the element is not a subbranch root element.
 */
static const char *
subbranch_str(svn_branch__state_t *branch,
              int eid,
              apr_pool_t *result_pool)
{
  svn_branch__state_t *subbranch;

  /* Best-effort lookup: any error is deliberately discarded (presumably
     leaving SUBBRANCH null so branch_str() returns "" -- confirm). */
  svn_error_clear(svn_branch__get_subbranch_at_eid(branch, &subbranch, eid,
                                                   result_pool));
  return branch_str(subbranch, result_pool);
}
+
/* Return the longest known relative path leading to element EID in ELEMENTS.
 *
 * Set *BASE_EID_P to -1 if this path is rooted at the branch root;
 * otherwise, set *BASE_EID_P to the EID from which the path is relative,
 * In the latter case, element *BASE_EID_P is not found in ELEMENTS.
 *
 * If CYCLE_CONFLICTS is non-null, it maps each EID involved in a cycle to
 * [something]. If null, assume there are no cycles.
 *
 * If there is a cycle, set *BASE_EID_P to the EID of the nearest element
 * that is part of a cycle and return the path relative to that element.
 */
static const char *
partial_relpath(int *base_eid_p,
                svn_element__tree_t *elements,
                apr_hash_t *cycle_conflicts,
                int eid,
                apr_pool_t *result_pool)
{
  const char *s = "";
  int this_eid = eid;
  svn_element__content_t *e;

  /* Walk up the parent chain, prepending each name, until we reach the
     root (parent_eid == -1) or an element missing from ELEMENTS. */
  while ((e = svn_element__tree_get(elements, this_eid))
         && (e->parent_eid != -1))
    {
      s = svn_relpath_join(e->name, s, result_pool);

      this_eid = e->parent_eid;

      /* Detect cycles */
      if (cycle_conflicts && svn_eid__hash_get(cycle_conflicts, this_eid))
        {
          /* Cycle detected: stop here; E==NULL marks the walk incomplete. */
          e = NULL;
          break;
        }
    }

  if (base_eid_p)
    {
      if (e)
        {
          /* We reached the root element */
          *base_eid_p = -1;
        }
      else
        {
          /* We came to this nonexistent or cyclic parent element */
          *base_eid_p = this_eid;
        }
    }
  return s;
}
+
/* Set *S_P to a displayable path for element EID in BRANCH.
 *
 * The path is shown absolute ("/...") when it can be traced to the branch
 * root; "<nil>" when the element itself is not in the branch; otherwise an
 * abbreviated prefix ("..." in paths mode, "<eN>" in eids mode) followed by
 * the known trailing part. */
static svn_error_t *
display_relpath(const char **s_p,
                svn_branch__state_t *branch,
                apr_hash_t *cycle_conflicts,
                int eid,
                apr_pool_t *result_pool)
{
  svn_element__tree_t *elements;
  int base_eid;
  const char *s;

  SVN_ERR(svn_branch__state_get_elements(branch, &elements, result_pool));
  s = partial_relpath(&base_eid, elements, cycle_conflicts, eid, result_pool);

  if (base_eid == -1)
    s = apr_psprintf(result_pool, "/%s", s);  /* rooted at branch root */
  else if (base_eid == eid)
    s = "<nil>";  /* the element itself was not found */
  else if (the_ui_mode == UI_MODE_PATHS)
    s = svn_relpath_join("...", s, result_pool);
  else
    {
      const char *eid_str = apr_psprintf(result_pool, "<e%d>", base_eid);
      s = svn_relpath_join(eid_str, s, result_pool);
    }
  *s_p = s;
  return SVN_NO_ERROR;
}
+
/* Set *S_P to a string describing the identity of element EID, showing its
 * path in the YCA, both merge sides and the merged result.  Cycle
 * information is only consulted for the target and merged branches.  In
 * eids UI mode the element id is prefixed. */
static svn_error_t *
merged_element_id_str(const char **s_p,
                      conflict_storage_t *conflict_storage,
                      int eid,
                      apr_pool_t *result_pool)
{
  const char *s_yca, *s_src, *s_tgt, *s_merged;

  SVN_ERR(display_relpath(&s_yca, conflict_storage->yca_branch, NULL,
                          eid, result_pool));
  SVN_ERR(display_relpath(&s_src, conflict_storage->src_branch, NULL,
                          eid, result_pool));
  SVN_ERR(display_relpath(&s_tgt, conflict_storage->tgt_branch,
                          conflict_storage->cycle_conflicts,
                          eid, result_pool));
  SVN_ERR(display_relpath(&s_merged, conflict_storage->merged_branch,
                          conflict_storage->cycle_conflicts,
                          eid, result_pool));
  *s_p = apr_psprintf(result_pool,
                      "yca=%s, side1=%s, side2=%s, merged=%s",
                      s_yca, s_src, s_tgt, s_merged);
  if (the_ui_mode == UI_MODE_EIDS)
    {
      *s_p = apr_psprintf(result_pool,
                          "e%d (%s)", eid, *s_p);
    }
  return SVN_NO_ERROR;
}
+
/* Options to control how strict the merge is about detecting conflicts.
 *
 * The options affect cases that, depending on the user's preference, could
 * either be considered a conflict or be merged to a deterministic result.
 *
 * When a flag is FALSE, the corresponding same-change-on-both-sides case
 * is reported as a conflict instead of being merged.
 *
 * The set of options is flexible and may be extended in future.
 */
typedef struct merge_conflict_policy_t
{
  /* Whether to merge delete-vs-delete */
  svn_boolean_t merge_double_delete;
  /* Whether to merge add-vs-add (with same parent/name/payload) */
  svn_boolean_t merge_double_add;
  /* Whether to merge reparent-vs-reparent (with same parent) */
  svn_boolean_t merge_double_reparent;
  /* Whether to merge rename-vs-rename (with same name) */
  svn_boolean_t merge_double_rename;
  /* Whether to merge modify-vs-modify (with same payload) */
  svn_boolean_t merge_double_modify;
  /* Possible additional controls: */
  /* merge (parent, name, props, text) independently or as a group */
  /* merge (parent, name) independently or as a group */
  /* merge (props, text) independently or as a group */
} merge_conflict_policy_t;
+
/* An element-merge conflict description.
 *
 * Each field holds a copy of the element's content in the respective tree
 * (youngest common ancestor, side 1, side 2), or NULL if the element does
 * not exist there.
 */
typedef struct element_merge3_conflict_t
{
  svn_element__content_t *yca;
  svn_element__content_t *side1;
  svn_element__content_t *side2;
} element_merge3_conflict_t;
+
+static element_merge3_conflict_t *
+element_merge3_conflict_create(svn_element__content_t *yca,
+ svn_element__content_t *side1,
+ svn_element__content_t *side2,
+ apr_pool_t *result_pool)
+{
+ element_merge3_conflict_t *c = apr_pcalloc(result_pool, sizeof(*c));
+
+ c->yca = yca ? svn_element__content_dup(yca, result_pool) : NULL;
+ c->side1 = side1 ? svn_element__content_dup(side1, result_pool) : NULL;
+ c->side2 = side2 ? svn_element__content_dup(side2, result_pool) : NULL;
+ return c;
+}
+
+static element_merge3_conflict_t *
+element_merge3_conflict_dup(element_merge3_conflict_t *old_conflict,
+ apr_pool_t *result_pool)
+{
+ return element_merge3_conflict_create(old_conflict->yca,
+ old_conflict->side1,
+ old_conflict->side2, result_pool);
+}
+
/* Set *S_P to a human-readable description of the element-merge conflict
 * affecting element EID.
 *
 * NOTE(review): parameter C is currently unused -- only the element's
 * identity is shown, not the conflicting yca/side1/side2 contents. */
static svn_error_t *
element_merge3_conflict_str(const char **s_p,
                            conflict_storage_t *conflict_storage,
                            element_merge3_conflict_t *c,
                            int eid,
                            apr_pool_t *result_pool)
{
  const char *id_str;

  SVN_ERR(merged_element_id_str(&id_str, conflict_storage,
                                eid, result_pool));
  *s_p = apr_psprintf(result_pool,
                      "element-merge conflict:\n"
                      " %s",
                      id_str);
  return SVN_NO_ERROR;
}
+
/* A name-clash conflict description: two or more elements competing for
 * the same (parent directory, name) slot.
 */
typedef struct name_clash_conflict_t
{
  /* The parent element and name at which the clash occurs. */
  int parent_eid;
  const char *name;
  /* All EIDs that conflict with each other: hash of (eid -> irrelevant). */
  apr_hash_t *elements;
} name_clash_conflict_t;
+
+static name_clash_conflict_t *
+name_clash_conflict_create(int parent_eid,
+ const char *name,
+ apr_pool_t *result_pool)
+{
+ name_clash_conflict_t *c = apr_pcalloc(result_pool, sizeof(*c));
+
+ c->parent_eid = parent_eid;
+ c->name = apr_pstrdup(result_pool, name);
+ c->elements = apr_hash_make(result_pool);
+ return c;
+}
+
+static svn_error_t *
+name_clash_conflict_str(const char **s_p,
+ conflict_storage_t *conflict_storage,
+ name_clash_conflict_t *c,
+ apr_pool_t *result_pool)
+{
+ apr_hash_index_t *hi2;
+ const char *s = "name-clash conflict: elements";
+
+ for (hi2 = apr_hash_first(result_pool, c->elements);
+ hi2; hi2 = apr_hash_next(hi2))
+ {
+ int eid = svn_eid__hash_this_key(hi2);
+ const char *id_str;
+
+ SVN_ERR(merged_element_id_str(&id_str, conflict_storage,
+ eid, result_pool));
+ s = apr_psprintf(result_pool,
+ "%s\n"
+ " %s",
+ s, id_str);
+ }
+ *s_p = s;
+ return SVN_NO_ERROR;
+}
+
+/* A cycle conflict description: a set of elements whose parent links form
+ * a loop instead of leading to the branch root.
+ */
+typedef struct cycle_conflict_t
+{
+ /* All EIDs in the cycle: hash of (eid -> svn_element__content_t *).
+ (The values are read, e.g. by cycle_conflict_str(); they are filled
+ in by record_cycle().) */
+ apr_hash_t *elements;
+} cycle_conflict_t;
+
+/* Create a new, empty cycle conflict, allocated in RESULT_POOL. */
+static cycle_conflict_t *
+cycle_conflict_create(apr_pool_t *result_pool)
+{
+ cycle_conflict_t *c = apr_pcalloc(result_pool, sizeof(*c));
+
+ c->elements = apr_hash_make(result_pool);
+ return c;
+}
+
+/* Set *S_P to a human-readable description of cycle conflict C, starting
+ * at element EID and walking the parent links once around the cycle.
+ *
+ * EID must be a member of the cycle stored in C (as record_cycle()
+ * guarantees); otherwise the initial lookup would return null. */
+static svn_error_t *
+cycle_conflict_str(const char **s_p,
+ conflict_storage_t *conflict_storage,
+ cycle_conflict_t *c,
+ int eid,
+ apr_pool_t *result_pool)
+{
+ svn_element__content_t *element = svn_eid__hash_get(c->elements, eid);
+ const char *s
+ = apr_psprintf(result_pool, "element '%s' has cyclic parentage",
+ element->name);
+ int this_eid = eid;
+
+ do
+ {
+ const char *id_str;
+
+ SVN_ERR(merged_element_id_str(&id_str, conflict_storage,
+ this_eid, result_pool));
+ s = apr_psprintf(result_pool,
+ "%s\n"
+ " %s",
+ s, id_str);
+ /* Step to the parent; the loop ends when we arrive back at EID. */
+ element = svn_eid__hash_get(c->elements, this_eid);
+ this_eid = element->parent_eid;
+ }
+ while (this_eid != eid);
+
+ *s_p = s;
+ return SVN_NO_ERROR;
+}
+
+/* An orphan conflict description: an element whose parent element does
+ * not exist in the merged branch.
+ */
+typedef struct orphan_conflict_t
+{
+ /* The orphaned element (a deep copy). */
+ svn_element__content_t *element;
+} orphan_conflict_t;
+
+/* Create a new orphan conflict for ELEMENT, deep-copying it into
+ * RESULT_POOL. */
+static orphan_conflict_t *
+orphan_conflict_create(svn_element__content_t *element,
+ apr_pool_t *result_pool)
+{
+ orphan_conflict_t *c = apr_pcalloc(result_pool, sizeof(*c));
+
+ c->element = svn_element__content_dup(element, result_pool);
+ return c;
+}
+
+/* Set *S_P to a human-readable description of orphan conflict C on
+ * element EID, naming both the missing parent and the orphaned child,
+ * allocated in RESULT_POOL. */
+static svn_error_t *
+orphan_conflict_str(const char **s_p,
+ conflict_storage_t *conflict_storage,
+ orphan_conflict_t *c,
+ int eid,
+ apr_pool_t *result_pool)
+{
+ const char *id_str;
+ const char *parent_id_str;
+
+ SVN_ERR(merged_element_id_str(&id_str, conflict_storage,
+ eid, result_pool));
+ SVN_ERR(merged_element_id_str(&parent_id_str, conflict_storage,
+ c->element->parent_eid, result_pool));
+ *s_p = apr_psprintf(result_pool,
+ "orphan conflict: parent (%s) does not exist "
+ "for the following child:\n"
+ " %s",
+ parent_id_str, id_str);
+ return SVN_NO_ERROR;
+}
+
+/* Create a new, zero-initialized conflict storage object in RESULT_POOL.
+ * The branch pointers and per-kind conflict hashes are filled in later
+ * (see branch_merge_subtree_r()). */
+static conflict_storage_t *
+conflict_storage_create(apr_pool_t *result_pool)
+{
+ conflict_storage_t *c = apr_pcalloc(result_pool, sizeof(*c));
+
+ return c;
+}
+
+/* Print all conflicts in CONFLICT_STORAGE via svnmover_notify(), one
+ * section per conflict kind (element-merge, name-clash, cycle, orphan),
+ * followed by a per-kind count summary.
+ *
+ * In UI_MODE_EIDS each line is prefixed with the conflict's id string,
+ * which is what svnmover_conflict_resolved() accepts to resolve it. */
+svn_error_t *
+svnmover_display_conflicts(conflict_storage_t *conflict_storage,
+ apr_pool_t *scratch_pool)
+{
+ apr_hash_index_t *hi;
+
+ svnmover_notify(_("Conflicts:"));
+
+ /* Element-merge conflicts: keyed by EID. */
+ for (hi = apr_hash_first(scratch_pool,
+ conflict_storage->element_merge_conflicts);
+ hi; hi = apr_hash_next(hi))
+ {
+ int eid = svn_eid__hash_this_key(hi);
+ element_merge3_conflict_t *c = apr_hash_this_val(hi);
+ const char *id_string = apr_psprintf(scratch_pool, "e%d", eid);
+ const char *c_str;
+
+ SVN_ERR(element_merge3_conflict_str(&c_str, conflict_storage,
+ c, eid, scratch_pool));
+ if (the_ui_mode == UI_MODE_EIDS)
+ {
+ svnmover_notify(" %s: %s", id_string, c_str);
+ }
+ else
+ {
+ svnmover_notify(" %s", c_str);
+ }
+ }
+ /* Name-clash conflicts: keyed by a "parent_eid/name" string. */
+ for (hi = apr_hash_first(scratch_pool,
+ conflict_storage->name_clash_conflicts);
+ hi; hi = apr_hash_next(hi))
+ {
+ const char *id_string = apr_hash_this_key(hi);
+ name_clash_conflict_t *c = apr_hash_this_val(hi);
+ const char *c_str;
+
+ SVN_ERR(name_clash_conflict_str(&c_str, conflict_storage,
+ c, scratch_pool));
+ if (the_ui_mode == UI_MODE_EIDS)
+ {
+ svnmover_notify(" %s: %s", id_string, c_str);
+ }
+ else
+ {
+ svnmover_notify(" %s", c_str);
+ }
+ }
+ /* Cycle conflicts: keyed by EID. */
+ for (hi = apr_hash_first(scratch_pool,
+ conflict_storage->cycle_conflicts);
+ hi; hi = apr_hash_next(hi))
+ {
+ int eid = svn_eid__hash_this_key(hi);
+ cycle_conflict_t *c = apr_hash_this_val(hi);
+ const char *id_string = apr_psprintf(scratch_pool, "e%d", eid);
+ const char *c_str;
+
+ SVN_ERR(cycle_conflict_str(&c_str, conflict_storage,
+ c, eid, scratch_pool));
+ if (the_ui_mode == UI_MODE_EIDS)
+ {
+ svnmover_notify(" %s: %s", id_string, c_str);
+ }
+ else
+ {
+ svnmover_notify(" %s", c_str);
+ }
+ }
+ /* Orphan conflicts: keyed by EID. */
+ for (hi = apr_hash_first(scratch_pool,
+ conflict_storage->orphan_conflicts);
+ hi; hi = apr_hash_next(hi))
+ {
+ int eid = svn_eid__hash_this_key(hi);
+ orphan_conflict_t *c = apr_hash_this_val(hi);
+ const char *id_string = apr_psprintf(scratch_pool, "e%d", eid);
+ const char *c_str;
+
+ SVN_ERR(orphan_conflict_str(&c_str, conflict_storage,
+ c, eid, scratch_pool));
+ if (the_ui_mode == UI_MODE_EIDS)
+ {
+ svnmover_notify(" %s: %s", id_string, c_str);
+ }
+ else
+ {
+ svnmover_notify(" %s", c_str);
+ }
+ }
+
+ svnmover_notify(_("Summary of conflicts:\n"
+ " %d element-merge conflicts\n"
+ " %d name-clash conflicts\n"
+ " %d cycle conflicts\n"
+ " %d orphan conflicts\n"),
+ apr_hash_count(conflict_storage->element_merge_conflicts),
+ apr_hash_count(conflict_storage->name_clash_conflicts),
+ apr_hash_count(conflict_storage->cycle_conflicts),
+ apr_hash_count(conflict_storage->orphan_conflicts));
+ return SVN_NO_ERROR;
+}
+
+/* The kinds of conflict tracked in conflict_storage_t. */
+enum conflict_kind_t { conflict_kind_element_merge,
+ conflict_kind_clash,
+ conflict_kind_cycle,
+ conflict_kind_orphan };
+
+/* A handle on one stored conflict: which kind it is, which hash it lives
+ * in, and its key within that hash (so it can be deleted to mark it
+ * resolved). */
+typedef struct conflict_object_t
+{
+ enum conflict_kind_t conflict_kind;
+ /* The conflict hash (borrowed, not copied) this conflict lives in. */
+ apr_hash_t *conflicts;
+ /* The key into CONFLICTS: a string for name-clash conflicts, an int
+ (EID) for all other kinds. */
+ const void *key;
+} conflict_object_t;
+
+/* Create a conflict handle in RESULT_POOL. KEY is copied: as a string
+ * for name-clash conflicts, as an int for the EID-keyed kinds. */
+static conflict_object_t *
+conflict_object_create(enum conflict_kind_t conflict_kind,
+ apr_hash_t *conflicts,
+ const void *key,
+ apr_pool_t *result_pool)
+{
+ conflict_object_t *c = apr_pcalloc(result_pool, sizeof(*c));
+
+ c->conflict_kind = conflict_kind;
+ c->conflicts = conflicts;
+ c->key = (conflict_kind == conflict_kind_clash)
+ ? apr_pstrdup(result_pool, key)
+ : apr_pmemdup(result_pool, key, sizeof(int));
+ return c;
+}
+
+/* Set *CONFLICT_P to the conflict in CONFLICTS identified by ID_STRING,
+ * or to null if no such conflict exists. An id of the form "e<number>"
+ * selects an EID-keyed conflict; any other id selects a name-clash
+ * conflict by its string key.
+ *
+ * NOTE(review): if one EID appears in more than one conflict hash, the
+ * later checks overwrite *CONFLICT_P, so the orphan kind wins over cycle,
+ * which wins over element-merge. Also "e" followed by non-digits parses
+ * as EID 0 via atoi(). SCRATCH_POOL is currently unused. */
+static svn_error_t *
+find_conflict(conflict_object_t **conflict_p,
+ conflict_storage_t *conflicts,
+ const char *id_string,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ *conflict_p = NULL;
+
+ if (id_string[0] == 'e')
+ {
+ int which_eid = atoi(id_string + 1);
+
+ if (svn_eid__hash_get(conflicts->element_merge_conflicts, which_eid))
+ {
+ *conflict_p
+ = conflict_object_create(conflict_kind_element_merge,
+ conflicts->element_merge_conflicts,
+ &which_eid, result_pool);
+ }
+ if (svn_eid__hash_get(conflicts->cycle_conflicts, which_eid))
+ {
+ *conflict_p
+ = conflict_object_create(conflict_kind_cycle,
+ conflicts->cycle_conflicts,
+ &which_eid, result_pool);
+ }
+ if (svn_eid__hash_get(conflicts->orphan_conflicts, which_eid))
+ {
+ *conflict_p
+ = conflict_object_create(conflict_kind_orphan,
+ conflicts->orphan_conflicts,
+ &which_eid, result_pool);
+ }
+ }
+ else
+ {
+ if (svn_hash_gets(conflicts->name_clash_conflicts, id_string))
+ {
+ *conflict_p
+ = conflict_object_create(conflict_kind_clash,
+ conflicts->name_clash_conflicts,
+ id_string, result_pool);
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Mark the conflict identified by ID_STRING as resolved, by deleting it
+ * from the hash in CONFLICTS that holds it. Return an SVN_BRANCH__ERR
+ * error if ID_STRING does not match any stored conflict.
+ */
+svn_error_t *
+svnmover_conflict_resolved(conflict_storage_t *conflicts,
+ const char *id_string,
+ apr_pool_t *scratch_pool)
+{
+ conflict_object_t *conflict;
+
+ SVN_ERR(find_conflict(&conflict, conflicts, id_string,
+ scratch_pool, scratch_pool));
+ if (! conflict)
+ {
+ return svn_error_createf(SVN_BRANCH__ERR, NULL,
+ _("Conflict '%s' not found"), id_string);
+ }
+
+ /* Deleting the entry is what "resolved" means here. Name-clash
+ conflicts use string keys; the other kinds use int (EID) keys. */
+ if (conflict->conflict_kind == conflict_kind_clash)
+ {
+ svn_hash_sets(conflict->conflicts, conflict->key, NULL);
+ }
+ else
+ {
+ apr_hash_set(conflict->conflicts, conflict->key, sizeof (int), NULL);
+ }
+ svnmover_notify("Marked conflict '%s' as resolved", id_string);
+ return SVN_NO_ERROR;
+}
+
+/* Return TRUE iff CONFLICTS is non-null and holds at least one
+ * unresolved conflict of any kind. */
+svn_boolean_t
+svnmover_any_conflicts(const conflict_storage_t *conflicts)
+{
+ return conflicts
+ && (apr_hash_count(conflicts->element_merge_conflicts)
+ || apr_hash_count(conflicts->name_clash_conflicts)
+ || apr_hash_count(conflicts->cycle_conflicts)
+ || apr_hash_count(conflicts->orphan_conflicts));
+}
+
+/* Merge the payload (props and text) for one element.
+ *
+ * If there is no conflict, set *CONFLICT_P to FALSE and *RESULT_P to the
+ * merged payload; otherwise set *CONFLICT_P to TRUE and *RESULT_P to NULL.
+ *
+ * Note: only the case where all three of (SIDE1, SIDE2, YCA) exist is
+ * actually merged here; in every other case *RESULT_P is set to NULL with
+ * no conflict. The sole caller, element_merge(), only invokes this in
+ * the all-three-exist case.
+ *
+ * Allocate the result in RESULT_POOL and/or as pointers to the inputs.
+ */
+static void
+payload_merge(svn_element__payload_t **result_p,
+ svn_boolean_t *conflict_p,
+ int eid,
+ svn_element__payload_t *side1,
+ svn_element__payload_t *side2,
+ svn_element__payload_t *yca,
+ const merge_conflict_policy_t *policy,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ svn_boolean_t conflict = FALSE;
+ svn_element__payload_t *result = NULL;
+
+ if (yca && side1 && side2)
+ {
+ /* If one side is unchanged from YCA, take the other side. */
+ if (svn_element__payload_equal(side1, yca, scratch_pool))
+ {
+ result = side2;
+ }
+ else if (svn_element__payload_equal(side2, yca, scratch_pool))
+ {
+ result = side1;
+ }
+ else if (policy->merge_double_modify
+ && svn_element__payload_equal(side1, side2, scratch_pool))
+ {
+ /* Both sides made the same change: accept it once. */
+ /*SVN_DBG(("e%d double modify: ... -> { ... | ... }",
+ eid));*/
+ result = side1;
+ }
+ else
+ {
+ /* ### Need not conflict if can merge props and text separately. */
+
+ /*SVN_DBG(("e%d conflict: payload: ... -> { ... | ... }",
+ eid));*/
+ conflict = TRUE;
+ }
+ }
+
+ *result_p = result;
+ *conflict_p = conflict;
+}
+
+/* Merge the content (parent-eid, name, payload) for one element.
+ *
+ * If there is no conflict, set *CONFLICT_P to NULL and *RESULT_P to the
+ * merged element; otherwise set *CONFLICT_P to a newly created
+ * element_merge3_conflict_t and *RESULT_P to NULL.
+ * Note that *RESULT_P can be null, indicating a deletion.
+ *
+ * This handles any case where at least one of (SIDE1, SIDE2, YCA) exists.
+ * The three content attributes are merged independently; a conflict in
+ * any one of them makes the whole element conflicted.
+ *
+ * Allocate the result in RESULT_POOL and/or as pointers to the inputs.
+ */
+static void
+element_merge(svn_element__content_t **result_p,
+ element_merge3_conflict_t **conflict_p,
+ int eid,
+ svn_element__content_t *side1,
+ svn_element__content_t *side2,
+ svn_element__content_t *yca,
+ const merge_conflict_policy_t *policy,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ svn_boolean_t same1 = svn_element__content_equal(yca, side1, scratch_pool);
+ svn_boolean_t same2 = svn_element__content_equal(yca, side2, scratch_pool);
+ svn_boolean_t conflict = FALSE;
+ svn_element__content_t *result = NULL;
+
+ /* If one side is unchanged (including both-null), take the other side
+ wholesale; this also covers clean additions and deletions. */
+ if (same1)
+ {
+ result = side2;
+ }
+ else if (same2)
+ {
+ result = side1;
+ }
+ else if (yca && side1 && side2)
+ {
+ /* All three sides are different, and all exist */
+ result = apr_pmemdup(result_pool, yca, sizeof(*result));
+
+ /* merge the parent-eid */
+ if (side1->parent_eid == yca->parent_eid)
+ {
+ result->parent_eid = side2->parent_eid;
+ }
+ else if (side2->parent_eid == yca->parent_eid)
+ {
+ result->parent_eid = side1->parent_eid;
+ }
+ else if (policy->merge_double_reparent
+ && side1->parent_eid == side2->parent_eid)
+ {
+ /*SVN_DBG(("e%d double reparent: e%d -> { e%d | e%d }",
+ eid, yca->parent_eid, side1->parent_eid, side2->parent_eid));*/
+ result->parent_eid = side1->parent_eid;
+ }
+ else
+ {
+ /*SVN_DBG(("e%d conflict: parent: e%d -> { e%d | e%d }",
+ eid, yca->parent_eid, side1->parent_eid, side2->parent_eid));*/
+ conflict = TRUE;
+ }
+
+ /* merge the name */
+ if (strcmp(side1->name, yca->name) == 0)
+ {
+ result->name = side2->name;
+ }
+ else if (strcmp(side2->name, yca->name) == 0)
+ {
+ result->name = side1->name;
+ }
+ else if (policy->merge_double_rename
+ && strcmp(side1->name, side2->name) == 0)
+ {
+ /*SVN_DBG(("e%d double rename: %s -> { %s | %s }",
+ eid, yca->name, side1->name, side2->name));*/
+ result->name = side1->name;
+ }
+ else
+ {
+ /*SVN_DBG(("e%d conflict: name: %s -> { %s | %s }",
+ eid, yca->name, side1->name, side2->name));*/
+ conflict = TRUE;
+ }
+
+ /* merge the payload */
+ {
+ svn_boolean_t payload_conflict;
+
+ payload_merge(&result->payload, &payload_conflict,
+ eid, side1->payload, side2->payload, yca->payload,
+ policy, result_pool, scratch_pool);
+ if (payload_conflict)
+ conflict = TRUE;
+ }
+ }
+ else if (! side1 && ! side2)
+ {
+ /* Double delete (as we assume at least one of YCA/SIDE1/SIDE2 exists) */
+ if (policy->merge_double_delete)
+ {
+ /*SVN_DBG(("e%d double delete",
+ eid));*/
+ result = side1;
+ }
+ else
+ {
+ /*SVN_DBG(("e%d conflict: delete vs. delete",
+ eid));*/
+ conflict = TRUE;
+ }
+ }
+ else if (side1 && side2)
+ {
+ /* Double add (as we already handled the case where YCA also exists) */
+ /* May be allowed for equal content of a normal element (not subbranch) */
+ if (policy->merge_double_add
+ && !side1->payload->is_subbranch_root
+ && !side2->payload->is_subbranch_root
+ && svn_element__content_equal(side1, side2, scratch_pool))
+ {
+ /*SVN_DBG(("e%d double add",
+ eid));*/
+ result = side1;
+ }
+ else
+ {
+ /*SVN_DBG(("e%d conflict: add vs. add (%s)",
+ eid,
+ svn_element_content_equal(side1, side2, scratch_pool)
+ ? "same content" : "different content"));*/
+ conflict = TRUE;
+ }
+ }
+ else
+ {
+ /* The remaining cases must be delete vs. modify */
+ /*SVN_DBG(("e%d conflict: delete vs. modify: %d -> { %d | %d }",
+ eid, !!yca, !!side1, !!side2));*/
+ conflict = TRUE;
+ }
+
+ *result_p = result;
+ *conflict_p
+ = conflict ? element_merge3_conflict_create(yca, side1, side2,
+ result_pool) : NULL;
+}
+
+static svn_error_t *
+branch_merge_subtree_r(svn_branch__txn_t *edit_txn,
+ svn_branch__state_t *edit_branch,
+ conflict_storage_t **conflict_storage_p,
+ const svn_branch__el_rev_id_t *src,
+ const svn_branch__el_rev_id_t *tgt,
+ const svn_branch__el_rev_id_t *yca,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool);
+
+/* Merge the subbranch of {SRC, TGT, YCA} found at EID.
+ *
+ * If the subbranch exists in all three, open a corresponding subbranch in
+ * the edit txn (based on TGT) and merge into it recursively. If it was
+ * added on only one side, open it from that side unchanged. The
+ * edit-vs-delete, delete-vs-edit, double-add and double-delete cases are
+ * currently no-ops (### potential conflicts, not yet raised).
+ */
+static svn_error_t *
+merge_subbranch(svn_branch__txn_t *edit_txn,
+ svn_branch__state_t *edit_branch,
+ const svn_branch__el_rev_id_t *src,
+ const svn_branch__el_rev_id_t *tgt,
+ const svn_branch__el_rev_id_t *yca,
+ int eid,
+ apr_pool_t *scratch_pool)
+{
+ svn_branch__state_t *src_subbranch;
+ svn_branch__state_t *tgt_subbranch;
+ svn_branch__state_t *yca_subbranch;
+ svn_branch__el_rev_id_t *subbr_src = NULL;
+ svn_branch__el_rev_id_t *subbr_tgt = NULL;
+ svn_branch__el_rev_id_t *subbr_yca = NULL;
+
+ /* Each of these may come back null: no subbranch on that side. */
+ SVN_ERR(svn_branch__get_subbranch_at_eid(src->branch, &src_subbranch,
+ eid, scratch_pool));
+ SVN_ERR(svn_branch__get_subbranch_at_eid(tgt->branch, &tgt_subbranch,
+ eid, scratch_pool));
+ SVN_ERR(svn_branch__get_subbranch_at_eid(yca->branch, &yca_subbranch,
+ eid, scratch_pool));
+ if (src_subbranch)
+ subbr_src = svn_branch__el_rev_id_create(
+ src_subbranch, svn_branch__root_eid(src_subbranch),
+ src->rev, scratch_pool);
+ if (tgt_subbranch)
+ subbr_tgt = svn_branch__el_rev_id_create(
+ tgt_subbranch, svn_branch__root_eid(tgt_subbranch),
+ tgt->rev, scratch_pool);
+ if (yca_subbranch)
+ subbr_yca = svn_branch__el_rev_id_create(
+ yca_subbranch, svn_branch__root_eid(yca_subbranch),
+ yca->rev, scratch_pool);
+
+ if (subbr_src && subbr_tgt && subbr_yca) /* ?edit vs. ?edit */
+ {
+ conflict_storage_t *conflict_storage;
+ const char *new_branch_id
+ = svn_branch__id_nest(svn_branch__get_id(edit_branch, scratch_pool),
+ eid, scratch_pool);
+ svn_branch__rev_bid_eid_t *from
+ = svn_branch__rev_bid_eid_create(tgt_subbranch->txn->rev,
+ svn_branch__get_id(tgt_subbranch,
+ scratch_pool),
+ svn_branch__root_eid(tgt_subbranch),
+ scratch_pool);
+ svn_branch__state_t *edit_subbranch;
+
+ SVN_ERR(svn_branch__txn_open_branch(edit_txn, &edit_subbranch,
+ new_branch_id, from->eid, from,
+ scratch_pool, scratch_pool));
+
+ /* subbranch possibly changed in source => merge */
+ SVN_ERR(branch_merge_subtree_r(edit_txn, edit_subbranch,
+ &conflict_storage,
+ subbr_src, subbr_tgt, subbr_yca,
+ scratch_pool, scratch_pool));
+ /* ### store this branch's conflict_storage somewhere ...
+ (it is currently discarded, so subbranch conflicts are lost) */
+ }
+ else if (subbr_src && subbr_yca) /* ?edit vs. delete */
+ {
+ /* ### possible conflict (edit vs. delete) */
+ }
+ else if (subbr_tgt && subbr_yca) /* delete vs. ?edit */
+ {
+ /* ### possible conflict (delete vs. edit) */
+ }
+ else if (subbr_src && subbr_tgt) /* double add */
+ {
+ /* ### conflict */
+ }
+ else if (subbr_src) /* added on source branch */
+ {
+ const char *new_branch_id
+ = svn_branch__id_nest(svn_branch__get_id(edit_branch, scratch_pool),
+ eid, scratch_pool);
+ svn_branch__rev_bid_eid_t *from
+ = svn_branch__rev_bid_eid_create(src_subbranch->txn->rev,
+ svn_branch__get_id(src_subbranch,
+ scratch_pool),
+ svn_branch__root_eid(src_subbranch),
+ scratch_pool);
+
+ SVN_ERR(svn_branch__txn_open_branch(edit_txn, NULL /*new_branch_p*/,
+ new_branch_id, from->eid, from,
+ scratch_pool, scratch_pool));
+ }
+ else if (subbr_tgt) /* added on target branch */
+ {
+ const char *new_branch_id
+ = svn_branch__id_nest(svn_branch__get_id(edit_branch, scratch_pool),
+ eid, scratch_pool);
+ svn_branch__rev_bid_eid_t *from
+ = svn_branch__rev_bid_eid_create(tgt_subbranch->txn->rev,
+ svn_branch__get_id(tgt_subbranch,
+ scratch_pool),
+ svn_branch__root_eid(tgt_subbranch),
+ scratch_pool);
+
+ SVN_ERR(svn_branch__txn_open_branch(edit_txn, NULL /*new_branch_p*/,
+ new_branch_id, from->eid, from,
+ scratch_pool, scratch_pool));
+ }
+ else if (subbr_yca) /* double delete */
+ {
+ /* ### conflict? policy option? */
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* qsort-style comparator for svn_sort__item_t's whose values are
+ * svn_element__content_t's: order by parent EID first, then by name. */
+static int
+sort_compare_items_by_peid_and_name(const svn_sort__item_t *a,
+ const svn_sort__item_t *b)
+{
+ const svn_element__content_t *lhs = a->value;
+ const svn_element__content_t *rhs = b->value;
+ int peid_diff = lhs->parent_eid - rhs->parent_eid;
+
+ /* Same parent => fall back to comparing the element names. */
+ return peid_diff ? peid_diff : strcmp(lhs->name, rhs->name);
+}
+
+/* Set *CLASHES_P to all name-clash conflicts in BRANCH: a hash mapping
+ * (const char *)"parent_eid/name" keys to name_clash_conflict_t's,
+ * allocated in RESULT_POOL.
+ *
+ * Works by sorting the elements by (parent-eid, name) so that clashing
+ * elements become adjacent, then scanning for adjacent duplicates.
+ */
+static svn_error_t *
+detect_clashes(apr_hash_t **clashes_p,
+ svn_branch__state_t *branch,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ apr_hash_t *clashes = apr_hash_make(result_pool);
+ svn_element__tree_t *elements;
+ svn_eid__hash_iter_t *ei;
+ int prev_eid = -1;
+ svn_element__content_t *prev_element = NULL;
+
+ SVN_ERR(svn_branch__state_get_elements(branch, &elements, scratch_pool));
+ for (SVN_EID__HASH_ITER_SORTED(ei, elements->e_map,
+ sort_compare_items_by_peid_and_name,
+ scratch_pool))
+ {
+ int eid = ei->eid;
+ svn_element__content_t *element = ei->val;
+
+ if (prev_element
+ && element->parent_eid == prev_element->parent_eid
+ && strcmp(element->name, prev_element->name) == 0)
+ {
+ const char *key = apr_psprintf(result_pool, "%d/%s",
+ element->parent_eid, element->name);
+ name_clash_conflict_t *c;
+
+ c = svn_hash_gets(clashes, key);
+ if (!c)
+ {
+ c = name_clash_conflict_create(
+ element->parent_eid, element->name,
+ result_pool);
+ svn_hash_sets(clashes, key, c);
+ }
+ /* Record both members of the clashing pair. The hash values are
+ irrelevant (see name_clash_conflict_t); use the conflict object
+ itself rather than the address of a stack local, which would
+ dangle once this function returns. */
+ svn_eid__hash_set(c->elements, eid, c);
+ svn_eid__hash_set(c->elements, prev_eid, c);
+ }
+ prev_eid = eid;
+ prev_element = element;
+ }
+
+ *clashes_p = clashes;
+ return SVN_NO_ERROR;
+}
+
+/* For each element in the cycle starting at ONE_EID in EIDS_VISITED,
+ * record an entry in CYCLES[this_eid] mapping to a cycle_conflict_t.
+ * Each such new entry will point to the same cycle_conflict_t. This
+ * cycle_conflict_t will contain the list of elements in the cycle.
+ *
+ * ONE_EID should identify a member of a simple cycle, not an element
+ * that merely has a parent or ancestor in a simple cycle.
+ */
+static svn_error_t *
+record_cycle(apr_hash_t *cycles,
+ apr_hash_t *eids_visited,
+ int one_eid,
+ apr_pool_t *result_pool)
+{
+ cycle_conflict_t *c = cycle_conflict_create(result_pool);
+ int this_eid = one_eid;
+
+ /* Walk the parent links once around the cycle, registering each member
+ in both CYCLES and the shared conflict object. */
+ do
+ {
+ svn_element__content_t *element
+ = svn_eid__hash_get(eids_visited, this_eid);
+
+ svn_eid__hash_set(cycles, this_eid, c);
+ svn_eid__hash_set(c->elements, this_eid, element);
+
+ this_eid = element->parent_eid;
+ /* A cycle member can never be the branch root (parent -1). */
+ assert(this_eid != -1);
+ }
+ while (this_eid != one_eid);
+
+ return SVN_NO_ERROR;
+}
+
+/* Set *CYCLES_P to all (eid -> cycle_conflict_t) cycle conflicts in
+ * BRANCH, allocated in RESULT_POOL.
+ *
+ * ### This implementation is crude: it finds all cycles, but doesn't
+ * report them minimally. It reports each element that leads to a cycle,
+ * without isolating the minimal cycles nor eliminating duplicates.
+ */
+static svn_error_t *
+detect_cycles(apr_hash_t **cycles_p,
+ svn_branch__state_t *branch,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ apr_hash_t *cycles = apr_hash_make(result_pool);
+ apr_hash_index_t *hi;
+ svn_element__tree_t *elements;
+
+ SVN_ERR(svn_branch__state_get_elements(branch, &elements, scratch_pool));
+ for (hi = apr_hash_first(scratch_pool, elements->e_map);
+ hi; hi = apr_hash_next(hi))
+ {
+ int eid = svn_eid__hash_this_key(hi);
+ svn_element__content_t *element = apr_hash_this_val(hi);
+ apr_hash_t *eids_visited;
+ int this_eid;
+
+ /* If the element EID is already recorded as a member of a cycle,
+ there's nothing more to do for it. */
+ if (svn_eid__hash_get(cycles, eid))
+ {
+ continue;
+ }
+
+ eids_visited = apr_hash_make(scratch_pool);
+
+ /* See if we can trace the parentage of EID back to the branch root
+ without finding a cycle. If we find a cycle, store a conflict.
+ (The loop ends cleanly at the root, whose parent-eid is -1, or at
+ a dangling parent, for which the lookup returns null.) */
+ for (this_eid = eid;
+ element = svn_eid__hash_get(elements->e_map, this_eid),
+ element && element->parent_eid != -1;
+ this_eid = element->parent_eid)
+ {
+ svn_eid__hash_set(eids_visited, this_eid, element);
+
+ /* If the element EID is attached to an element of a previously
+ detected cycle, then it's not interesting in itself. */
+ if (svn_eid__hash_get(cycles, element->parent_eid))
+ {
+ break;
+ }
+ /* If this element's parent-EID is already in the path of EIDs
+ visited from EID to the root, then we have found a cycle. */
+ if (svn_eid__hash_get(eids_visited, element->parent_eid))
+ {
+ SVN_ERR(record_cycle(cycles, eids_visited, this_eid,
+ result_pool));
+ break;
+ }
+ }
+ }
+
+ *cycles_p = cycles;
+ return SVN_NO_ERROR;
+}
+
+/* Set *ORPHANS_P to all (eid -> orphan_conflict_t) orphan conflicts in
+ * BRANCH, allocated in RESULT_POOL. An orphan is a non-root element
+ * whose parent element is not present in the branch.
+ */
+static svn_error_t *
+detect_orphans(apr_hash_t **orphans_p,
+ svn_branch__state_t *branch,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ apr_hash_t *orphans = apr_hash_make(result_pool);
+ apr_hash_index_t *hi;
+ svn_element__tree_t *elements;
+
+ SVN_ERR(svn_branch__state_get_elements(branch, &elements, scratch_pool));
+ for (hi = apr_hash_first(scratch_pool, elements->e_map);
+ hi; hi = apr_hash_next(hi))
+ {
+ int eid = svn_eid__hash_this_key(hi);
+ svn_element__content_t *element = apr_hash_this_val(hi);
+
+ /* The root element legitimately has no parent; anything else whose
+ parent is missing from the tree is an orphan. */
+ if (eid != elements->root_eid
+ && ! svn_element__tree_get(elements, element->parent_eid))
+ {
+ orphan_conflict_t *c;
+
+ c = orphan_conflict_create(element, result_pool);
+ svn_eid__hash_set(orphans, eid, c);
+ }
+ }
+
+ *orphans_p = orphans;
+ return SVN_NO_ERROR;
+}
+
+/* Perform a three-way merge of the subtrees SRC, TGT, YCA, writing the
+ * merged result into TGT's branch, and set *CONFLICT_STORAGE_P (in
+ * RESULT_POOL) to the conflicts found.
+ *
+ * The elements to merge are the union of the elements in the three input
+ * subtrees (SRC, TGT, YCA).
+ *
+ * Merge any sub-branches in the same way, recursively.
+ *
+ * ### TODO: Store the merge result separately, without overwriting the
+ * target input state, so that the three input states are all available
+ * for reference while resolving conflicts.
+ */
+static svn_error_t *
+branch_merge_subtree_r(svn_branch__txn_t *edit_txn,
+ svn_branch__state_t *edit_branch,
+ conflict_storage_t **conflict_storage_p,
+ const svn_branch__el_rev_id_t *src,
+ const svn_branch__el_rev_id_t *tgt,
+ const svn_branch__el_rev_id_t *yca,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ svn_branch__subtree_t *s_src, *s_tgt, *s_yca;
+ apr_hash_t *diff_yca_src, *diff_yca_tgt;
+ apr_hash_t *e_conflicts = apr_hash_make(result_pool);
+ conflict_storage_t *conflict_storage = conflict_storage_create(result_pool);
+ svn_element__tree_t *src_elements, *tgt_elements, *yca_elements;
+ apr_hash_t *all_elements;
+ svn_eid__hash_iter_t *ei;
+ /* All "allow double change" policy options enabled. */
+ const merge_conflict_policy_t policy = { TRUE, TRUE, TRUE, TRUE, TRUE };
+ apr_pool_t *iterpool = svn_pool_create(scratch_pool);
+
+ /*SVN_DBG(("merge src: r%2ld %s e%3d",
+ src->rev,
+ svn_branch__get_id(src->branch, scratch_pool), src->eid));*/
+ /*SVN_DBG(("merge tgt: r%2ld %s e%3d",
+ tgt->rev,
+ svn_branch__get_id(tgt->branch, scratch_pool), tgt->eid));*/
+ /*SVN_DBG(("merge yca: r%2ld %s e%3d",
+ yca->rev,
+ svn_branch__get_id(yca->branch, scratch_pool), yca->eid));*/
+
+ svnmover_notify_v("merging into branch %s",
+ edit_branch->bid);
+ /*
+ for (eid, diff1) in element_differences(YCA, FROM):
+ diff2 = element_diff(eid, YCA, TO)
+ if diff1 and diff2:
+ result := element_merge(diff1, diff2)
+ elif diff1:
+ result := diff1.right
+ # else no change
+ */
+ SVN_ERR(svn_branch__get_subtree(src->branch, &s_src, src->eid, scratch_pool));
+ SVN_ERR(svn_branch__get_subtree(tgt->branch, &s_tgt, tgt->eid, scratch_pool));
+ SVN_ERR(svn_branch__get_subtree(yca->branch, &s_yca, yca->eid, scratch_pool));
+
+ /* ALL_ELEMENTS enumerates the elements in union of subtrees YCA,SRC,TGT. */
+ all_elements = hash_overlay(s_src->tree->e_map,
+ s_tgt->tree->e_map);
+ all_elements = hash_overlay(s_yca->tree->e_map,
+ all_elements);
+
+ SVN_ERR(svn_branch__state_get_elements(src->branch, &src_elements,
+ scratch_pool));
+ SVN_ERR(svn_branch__state_get_elements(tgt->branch, &tgt_elements,
+ scratch_pool));
+ SVN_ERR(svn_branch__state_get_elements(yca->branch, &yca_elements,
+ scratch_pool));
+
+ /* Find the two changes for each element that is in any of the subtrees,
+ even for an element that is (for example) not in YCA or SRC but has
+ been moved into TGT. */
+ SVN_ERR(svnmover_element_differences(&diff_yca_src,
+ yca_elements, src_elements,
+ all_elements,
+ scratch_pool, scratch_pool));
+ /* ### We only need to know about YCA:TGT differences for elements that
+ differ in YCA:SRC, but right now we ask for all differences. */
+ SVN_ERR(svnmover_element_differences(&diff_yca_tgt,
+ yca_elements, tgt_elements,
+ all_elements,
+ scratch_pool, scratch_pool));
+
+ for (SVN_EID__HASH_ITER_SORTED_BY_EID(ei, all_elements, scratch_pool))
+ {
+ int eid = ei->eid;
+ /* Each diff entry is a two-element array: [0]=old, [1]=new; null
+ entry means no difference for this EID. */
+ svn_element__content_t **e_yca_src
+ = svn_eid__hash_get(diff_yca_src, eid);
+ svn_element__content_t **e_yca_tgt
+ = svn_eid__hash_get(diff_yca_tgt, eid);
+ svn_element__content_t *e_yca;
+ svn_element__content_t *e_src;
+ svn_element__content_t *e_tgt;
+ svn_element__content_t *result;
+ element_merge3_conflict_t *conflict;
+
+ svn_pool_clear(iterpool);
+
+ /* If an element hasn't changed in the source branch, there is
+ no need to do anything with it in the target branch. We could
+ use element_merge() for any case where at least one of (SRC,
+ TGT, YCA) exists, but we choose to skip it when SRC == YCA. */
+ if (! e_yca_src)
+ {
+ /* Still need to merge any subbranch linked to this element.
+ There were no changes to the link element but that doesn't
+ mean there were no changes to the linked branch. */
+ SVN_ERR(merge_subbranch(edit_txn, edit_branch,
+ src, tgt, yca, eid, iterpool));
+
+ continue;
+ }
+
+ e_yca = e_yca_src[0];
+ e_src = e_yca_src[1];
+ /* No YCA:TGT diff recorded => TGT content equals YCA content. */
+ e_tgt = e_yca_tgt ? e_yca_tgt[1] : e_yca_src[0];
+
+ /* If some but not all of the three subtree-root elements are branch
+ roots, then we will see the parentage of this element changing to
+ or from 'no parent' in one or both sides of the merge. We want to
+ ignore this part of the difference, as parentage of a subtree root
+ element is by definition not part of a 'subtree', so blank it out.
+ (If we merged it, it could break the single-rooted-tree invariant
+ of the target branch.)
+ */
+ if (is_branch_root_element(src->branch, eid)
+ || is_branch_root_element(tgt->branch, eid)
+ || is_branch_root_element(yca->branch, eid))
+ {
+ e_src = svn_element__content_create(
+ e_tgt->parent_eid, e_tgt->name, e_src->payload, iterpool);
+ e_yca = svn_element__content_create(
+ e_tgt->parent_eid, e_tgt->name, e_yca->payload, iterpool);
+ }
+
+ element_merge(&result, &conflict,
+ eid, e_src, e_tgt, e_yca,
+ &policy,
+ scratch_pool, scratch_pool);
+
+ if (conflict)
+ {
+ svnmover_notify_v("! e%d <conflict>", eid);
+ svn_eid__hash_set(e_conflicts, eid,
+ element_merge3_conflict_dup(conflict, result_pool));
+ }
+ else
+ {
+ /* Notify as modify/move, delete, or add, then apply the result. */
+ if (e_tgt && result)
+ {
+ svnmover_notify_v("M/V e%d %s%s",
+ eid, result->name,
+ subbranch_str(tgt->branch, eid, iterpool));
+ }
+ else if (e_tgt)
+ {
+ svnmover_notify_v("D e%d %s%s",
+ eid, e_yca->name,
+ subbranch_str(yca->branch, eid, iterpool));
+
+ /* ### If this is a subbranch-root element being deleted, shouldn't
+ we see if there were any changes to be merged in the subbranch,
+ and raise a delete-vs-edit conflict if so? */
+ }
+ else if (result)
+ {
+ svnmover_notify_v("A e%d %s%s",
+ eid, result->name,
+ subbranch_str(src->branch, eid, iterpool));
+ }
+
+ SVN_ERR(svn_branch__state_set_element(tgt->branch, eid,
+ result, iterpool));
+
+ if (result)
+ {
+ SVN_ERR(merge_subbranch(edit_txn, edit_branch,
+ src, tgt, yca, eid, iterpool));
+ }
+ }
+ }
+ svn_pool_destroy(iterpool);
+
+ /* Record the input branches and run the whole-tree conflict detectors
+ on the (now merged) target branch. */
+ conflict_storage->yca_branch = yca->branch;
+ conflict_storage->src_branch = src->branch;
+ conflict_storage->tgt_branch = tgt->branch;
+ conflict_storage->merged_branch = tgt->branch; /* ### should be != tgt */
+ conflict_storage->element_merge_conflicts = e_conflicts;
+ SVN_ERR(detect_clashes(&conflict_storage->name_clash_conflicts,
+ tgt->branch,
+ result_pool, scratch_pool));
+ SVN_ERR(detect_cycles(&conflict_storage->cycle_conflicts,
+ tgt->branch,
+ result_pool, scratch_pool));
+ SVN_ERR(detect_orphans(&conflict_storage->orphan_conflicts,
+ tgt->branch,
+ result_pool, scratch_pool));
+
+ svnmover_notify_v("merging into branch %s -- finished",
+ svn_branch__get_id(tgt->branch, scratch_pool));
+
+ *conflict_storage_p = conflict_storage;
+ return SVN_NO_ERROR;
+}
+
+/* Public entry point: three-way merge of SRC into TGT with common
+ * ancestor YCA, recursing into subbranches.
+ *
+ * If CONFLICT_STORAGE_P is non-null, set it to the conflicts found
+ * (allocated in RESULT_POOL), or to null if there were none.
+ */
+svn_error_t *
+svnmover_branch_merge(svn_branch__txn_t *edit_txn,
+ svn_branch__state_t *edit_branch,
+ conflict_storage_t **conflict_storage_p,
+ svn_branch__el_rev_id_t *src,
+ svn_branch__el_rev_id_t *tgt,
+ svn_branch__el_rev_id_t *yca,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ conflict_storage_t *conflicts;
+
+ /*SVN_ERR(verify_exists_in_branch(from, scratch_pool));*/
+ /*SVN_ERR(verify_exists_in_branch(to, scratch_pool));*/
+ /*SVN_ERR(verify_exists_in_branch(yca, scratch_pool));*/
+ /*SVN_ERR(verify_not_subbranch_root(from, scratch_pool));*/
+ /*SVN_ERR(verify_not_subbranch_root(to, scratch_pool));*/
+ /*SVN_ERR(verify_not_subbranch_root(yca, scratch_pool));*/
+
+ SVN_ERR(branch_merge_subtree_r(edit_txn, edit_branch,
+ &conflicts,
+ src, tgt, yca,
+ result_pool, scratch_pool));
+
+ if (conflict_storage_p)
+ {
+ if (svnmover_any_conflicts(conflicts))
+ {
+ *conflict_storage_p = conflicts;
+ }
+ else
+ {
+ *conflict_storage_p = NULL;
+ }
+ }
+ return SVN_NO_ERROR;
+}
+
diff --git a/tools/dev/svnmover/ra.c b/tools/dev/svnmover/ra.c
new file mode 100644
index 0000000..f7293b2
--- /dev/null
+++ b/tools/dev/svnmover/ra.c
@@ -0,0 +1,586 @@
+/*
+ * ra.c: storing and retrieving svnmover's branching/move-tracking metadata via the RA layer
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+/* ==================================================================== */
+
+/*** Includes. ***/
+#define APR_WANT_STRFUNC
+#include <apr_want.h>
+
+#include <apr.h>
+#include <apr_strings.h>
+#include <apr_pools.h>
+#include <apr_hash.h>
+#include <apr_uri.h>
+
+#include "svn_hash.h"
+#include "svn_types.h"
+#include "svn_error.h"
+#include "svn_delta.h"
+#include "svn_ra.h"
+#include "svn_dirent_uri.h"
+#include "svn_props.h"
+#include "svn_iter.h"
+
+#include "private/svn_branch_compat.h"
+#include "private/svn_branch_repos.h"
+#include "private/svn_ra_private.h"
+#include "private/svn_delta_private.h"
+#include "private/svn_string_private.h"
+#include "svnmover.h"
+#include "svn_private_config.h"
+
+
+/* Read the branching info string VALUE belonging to revision REVISION.
+ */
+static svn_error_t *
+read_rev_prop(svn_string_t **value,
+ svn_ra_session_t *ra_session,
+ const char *branch_info_dir,
+ svn_revnum_t revision,
+ apr_pool_t *result_pool)
+{
+ apr_pool_t *scratch_pool = result_pool;
+
+ if (branch_info_dir)
+ {
+ const char *file_path;
+ svn_stream_t *stream;
+ svn_error_t *err;
+
+ file_path = svn_dirent_join(branch_info_dir,
+ apr_psprintf(scratch_pool, "branch-info-r%ld",
+ revision), scratch_pool);
+ err = svn_stream_open_readonly(&stream, file_path, scratch_pool, scratch_pool);
+ if (err)
+ {
+ svn_error_clear(err);
+ *value = NULL;
+ return SVN_NO_ERROR;
+ }
+ SVN_ERR(err);
+ SVN_ERR(svn_string_from_stream2(value, stream, 0, result_pool));
+ }
+ else
+ {
+ SVN_ERR(svn_ra_rev_prop(ra_session, revision, "svn-br-info", value,
+ result_pool));
+ }
+ return SVN_NO_ERROR;
+}
+
+/* Store the branching info string VALUE belonging to revision REVISION.
+ */
+static svn_error_t *
+write_rev_prop(svn_ra_session_t *ra_session,
+ const char *branch_info_dir,
+ svn_revnum_t revision,
+ svn_string_t *value,
+ apr_pool_t *scratch_pool)
+{
+ if (branch_info_dir)
+ {
+ const char *file_path;
+ svn_error_t *err;
+
+ file_path = svn_dirent_join(branch_info_dir,
+ apr_psprintf(scratch_pool, "branch-info-r%ld",
+ revision), scratch_pool);
+ err = svn_io_file_create(file_path, value->data, scratch_pool);
+ if (err)
+ {
+ svn_error_clear(err);
+ SVN_ERR(svn_io_dir_make(branch_info_dir, APR_FPROT_OS_DEFAULT,
+ scratch_pool));
+ err = svn_io_file_create(file_path, value->data, scratch_pool);
+ }
+ SVN_ERR(err);
+ }
+ else
+ {
+ SVN_ERR(svn_ra_change_rev_prop2(ra_session, revision, "svn-br-info",
+ NULL, value, scratch_pool));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Create a new revision-root object and read the move-tracking /
+ * branch-tracking metadata from the repository into it.
+ */
+static svn_error_t *
+branch_revision_fetch_info(svn_branch__txn_t **txn_p,
+ svn_branch__repos_t *repos,
+ svn_ra_session_t *ra_session,
+ const char *branch_info_dir,
+ svn_revnum_t revision,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ svn_string_t *value;
+ svn_stream_t *stream;
+ svn_branch__txn_t *txn;
+
+ SVN_ERR_ASSERT(SVN_IS_VALID_REVNUM(revision));
+
+ /* Read initial state from repository */
+ SVN_ERR(read_rev_prop(&value, ra_session, branch_info_dir, revision,
+ scratch_pool));
+ if (! value && revision == 0)
+ {
+ value = svn_branch__get_default_r0_metadata(scratch_pool);
+ /*SVN_DBG(("fetch_per_revision_info(r%ld): LOADED DEFAULT INFO:\n%s",
+ revision, value->data));*/
+ SVN_ERR(write_rev_prop(ra_session, branch_info_dir, revision, value,
+ scratch_pool));
+ }
+ else if (! value)
+ {
+ return svn_error_createf(SVN_BRANCH__ERR, NULL,
+ _("Move-tracking metadata not found in r%ld "
+ "in this repository. Run svnmover on an "
+ "empty repository to initialize the "
+ "metadata"), revision);
+ }
+ stream = svn_stream_from_string(value, scratch_pool);
+
+ SVN_ERR(svn_branch__txn_parse(&txn, repos, stream,
+ result_pool, scratch_pool));
+
+ /* Self-test: writing out the info should produce exactly the same string. */
+ {
+ svn_stringbuf_t *buf = svn_stringbuf_create_empty(scratch_pool);
+
+ stream = svn_stream_from_stringbuf(buf, scratch_pool);
+ SVN_ERR(svn_branch__txn_serialize(txn, stream, scratch_pool));
+ SVN_ERR(svn_stream_close(stream));
+
+ SVN_ERR_ASSERT(svn_string_compare(value,
+ svn_stringbuf__morph_into_string(buf)));
+ }
+
+ *txn_p = txn;
+ return SVN_NO_ERROR;
+}
+
+/* Fetch all element payloads in TXN.
+ */
+static svn_error_t *
+txn_fetch_payloads(svn_branch__txn_t *txn,
+ svn_branch__compat_fetch_func_t fetch_func,
+ void *fetch_baton,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ apr_array_header_t *branches = svn_branch__txn_get_branches(txn, scratch_pool);
+ int i;
+
+ /* Read payload of each element.
+ (In a real implementation, of course, we'd delay this until demanded.) */
+ for (i = 0; i < branches->nelts; i++)
+ {
+ svn_branch__state_t *branch = APR_ARRAY_IDX(branches, i, void *);
+ svn_element__tree_t *element_tree;
+ apr_hash_index_t *hi;
+
+ SVN_ERR(svn_branch__state_get_elements(branch, &element_tree,
+ scratch_pool));
+ for (hi = apr_hash_first(scratch_pool, element_tree->e_map);
+ hi; hi = apr_hash_next(hi))
+ {
+ int eid = svn_eid__hash_this_key(hi);
+ svn_element__content_t *element /*= apr_hash_this_val(hi)*/;
+
+ SVN_ERR(svn_branch__state_get_element(branch, &element,
+ eid, scratch_pool));
+ if (! element->payload->is_subbranch_root)
+ {
+ SVN_ERR(svn_branch__compat_fetch(&element->payload,
+ txn,
+ element->payload->branch_ref,
+ fetch_func, fetch_baton,
+ result_pool, scratch_pool));
+ }
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Create a new repository object and read the move-tracking /
+ * branch-tracking metadata from the repository into it.
+ */
+static svn_error_t *
+branch_repos_fetch_info(svn_branch__repos_t **repos_p,
+ svn_ra_session_t *ra_session,
+ const char *branch_info_dir,
+ svn_branch__compat_fetch_func_t fetch_func,
+ void *fetch_baton,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ svn_branch__repos_t *repos
+ = svn_branch__repos_create(result_pool);
+ svn_revnum_t base_revision;
+ svn_revnum_t r;
+
+ SVN_ERR(svn_ra_get_latest_revnum(ra_session, &base_revision, scratch_pool));
+
+ for (r = 0; r <= base_revision; r++)
+ {
+ svn_branch__txn_t *txn;
+
+ SVN_ERR(branch_revision_fetch_info(&txn,
+ repos, ra_session, branch_info_dir,
+ r,
+ result_pool, scratch_pool));
+ SVN_ERR(svn_branch__repos_add_revision(repos, txn));
+ SVN_ERR(txn_fetch_payloads(txn, fetch_func, fetch_baton,
+ result_pool, scratch_pool));
+ }
+
+ *repos_p = repos;
+ return SVN_NO_ERROR;
+}
+
+/* Return a mutable state based on revision BASE_REVISION in REPOS.
+ */
+static svn_error_t *
+branch_get_mutable_state(svn_branch__txn_t **txn_p,
+ svn_branch__repos_t *repos,
+ svn_ra_session_t *ra_session,
+ const char *branch_info_dir,
+ svn_revnum_t base_revision,
+ svn_branch__compat_fetch_func_t fetch_func,
+ void *fetch_baton,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ svn_branch__txn_t *txn;
+ apr_array_header_t *branches;
+ int i;
+
+ SVN_ERR_ASSERT(SVN_IS_VALID_REVNUM(base_revision));
+
+ SVN_ERR(branch_revision_fetch_info(&txn,
+ repos, ra_session, branch_info_dir,
+ base_revision,
+ result_pool, scratch_pool));
+ SVN_ERR_ASSERT(txn->rev == base_revision);
+ SVN_ERR(txn_fetch_payloads(txn, fetch_func, fetch_baton,
+ result_pool, scratch_pool));
+
+ /* Update all the 'predecessor' info to point to the BASE_REVISION instead
+ of to that revision's predecessor. */
+ txn->base_rev = base_revision;
+ txn->rev = SVN_INVALID_REVNUM;
+
+ branches = svn_branch__txn_get_branches(txn, scratch_pool);
+ for (i = 0; i < branches->nelts; i++)
+ {
+ svn_branch__state_t *b = APR_ARRAY_IDX(branches, i, void *);
+ svn_branch__history_t *history
+ = svn_branch__history_create_empty(result_pool);
+
+ /* Set each branch's parent to the branch in the base rev */
+ svn_branch__rev_bid_t *parent
+ = svn_branch__rev_bid_create(base_revision,
+ svn_branch__get_id(b, scratch_pool),
+ result_pool);
+
+ svn_hash_sets(history->parents,
+ apr_pstrdup(result_pool, b->bid), parent);
+ SVN_ERR(svn_branch__state_set_history(b, history, scratch_pool));
+ }
+
+ *txn_p = txn;
+ return SVN_NO_ERROR;
+}
+
+/* Store the move-tracking / branch-tracking metadata from TXN into the
+ * repository. TXN->rev is the newly committed revision number.
+ */
+static svn_error_t *
+store_repos_info(svn_branch__txn_t *txn,
+ svn_ra_session_t *ra_session,
+ const char *branch_info_dir,
+ apr_pool_t *scratch_pool)
+{
+ svn_stringbuf_t *buf = svn_stringbuf_create_empty(scratch_pool);
+ svn_stream_t *stream = svn_stream_from_stringbuf(buf, scratch_pool);
+
+ SVN_ERR(svn_branch__txn_serialize(txn, stream, scratch_pool));
+
+ SVN_ERR(svn_stream_close(stream));
+ /*SVN_DBG(("store_repos_info: %s", buf->data));*/
+ SVN_ERR(write_rev_prop(ra_session, branch_info_dir, txn->rev,
+ svn_stringbuf__morph_into_string(buf), scratch_pool));
+
+ return SVN_NO_ERROR;
+}
+
+struct ccw_baton
+{
+ svn_commit_callback2_t original_callback;
+ void *original_baton;
+
+ svn_ra_session_t *session;
+ const char *branch_info_dir;
+ svn_branch__txn_t *branching_txn;
+};
+
+/* Wrapper which stores the branching/move-tracking info.
+ */
+static svn_error_t *
+commit_callback_wrapper(const svn_commit_info_t *commit_info,
+ void *baton,
+ apr_pool_t *pool)
+{
+ struct ccw_baton *ccwb = baton;
+
+ /* if this commit used element-branching info, store the new info */
+ if (ccwb->branching_txn)
+ {
+ svn_branch__repos_t *repos = ccwb->branching_txn->repos;
+
+ ccwb->branching_txn->rev = commit_info->revision;
+ SVN_ERR(svn_branch__repos_add_revision(repos, ccwb->branching_txn));
+ SVN_ERR(store_repos_info(ccwb->branching_txn, ccwb->session,
+ ccwb->branch_info_dir, pool));
+ }
+
+ /* call the wrapped callback */
+ if (ccwb->original_callback)
+ {
+ SVN_ERR(ccwb->original_callback(commit_info, ccwb->original_baton, pool));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+
+/* Some RA layers do not correctly fill in REPOS_ROOT in commit_info, or
+ they are third-party layers conforming to an older commit_info structure.
+ Interpose a utility function to ensure the field is valid. */
+static void
+remap_commit_callback(svn_commit_callback2_t *callback,
+ void **callback_baton,
+ svn_ra_session_t *session,
+ svn_branch__txn_t *branching_txn,
+ const char *branch_info_dir,
+ svn_commit_callback2_t original_callback,
+ void *original_baton,
+ apr_pool_t *result_pool)
+{
+ /* Allocate this in RESULT_POOL, since the callback will be called
+ long after this function has returned. */
+ struct ccw_baton *ccwb = apr_palloc(result_pool, sizeof(*ccwb));
+
+ ccwb->session = session;
+ ccwb->branch_info_dir = apr_pstrdup(result_pool, branch_info_dir);
+ ccwb->branching_txn = branching_txn;
+ ccwb->original_callback = original_callback;
+ ccwb->original_baton = original_baton;
+
+ *callback = commit_callback_wrapper;
+ *callback_baton = ccwb;
+}
+
+
+/* Ev3 shims */
+struct fb_baton {
+ /* A session parented at the repository root */
+ svn_ra_session_t *session;
+ const char *repos_root_url;
+ const char *session_path;
+};
+
+/* Fetch kind and/or props and/or text.
+ *
+ * Implements svn_branch__compat_fetch_func_t. */
+static svn_error_t *
+fetch(svn_node_kind_t *kind_p,
+ apr_hash_t **props_p,
+ svn_stringbuf_t **file_text,
+ apr_hash_t **children_names,
+ void *baton,
+ const char *repos_relpath,
+ svn_revnum_t revision,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ struct fb_baton *fbb = baton;
+ svn_node_kind_t kind;
+ apr_hash_index_t *hi;
+
+ if (props_p)
+ *props_p = NULL;
+ if (file_text)
+ *file_text = NULL;
+ if (children_names)
+ *children_names = NULL;
+
+ SVN_ERR(svn_ra_check_path(fbb->session, repos_relpath, revision,
+ &kind, scratch_pool));
+ if (kind_p)
+ *kind_p = kind;
+ if (kind == svn_node_file && (props_p || file_text))
+ {
+ svn_stream_t *file_stream = NULL;
+
+ if (file_text)
+ {
+ *file_text = svn_stringbuf_create_empty(result_pool);
+ file_stream = svn_stream_from_stringbuf(*file_text, scratch_pool);
+ }
+ SVN_ERR(svn_ra_get_file(fbb->session, repos_relpath, revision,
+ file_stream, NULL, props_p, result_pool));
+ if (file_text)
+ {
+ SVN_ERR(svn_stream_close(file_stream));
+ }
+ }
+ else if (kind == svn_node_dir && (props_p || children_names))
+ {
+ SVN_ERR(svn_ra_get_dir2(fbb->session,
+ children_names, NULL, props_p,
+ repos_relpath, revision,
+ 0 /*minimal child info*/,
+ result_pool));
+ }
+
+ /* Remove non-regular props */
+ if (props_p && *props_p)
+ {
+ for (hi = apr_hash_first(scratch_pool, *props_p); hi; hi = apr_hash_next(hi))
+ {
+ const char *name = apr_hash_this_key(hi);
+
+ if (svn_property_kind2(name) != svn_prop_regular_kind)
+ svn_hash_sets(*props_p, name, NULL);
+
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+svn_error_t *
+svn_ra_load_branching_state(svn_branch__txn_t **branching_txn_p,
+ svn_branch__compat_fetch_func_t *fetch_func,
+ void **fetch_baton,
+ svn_ra_session_t *session,
+ const char *branch_info_dir,
+ svn_revnum_t base_revision,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ svn_branch__repos_t *repos;
+ const char *repos_root_url, *session_url, *base_relpath;
+ struct fb_baton *fbb = apr_palloc(result_pool, sizeof (*fbb));
+
+ if (base_revision == SVN_INVALID_REVNUM)
+ {
+ SVN_ERR(svn_ra_get_latest_revnum(session, &base_revision, scratch_pool));
+ }
+
+ /* fetcher */
+ SVN_ERR(svn_ra_get_repos_root2(session, &repos_root_url, result_pool));
+ SVN_ERR(svn_ra_get_session_url(session, &session_url, scratch_pool));
+ base_relpath = svn_uri_skip_ancestor(repos_root_url, session_url, result_pool);
+ SVN_ERR(svn_ra__dup_session(&fbb->session, session, repos_root_url, result_pool, scratch_pool));
+ fbb->session_path = base_relpath;
+ fbb->repos_root_url = repos_root_url;
+ *fetch_func = fetch;
+ *fetch_baton = fbb;
+
+ SVN_ERR(branch_repos_fetch_info(&repos,
+ session, branch_info_dir,
+ *fetch_func, *fetch_baton,
+ result_pool, scratch_pool));
+ SVN_ERR(branch_get_mutable_state(branching_txn_p,
+ repos, session, branch_info_dir,
+ base_revision,
+ *fetch_func, *fetch_baton,
+ result_pool, scratch_pool));
+
+ return SVN_NO_ERROR;
+}
+
+svn_error_t *
+svn_ra_get_commit_txn(svn_ra_session_t *session,
+ svn_branch__txn_t **edit_txn_p,
+ apr_hash_t *revprop_table,
+ svn_commit_callback2_t commit_callback,
+ void *commit_baton,
+ apr_hash_t *lock_tokens,
+ svn_boolean_t keep_locks,
+ const char *branch_info_dir,
+ apr_pool_t *pool)
+{
+ svn_branch__txn_t *branching_txn;
+ svn_branch__compat_fetch_func_t fetch_func;
+ void *fetch_baton;
+ const svn_delta_editor_t *deditor;
+ void *dedit_baton;
+ svn_branch__compat_shim_connector_t *shim_connector;
+
+ /* load branching info
+ * ### Currently we always start from a single base revision, never from
+ * a mixed-rev state */
+ SVN_ERR(svn_ra_load_branching_state(&branching_txn, &fetch_func, &fetch_baton,
+ session, branch_info_dir,
+ SVN_INVALID_REVNUM /*base_revision*/,
+ pool, pool));
+
+ /* arrange for branching info to be stored after commit */
+ remap_commit_callback(&commit_callback, &commit_baton,
+ session, branching_txn, branch_info_dir,
+ commit_callback, commit_baton, pool);
+
+ SVN_ERR(svn_ra_get_commit_editor3(session, &deditor, &dedit_baton,
+ revprop_table,
+ commit_callback, commit_baton,
+ lock_tokens, keep_locks, pool));
+
+ /* Convert to Ev3 */
+ {
+ const char *repos_root_url;
+
+ SVN_ERR(svn_ra_get_repos_root2(session, &repos_root_url, pool));
+
+ /*SVN_ERR(svn_delta__get_debug_editor(&deditor, &dedit_baton,
+ deditor, dedit_baton, "", pool));*/
+ SVN_ERR(svn_branch__compat_txn_from_delta_for_commit(
+ edit_txn_p,
+ &shim_connector,
+ deditor, dedit_baton, branching_txn,
+ repos_root_url,
+ fetch_func, fetch_baton,
+ NULL, NULL /*cancel*/,
+ pool, pool));
+ }
+
+ return SVN_NO_ERROR;
+}
+
diff --git a/tools/dev/svnmover/scanlog.c b/tools/dev/svnmover/scanlog.c
new file mode 100644
index 0000000..ebf8c53
--- /dev/null
+++ b/tools/dev/svnmover/scanlog.c
@@ -0,0 +1,517 @@
+/*
+ * scanlog.c: scanning the log for moves
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+/* ==================================================================== */
+
+
+
+/*** Includes. ***/
+
+#include "svn_hash.h"
+#include "svn_wc.h"
+#include "svn_client.h"
+#include "svn_error.h"
+#include "svn_config.h"
+#include "svn_time.h"
+#include "svn_dirent_uri.h"
+#include "svn_path.h"
+#include "svn_pools.h"
+#include "svn_io.h"
+
+#include "private/svn_client_private.h"
+#include "private/svn_wc_private.h"
+#include "private/svn_ra_private.h"
+
+#include "svnmover.h"
+
+#include "svn_private_config.h"
+
+
+/* From moves-scan-log branch */
+
+svn_repos_move_info_t *
+svn_repos_move_info_create(const char *moved_from_repos_relpath,
+ const char *moved_to_repos_relpath,
+ svn_revnum_t revision,
+ svn_revnum_t copyfrom_rev,
+ svn_repos_move_info_t *prev,
+ svn_repos_move_info_t *next,
+ apr_pool_t *result_pool)
+{
+ svn_repos_move_info_t *move = apr_palloc(result_pool, sizeof(*move));
+
+ move->moved_from_repos_relpath = moved_from_repos_relpath;
+ move->moved_to_repos_relpath = moved_to_repos_relpath;
+ move->revision = revision;
+ move->copyfrom_rev = copyfrom_rev;
+ move->prev = prev;
+ move->next = next;
+
+ return move;
+}
+
+const char *
+svn_client__format_move_chain_for_display(svn_repos_move_info_t *first_move,
+ const char *indent,
+ apr_pool_t *result_pool)
+{
+ const char *s;
+ svn_repos_move_info_t *last_move;
+
+ last_move = first_move;
+ while (last_move->next)
+ last_move = last_move->next;
+
+ if (last_move != first_move)
+ {
+ svn_repos_move_info_t *this_move;
+
+ s = apr_psprintf(result_pool,
+ _("Combined move:\n%s %s@%ld -> %s\n"
+ "%sIndividual moves:\n"),
+ indent, first_move->moved_from_repos_relpath,
+ first_move->copyfrom_rev,
+ last_move->moved_to_repos_relpath, indent);
+
+ this_move = first_move;
+ do
+ {
+ s = apr_pstrcat(result_pool, s,
+ apr_psprintf(
+ result_pool, _("%s [r%ld] %s@%ld -> %s\n"),
+ indent,
+ this_move->revision,
+ this_move->moved_from_repos_relpath,
+ this_move->copyfrom_rev,
+ this_move->moved_to_repos_relpath),
+ (char *)NULL);
+ this_move = this_move->next;
+ }
+ while (this_move);
+ }
+ else
+ s = apr_psprintf(result_pool, _(" [r%ld] %s@%ld -> %s\n"),
+ first_move->revision,
+ first_move->moved_from_repos_relpath,
+ first_move->copyfrom_rev,
+ first_move->moved_to_repos_relpath);
+
+ return s;
+}
+
+typedef struct scan_moves_log_receiver_baton {
+ /*const char *anchor_abspath;*/
+ svn_client_ctx_t *ctx;
+ svn_revnum_t start;
+ svn_revnum_t end;
+ svn_ra_session_t *ra_session;
+
+ /* The moved nodes hash to be populated.
+ * Maps a revision number to an array of svn_repos_move_info_t
+ * objects describing moves which happened in the revision.
+ *
+ * Given a sequence of moves which happened in given revisions, such as:
+ * rA: mv x->z
+ * rA: mv a->b
+ * rB: mv b->c
+ * rC: mv c->d
+ * we map each revision number to all moves which happened in the
+ * revision, which looks as follows:
+ * rA : [(rA, x->z), (rA, a->b)]
+ * rB : [(rB, b->c)]
+ * rC : [(rC, c->d)]
+ * This allows an update to find relevant moves based on the base
+ * revision of a node (during updates the base revision of each node
+ * in the working copy is arbitrary so we might not know the nodes 'a'
+ * and 'x' under these names).
+ * Additionally, all moves pertaining to the same node are chained into a
+ * doubly-linked list via 'next' and 'prev' pointers (see definition of
+ * svn_repos_move_info_t).
+ * This way, an update can look up all moves relevant to a node, forwards
+ * or backwards in history, once it has located a relevant move in the chain.
+ * This can be visualized as follows:
+ * rA : [(rA, x->z, prev=>NULL, next=>NULL),
+ * (rA, a->b, prev=>NULL, next=>(rB, b->c))]
+ * rB : [(rB, b->c), prev=>(rA, a->b), next=>(rC, c->d)]
+ * rC : [(rC, c->d), prev=>(rB, b->c), next=>NULL]
+ */
+ apr_hash_t *moves;
+
+ /* Temporary map of move-target paths to repos_move_info_t.
+ * Used to link multiple moves of the same node across revisions. */
+ apr_hash_t *moves_by_target_path;
+} scan_moves_log_receiver_baton;
+
+typedef struct copy_info {
+ const char *copyto_path;
+ const char *copyfrom_path;
+ svn_revnum_t copyfrom_rev;
+} copy_info;
+
+
+/* Set *RELATED to true if the deleted node at repository relpath
+ * DELETED_PATH@DELETED_REV is ancestrally related to the node at
+ * repository relpath COPYFROM_PATH@COPYFROM_REV, else set it to false.
+ *
+ * ### JAF: In practice this attempts to trace back, starting from
+ * DELETED_PATH@(DELETED_REV-1). What if that does not exist?
+ */
+static svn_error_t *
+check_ancestry(svn_boolean_t *related,
+ const char *session_url,
+ const char *repos_root_url,
+ const char *deleted_path,
+ svn_revnum_t deleted_rev,
+ const char *copyfrom_path,
+ svn_revnum_t copyfrom_rev,
+ svn_client_ctx_t *ctx,
+ apr_pool_t *scratch_pool)
+{
+ apr_hash_t *locations;
+ const char *old_url;
+ const char *old_location;
+ const char *relpath;
+ svn_ra_session_t *ra_session2;
+ apr_array_header_t *location_revisions;
+
+ *related = FALSE;
+
+ location_revisions = apr_array_make(scratch_pool, 1, sizeof(svn_revnum_t));
+ APR_ARRAY_PUSH(location_revisions, svn_revnum_t) = copyfrom_rev;
+ old_url = svn_uri_canonicalize(apr_pstrcat(scratch_pool,
+ repos_root_url, "/",
+ deleted_path, NULL),
+ scratch_pool);
+ relpath = svn_uri_skip_ancestor(session_url, old_url, scratch_pool);
+ SVN_ERR(svn_client_open_ra_session2(&ra_session2, session_url, NULL,
+ ctx, scratch_pool, scratch_pool));
+ if (relpath == NULL)
+ {
+ svn_error_t *err;
+
+ /* The deleted path is outside of the baton's RA session URL.
+ * Try to open the new RA session to the repository root. */
+ SVN_ERR(svn_ra_reparent(ra_session2, repos_root_url, scratch_pool));
+ relpath = svn_uri_skip_ancestor(repos_root_url, old_url, scratch_pool);
+ if (relpath == NULL)
+ return SVN_NO_ERROR;
+ err = svn_ra_get_locations(ra_session2, &locations, relpath,
+ deleted_rev - 1, location_revisions,
+ scratch_pool);
+ if (err)
+ {
+ if (err->apr_err == SVN_ERR_RA_NOT_AUTHORIZED ||
+ err->apr_err == SVN_ERR_RA_DAV_FORBIDDEN)
+ {
+ svn_error_clear(err);
+ return SVN_NO_ERROR;
+ }
+ else
+ return svn_error_trace(err);
+ }
+ }
+ else
+ SVN_ERR(svn_ra_get_locations(ra_session2, &locations, relpath,
+ deleted_rev - 1, location_revisions,
+ scratch_pool));
+
+ old_location = apr_hash_get(locations, &copyfrom_rev, sizeof(svn_revnum_t));
+ *related = (old_location &&
+ strcmp(old_location[0] == '/' ? old_location + 1 : old_location,
+ copyfrom_path) == 0);
+
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+scan_moves_log_receiver(void *baton,
+ svn_log_entry_t *log_entry,
+ apr_pool_t *scratch_pool)
+{
+ apr_hash_index_t *hi;
+ apr_hash_t *copies;
+ apr_array_header_t *deleted_paths;
+ struct scan_moves_log_receiver_baton *b = baton;
+ apr_pool_t *result_pool = apr_hash_pool_get(b->moves);
+ apr_pool_t *iterpool;
+ int i;
+ const char *session_url;
+ const char *repos_root_url;
+ apr_array_header_t *moves;
+
+ if (b->ctx->notify_func2)
+ {
+#if 0
+ svn_wc_notify_t *notify;
+ notify = svn_wc_create_notify(b->anchor_abspath,
+ svn_wc_notify_moves_scan_log_in_progress,
+ scratch_pool);
+ notify->moves_scan_log_start_rev = b->start;
+ notify->moves_scan_log_end_rev = b->end;
+ notify->moves_scan_log_current_rev = log_entry->revision;
+ b->ctx->notify_func2(b->ctx->notify_baton2, notify, scratch_pool);
+#endif
+ }
+
+ if (log_entry->changed_paths2 == NULL)
+ return SVN_NO_ERROR;
+
+ copies = apr_hash_make(scratch_pool);
+ deleted_paths = apr_array_make(scratch_pool, 0, sizeof(const char *));
+
+ /* Scan for copied and deleted nodes in this revision. */
+ for (hi = apr_hash_first(scratch_pool, log_entry->changed_paths2);
+ hi; hi = apr_hash_next(hi))
+ {
+ const char *path = apr_hash_this_key(hi);
+ svn_log_changed_path2_t *data = apr_hash_this_val(hi);
+
+ if ((data->action == 'A' || data->action == 'R') && data->copyfrom_path)
+ {
+ struct copy_info *copy;
+ apr_array_header_t *copies_with_same_source_path;
+
+ SVN_ERR_ASSERT(path[0] == '/');
+
+ if (data->copyfrom_path[0] == '/')
+ data->copyfrom_path++;
+
+ copy = apr_palloc(scratch_pool, sizeof(*copy));
+ copy->copyto_path = path + 1; /* Strip leading '/' */
+ copy->copyfrom_path = data->copyfrom_path;
+ copy->copyfrom_rev = data->copyfrom_rev;
+ copies_with_same_source_path = apr_hash_get(copies,
+ data->copyfrom_path,
+ APR_HASH_KEY_STRING);
+ if (copies_with_same_source_path == NULL)
+ {
+ copies_with_same_source_path = apr_array_make(
+ result_pool, 1,
+ sizeof(struct copy_info *));
+ apr_hash_set(copies, copy->copyfrom_path, APR_HASH_KEY_STRING,
+ copies_with_same_source_path);
+ }
+ APR_ARRAY_PUSH(copies_with_same_source_path,
+ struct copy_info *) = copy;
+ }
+
+ if (data->action == 'D' || data->action == 'R')
+ {
+ const char *parent_path;
+
+ /* ### Is this true? What does the API guarantee? Is it
+ ### true that copyfrom_path is a relpath? */
+ SVN_ERR_ASSERT(path[0] == '/');
+
+ /* When a delete is within a copy the deleted path in the
+ changed_paths2 hash is the copied path, but for the purposes
+ of move detection we want the pre-copy path.
+
+ ### Not sure if this is the correct thing to do. Yes, it
+ ### allows us to detect moves in copies/moves but will it
+ ### lead to false positives? Does it matter that the
+ ### adjusted path may not have been committed? Does it
+ ### matter that the adjusted path may be the same as
+ ### another committed path? */
+ parent_path = svn_dirent_dirname(path, scratch_pool);
+ while(strcmp(parent_path, "/"))
+ {
+ svn_log_changed_path2_t *data2
+ = apr_hash_get(log_entry->changed_paths2, parent_path,
+ APR_HASH_KEY_STRING);
+
+ if (data2 && data2->action == 'A')
+ {
+ const char *relpath = svn_dirent_skip_ancestor(parent_path,
+ path);
+ path = svn_dirent_join_many(scratch_pool, "/",
+ data2->copyfrom_path, relpath,
+ NULL);
+ break;
+ }
+ else
+ parent_path = svn_dirent_dirname(parent_path, scratch_pool);
+ }
+ APR_ARRAY_PUSH(deleted_paths, const char *) = path + 1;
+ }
+ }
+
+ /* If a node was deleted at one location and copied from the deleted
+ * location to a new location within the same revision, put the node
+ * on the moved-nodes list. */
+ SVN_ERR(svn_ra_get_session_url(b->ra_session, &session_url, scratch_pool));
+ SVN_ERR(svn_ra_get_repos_root2(b->ra_session, &repos_root_url, scratch_pool));
+ iterpool = svn_pool_create(scratch_pool);
+ for (i = 0; i < deleted_paths->nelts; i++)
+ {
+ const char *deleted_path;
+ apr_array_header_t *copies_with_same_source_path;
+ svn_repos_move_info_t *new_move;
+ svn_repos_move_info_t *prior_move;
+ svn_boolean_t related;
+ int j;
+
+ deleted_path = APR_ARRAY_IDX(deleted_paths, i, const char *);
+ copies_with_same_source_path = apr_hash_get(copies, deleted_path,
+ APR_HASH_KEY_STRING);
+ if (copies_with_same_source_path == NULL)
+ continue;
+
+ svn_pool_clear(iterpool);
+
+ for (j = 0; j < copies_with_same_source_path->nelts; j++)
+ {
+ struct copy_info *copy;
+
+ copy = APR_ARRAY_IDX(copies_with_same_source_path, j,
+ struct copy_info *);
+
+ /* We found a deleted node which matches the copyfrom path of
+ * a copied node. Verify that the deleted node is an ancestor
+ * of the copied node. Tracing back history of the deleted node
+ * from revision log_entry->revision-1 to the copyfrom-revision
+ * we must end up at the copyfrom-path. */
+ SVN_ERR(check_ancestry(&related, session_url, repos_root_url,
+ deleted_path, log_entry->revision,
+ copy->copyfrom_path,
+ copy->copyfrom_rev,
+ b->ctx, iterpool));
+ if (!related)
+ continue;
+
+ /* ### TODO:
+ * If the node was not copied from the most recent last-changed
+ * revision of the deleted node, this is not a move but a
+ * "copy from the past + delete". */
+
+ /* Remember details of this move. */
+ new_move = svn_repos_move_info_create(
+ apr_pstrdup(result_pool, deleted_path),
+ apr_pstrdup(result_pool, copy->copyto_path),
+ log_entry->revision, copy->copyfrom_rev,
+ NULL, NULL, result_pool);
+
+ /* Link together multiple moves of the same node. */
+ prior_move = apr_hash_get(b->moves_by_target_path,
+ new_move->moved_from_repos_relpath,
+ APR_HASH_KEY_STRING);
+ if (prior_move)
+ {
+ /* Tracing back history of the delete-half of the new move
+ * to the copyfrom-revision of the prior move we must end up
+ * at the delete-half of the prior move. */
+ SVN_ERR(check_ancestry(&related, session_url, repos_root_url,
+ new_move->moved_from_repos_relpath,
+ new_move->revision,
+ prior_move->moved_from_repos_relpath,
+ prior_move->copyfrom_rev,
+ b->ctx, iterpool));
+ if (related)
+ {
+ prior_move->next = new_move;
+ new_move->prev = prior_move;
+ }
+ }
+ apr_hash_set(b->moves_by_target_path,
+ new_move->moved_to_repos_relpath,
+ APR_HASH_KEY_STRING, new_move);
+
+ /* Add this move to the list of moves in this revision. */
+ moves = apr_hash_get(b->moves, &new_move->revision,
+ sizeof(svn_revnum_t));
+ if (moves == NULL)
+ {
+ moves = apr_array_make(result_pool, 1,
+ sizeof(svn_repos_move_info_t *));
+ APR_ARRAY_PUSH(moves, svn_repos_move_info_t *) = new_move;
+ apr_hash_set(b->moves, &new_move->revision, sizeof(svn_revnum_t),
+ moves);
+ }
+ else
+ APR_ARRAY_PUSH(moves, svn_repos_move_info_t *) = new_move;
+ }
+ }
+ svn_pool_destroy(iterpool);
+
+ return SVN_NO_ERROR;
+}
+
+svn_error_t *
+svn_client__get_repos_moves(apr_hash_t **moves,
+ const char *anchor_abspath,
+ svn_ra_session_t *ra_session,
+ svn_revnum_t start,
+ svn_revnum_t end,
+ svn_client_ctx_t *ctx,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ struct scan_moves_log_receiver_baton lrb;
+
+ /*lrb.anchor_abspath = anchor_abspath;*/
+ lrb.ctx = ctx;
+ lrb.moves = apr_hash_make(result_pool);
+ lrb.start = start;
+ lrb.end = end;
+ lrb.ra_session = ra_session;
+ lrb.moves_by_target_path = apr_hash_make(scratch_pool);
+
+ if (ctx->notify_func2)
+ {
+#if 0
+ svn_wc_notify_t *notify;
+ notify = svn_wc_create_notify(b->anchor_abspath,
+ svn_wc_notify_moves_scan_log_start,
+ scratch_pool);
+ notify->moves_scan_log_start_rev = start;
+ notify->moves_scan_log_end_rev = end;
+ notify->moves_scan_log_current_rev = start;
+ ctx->notify_func2(b->ctx->notify_baton2, notify, scratch_pool);
+#endif
+ }
+
+ SVN_ERR(svn_ra_get_log2(ra_session, NULL, start, end, 0, TRUE, FALSE,
+ FALSE, apr_array_make(scratch_pool, 0,
+ sizeof(const char *)),
+ scan_moves_log_receiver, &lrb, scratch_pool));
+
+ if (ctx->notify_func2)
+ {
+#if 0
+ svn_wc_notify_t *notify;
+ notify = svn_wc_create_notify(b->anchor_abspath,
+ svn_wc_notify_moves_scan_log_done,
+ scratch_pool);
+ notify->moves_scan_log_start_rev = start;
+ notify->moves_scan_log_end_rev = end;
+ notify->moves_scan_log_current_rev = end;
+ b->ctx->notify_func2(b->ctx->notify_baton2, notify, scratch_pool);
+#endif
+ }
+
+ if (moves)
+ *moves = lrb.moves;
+
+ return SVN_NO_ERROR;
+}
+
diff --git a/tools/dev/svnmover/svnmover.c b/tools/dev/svnmover/svnmover.c
new file mode 100644
index 0000000..8bc8b65
--- /dev/null
+++ b/tools/dev/svnmover/svnmover.c
@@ -0,0 +1,4759 @@
+/*
+ * svnmover.c: Concept Demo for Move Tracking and Branching
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <assert.h>
+
+#include <apr_lib.h>
+
+#include "svn_private_config.h"
+#include "svn_hash.h"
+#include "svn_iter.h"
+#include "svn_client.h"
+#include "svn_cmdline.h"
+#include "svn_config.h"
+#include "svn_error.h"
+#include "svn_path.h"
+#include "svn_pools.h"
+#include "svn_props.h"
+#include "svn_string.h"
+#include "svn_subst.h"
+#include "svn_utf.h"
+#include "svn_version.h"
+#include "svnmover.h"
+
+#include "private/svn_cmdline_private.h"
+#include "private/svn_subr_private.h"
+#include "private/svn_branch_repos.h"
+#include "private/svn_branch_nested.h"
+#include "private/svn_branch_compat.h"
+#include "private/svn_ra_private.h"
+#include "private/svn_string_private.h"
+#include "private/svn_sorts_private.h"
+#include "private/svn_token.h"
+#include "private/svn_client_private.h"
+#include "private/svn_delta_private.h"
+
+#ifdef HAVE_LINENOISE
+#include "linenoise/linenoise.h"
+#endif
+
+/* Version compatibility check */
+static svn_error_t *
+check_lib_versions(void)
+{
+ static const svn_version_checklist_t checklist[] =
+ {
+ { "svn_client", svn_client_version },
+ { "svn_subr", svn_subr_version },
+ { "svn_ra", svn_ra_version },
+ { NULL, NULL }
+ };
+ SVN_VERSION_DEFINE(my_version);
+
+ return svn_ver_check_list2(&my_version, checklist, svn_ver_equal);
+}
+
+static svn_boolean_t quiet = FALSE;
+
+/* UI mode: whether to display output in terms of paths or elements */
+int the_ui_mode = UI_MODE_EIDS;
+static const svn_token_map_t ui_mode_map[]
+ = { {"eids", UI_MODE_EIDS},
+ {"e", UI_MODE_EIDS},
+ {"paths", UI_MODE_PATHS},
+ {"p", UI_MODE_PATHS},
+ {"serial", UI_MODE_SERIAL},
+ {"s", UI_MODE_SERIAL},
+ {NULL, SVN_TOKEN_UNKNOWN} };
+
+#define is_branch_root_element(branch, eid) \
+ (svn_branch__root_eid(branch) == (eid))
+
+/* Is BRANCH1 the same branch as BRANCH2? Compare by full branch-ids; don't
+ require identical branch objects. */
+#define BRANCH_IS_SAME_BRANCH(branch1, branch2, scratch_pool) \
+ (strcmp(svn_branch__get_id(branch1, scratch_pool), \
+ svn_branch__get_id(branch2, scratch_pool)) == 0)
+
+static svn_boolean_t use_coloured_output = FALSE;
+
+#ifndef WIN32
+
+/* Some ANSI escape codes for controlling text colour in terminal output. */
+#define TEXT_RESET "\x1b[0m"
+#define TEXT_FG_BLACK "\x1b[30m"
+#define TEXT_FG_RED "\x1b[31m"
+#define TEXT_FG_GREEN "\x1b[32m"
+#define TEXT_FG_YELLOW "\x1b[33m"
+#define TEXT_FG_BLUE "\x1b[34m"
+#define TEXT_FG_MAGENTA "\x1b[35m"
+#define TEXT_FG_CYAN "\x1b[36m"
+#define TEXT_FG_WHITE "\x1b[37m"
+#define TEXT_BG_BLACK "\x1b[40m"
+#define TEXT_BG_RED "\x1b[41m"
+#define TEXT_BG_GREEN "\x1b[42m"
+#define TEXT_BG_YELLOW "\x1b[43m"
+#define TEXT_BG_BLUE "\x1b[44m"
+#define TEXT_BG_MAGENTA "\x1b[45m"
+#define TEXT_BG_CYAN "\x1b[46m"
+#define TEXT_BG_WHITE "\x1b[47m"
+
+#define settext(text_attr) \
+ do { \
+ if (use_coloured_output) \
+ { fputs(text_attr, stdout); fflush(stdout); } \
+ } while (0)
+#define settext_stderr(text_attr) \
+ do { \
+ if (use_coloured_output) \
+ { fputs(text_attr, stderr); fflush(stderr); } \
+ } while (0)
+
+#else
+
+/* To support colour on Windows, we could try:
+ *
+ * https://github.com/mattn/ansicolor-w32.c
+ *
+ * (I notice some obvious bugs in its puts/fputs implementations: the #defines
+ * point to _fprintf_w32 instead of _fputs_w32, and puts() fails to append a
+ * newline).
+ */
+
+#define settext(code)
+#define settext_stderr(code)
+
+#endif
+
+__attribute__((format(printf, 1, 2)))
+void
+svnmover_notify(const char *fmt,
+ ...)
+{
+ va_list ap;
+
+ settext(TEXT_FG_GREEN);
+ va_start(ap, fmt);
+ vprintf(fmt, ap);
+ va_end(ap);
+ settext(TEXT_RESET);
+ printf("\n");
+}
+
+__attribute__((format(printf, 1, 2)))
+void
+svnmover_notify_v(const char *fmt,
+ ...)
+{
+ va_list ap;
+
+ if (! quiet)
+ {
+ settext(TEXT_FG_BLUE);
+ va_start(ap, fmt);
+ vprintf(fmt, ap);
+ va_end(ap);
+ settext(TEXT_RESET);
+ printf("\n");
+ }
+}
+
+#define SVN_CL__LOG_SEP_STRING \
+ "------------------------------------------------------------------------\n"
+
+/* ====================================================================== */
+
+/* Set the WC base revision of element EID to BASE_REV.
+ */
+static void
+svnmover_wc_set_base_rev(svnmover_wc_t *wc,
+ svn_branch__state_t *branch,
+ int eid,
+ svn_revnum_t base_rev)
+{
+ apr_hash_t *branch_base_revs = svn_hash_gets(wc->base_revs, branch->bid);
+ void *val = apr_pmemdup(wc->pool, &base_rev, sizeof(base_rev));
+
+ if (!branch_base_revs)
+ {
+ branch_base_revs = apr_hash_make(wc->pool);
+ svn_hash_sets(wc->base_revs, apr_pstrdup(wc->pool, branch->bid),
+ branch_base_revs);
+ }
+ svn_eid__hash_set(branch_base_revs, eid, val);
+}
+
+/* Get the WC base revision of element EID, or SVN_INVALID_REVNUM if
+ * element EID is not present in the WC base.
+ */
+static svn_revnum_t
+svnmover_wc_get_base_rev(svnmover_wc_t *wc,
+ svn_branch__state_t *branch,
+ int eid,
+ apr_pool_t *scratch_pool)
+{
+ apr_hash_t *branch_base_revs = svn_hash_gets(wc->base_revs, branch->bid);
+ svn_error_t *err;
+ svn_element__content_t *element;
+ svn_revnum_t *base_rev_p;
+
+ if (!branch_base_revs)
+ {
+ return SVN_INVALID_REVNUM;
+ }
+ err = svn_branch__state_get_element(branch, &element, eid, scratch_pool);
+ if (err || !element)
+ {
+ svn_error_clear(err);
+ return SVN_INVALID_REVNUM;
+ }
+
+ base_rev_p = svn_eid__hash_get(branch_base_revs, eid);
+ if (! base_rev_p)
+ return SVN_INVALID_REVNUM;
+ return *base_rev_p;
+}
+
+/* Set the WC base revision to BASE_REV for each element in WC base branch
+ * BRANCH, including nested branches.
+ */
+static svn_error_t *
+svnmover_wc_set_base_revs_r(svnmover_wc_t *wc,
+ svn_branch__state_t *branch,
+ svn_revnum_t base_rev,
+ apr_pool_t *scratch_pool)
+{
+ svn_element__tree_t *elements;
+ apr_hash_index_t *hi;
+
+ SVN_ERR(svn_branch__state_get_elements(branch, &elements, scratch_pool));
+ for (hi = apr_hash_first(scratch_pool, elements->e_map);
+ hi; hi = apr_hash_next(hi))
+ {
+ int eid = svn_eid__hash_this_key(hi);
+ svn_element__content_t *element;
+
+ svnmover_wc_set_base_rev(wc, branch, eid, base_rev);
+
+ /* recurse into nested branches */
+ SVN_ERR(svn_branch__state_get_element(branch, &element, eid,
+ scratch_pool));
+ if (element->payload->is_subbranch_root)
+ {
+ const char *subbranch_id
+ = svn_branch__id_nest(branch->bid, eid, scratch_pool);
+ svn_branch__state_t *subbranch
+ = svn_branch__txn_get_branch_by_id(branch->txn, subbranch_id,
+ scratch_pool);
+
+ SVN_ERR(svnmover_wc_set_base_revs_r(wc, subbranch,
+ base_rev, scratch_pool));
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Set the WC base revision to BASE_REV for each element in WC base branch
+ * BRANCH, including nested branches.
+ */
+static svn_error_t *
+svnmover_wc_set_base_revs(svnmover_wc_t *wc,
+ svn_branch__state_t *branch,
+ svn_revnum_t base_rev,
+ apr_pool_t *scratch_pool)
+{
+ wc->base_revs = apr_hash_make(wc->pool);
+ SVN_ERR(svnmover_wc_set_base_revs_r(wc, branch, base_rev, scratch_pool));
+ return SVN_NO_ERROR;
+}
+
+/* Get the lowest and highest base revision numbers in WC base branch
+ * BRANCH, including nested branches.
+ */
+static svn_error_t *
+svnmover_wc_get_base_revs_r(svnmover_wc_t *wc,
+ svn_revnum_t *base_rev_min,
+ svn_revnum_t *base_rev_max,
+ svn_branch__state_t *branch,
+ apr_pool_t *scratch_pool)
+{
+ svn_element__tree_t *base_elements;
+ apr_hash_index_t *hi;
+
+ SVN_ERR(svn_branch__state_get_elements(branch, &base_elements,
+ scratch_pool));
+
+ for (hi = apr_hash_first(scratch_pool, base_elements->e_map);
+ hi; hi = apr_hash_next(hi))
+ {
+ int eid = svn_eid__hash_this_key(hi);
+ svn_revnum_t rev = svnmover_wc_get_base_rev(wc, branch, eid,
+ scratch_pool);
+ svn_element__content_t *element;
+
+ if (*base_rev_min == SVN_INVALID_REVNUM
+ || rev < *base_rev_min)
+ *base_rev_min = rev;
+ if (*base_rev_max == SVN_INVALID_REVNUM
+ || rev > *base_rev_max)
+ *base_rev_max = rev;
+
+ /* recurse into nested branches */
+ SVN_ERR(svn_branch__state_get_element(branch, &element, eid,
+ scratch_pool));
+ if (element->payload->is_subbranch_root)
+ {
+ const char *subbranch_id
+ = svn_branch__id_nest(branch->bid, eid, scratch_pool);
+ svn_branch__state_t *subbranch
+ = svn_branch__txn_get_branch_by_id(branch->txn, subbranch_id,
+ scratch_pool);
+
+ SVN_ERR(svnmover_wc_get_base_revs_r(wc, base_rev_min, base_rev_max,
+ subbranch, scratch_pool));
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Get the lowest and highest base revision numbers in WC.
+ */
+static svn_error_t *
+svnmover_wc_get_base_revs(svnmover_wc_t *wc,
+ svn_revnum_t *base_rev_min,
+ svn_revnum_t *base_rev_max,
+ apr_pool_t *scratch_pool)
+{
+ *base_rev_min = SVN_INVALID_REVNUM;
+ *base_rev_max = SVN_INVALID_REVNUM;
+ SVN_ERR(svnmover_wc_get_base_revs_r(wc, base_rev_min, base_rev_max,
+ wc->base->branch, scratch_pool));
+ return SVN_NO_ERROR;
+}
+
+/* Update the WC to revision BASE_REVISION (SVN_INVALID_REVNUM means HEAD).
+ *
+ * Requires these fields in WC:
+ * head_revision
+ * repos_root_url
+ * ra_session
+ * pool
+ *
+ * Initializes these fields in WC:
+ * base_revision
+ * base_branch_id
+ * base_branch
+ * working_branch_id
+ * working_branch
+ * editor
+ *
+ * Assumes there are no changes in the WC: throws away the existing txn
+ * and starts a new one.
+ */
+static svn_error_t *
+wc_checkout(svnmover_wc_t *wc,
+ svn_revnum_t base_revision,
+ const char *base_branch_id,
+ apr_pool_t *scratch_pool)
+{
+ const char *branch_info_dir = NULL;
+ svn_branch__compat_fetch_func_t fetch_func;
+ void *fetch_baton;
+ svn_branch__txn_t *base_txn;
+
+ /* Validate and store the new base revision number */
+ if (! SVN_IS_VALID_REVNUM(base_revision))
+ base_revision = wc->head_revision;
+ else if (base_revision > wc->head_revision)
+ return svn_error_createf(SVN_ERR_FS_NO_SUCH_REVISION, NULL,
+ _("No such revision %ld (HEAD is %ld)"),
+ base_revision, wc->head_revision);
+
+ /* Choose whether to store branching info in a local dir or in revprops.
+ (For now, just to exercise the options, we choose local files for
+ RA-local and revprops for a remote repo.) */
+ if (strncmp(wc->repos_root_url, "file://", 7) == 0)
+ {
+ const char *repos_dir;
+
+ SVN_ERR(svn_uri_get_dirent_from_file_url(&repos_dir, wc->repos_root_url,
+ scratch_pool));
+ branch_info_dir = svn_dirent_join(repos_dir, "branch-info", scratch_pool);
+ }
+
+ /* Get a mutable transaction based on that rev. (This implementation
+ re-reads all the move-tracking data from the repository.) */
+ SVN_ERR(svn_ra_load_branching_state(&wc->edit_txn,
+ &fetch_func, &fetch_baton,
+ wc->ra_session, branch_info_dir,
+ base_revision,
+ wc->pool, scratch_pool));
+
+ wc->edit_txn = svn_branch__nested_txn_create(wc->edit_txn, wc->pool);
+
+ /* Store the WC base state */
+ base_txn = svn_branch__repos_get_base_revision_root(wc->edit_txn);
+ wc->base = apr_pcalloc(wc->pool, sizeof(*wc->base));
+ wc->base->revision = base_revision;
+ wc->base->branch
+ = svn_branch__txn_get_branch_by_id(base_txn, base_branch_id, scratch_pool);
+ if (! wc->base->branch)
+ return svn_error_createf(SVN_BRANCH__ERR, NULL,
+ "Cannot check out WC: branch %s not found in r%ld",
+ base_branch_id, base_revision);
+ SVN_ERR(svnmover_wc_set_base_revs(wc, wc->base->branch,
+ base_revision, scratch_pool));
+
+ wc->working = apr_pcalloc(wc->pool, sizeof(*wc->working));
+ wc->working->revision = SVN_INVALID_REVNUM;
+ wc->working->branch
+ = svn_branch__txn_get_branch_by_id(wc->edit_txn, base_branch_id,
+ scratch_pool);
+ SVN_ERR_ASSERT(wc->working->branch);
+
+ return SVN_NO_ERROR;
+}
+
+/* Create a simulated WC, in memory.
+ *
+ * Initializes these fields in WC:
+ * head_revision
+ * repos_root_url
+ * ra_session
+ * made_changes
+ * ctx
+ * pool
+ *
+ * BASE_REVISION is the revision to work on, or SVN_INVALID_REVNUM for HEAD.
+ */
+static svn_error_t *
+wc_create(svnmover_wc_t **wc_p,
+ const char *anchor_url,
+ svn_revnum_t base_revision,
+ const char *base_branch_id,
+ svn_client_ctx_t *ctx,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ apr_pool_t *wc_pool = svn_pool_create(result_pool);
+ svnmover_wc_t *wc = apr_pcalloc(wc_pool, sizeof(*wc));
+
+ wc->pool = wc_pool;
+ wc->ctx = ctx;
+
+ SVN_ERR(svn_client_open_ra_session2(&wc->ra_session, anchor_url,
+ NULL /* wri_abspath */, ctx,
+ wc_pool, scratch_pool));
+
+ SVN_ERR(svn_ra_get_repos_root2(wc->ra_session, &wc->repos_root_url,
+ result_pool));
+ SVN_ERR(svn_ra_get_latest_revnum(wc->ra_session, &wc->head_revision,
+ scratch_pool));
+ SVN_ERR(svn_ra_reparent(wc->ra_session, wc->repos_root_url, scratch_pool));
+
+ SVN_ERR(wc_checkout(wc, base_revision, base_branch_id, scratch_pool));
+ *wc_p = wc;
+ return SVN_NO_ERROR;
+}
+
+svn_error_t *
+svnmover_element_differences(apr_hash_t **diff_p,
+ const svn_element__tree_t *left,
+ const svn_element__tree_t *right,
+ apr_hash_t *elements,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ apr_hash_t *diff = apr_hash_make(result_pool);
+ apr_hash_index_t *hi;
+
+ if (! left)
+ left = svn_element__tree_create(NULL, 0 /*root_eid*/, scratch_pool);
+ if (! right)
+ right = svn_element__tree_create(NULL, 0 /*root_eid*/, scratch_pool);
+
+ /*SVN_DBG(("element_differences(b%s r%ld, b%s r%ld, e%d)",
+ svn_branch__get_id(left->branch, scratch_pool), left->rev,
+ svn_branch__get_id(right->branch, scratch_pool), right->rev,
+ right->eid));*/
+
+ if (!elements)
+ elements = hash_overlay(left->e_map, right->e_map);
+
+ for (hi = apr_hash_first(scratch_pool, elements);
+ hi; hi = apr_hash_next(hi))
+ {
+ int e = svn_eid__hash_this_key(hi);
+ svn_element__content_t *element_left
+ = svn_element__tree_get(left, e);
+ svn_element__content_t *element_right
+ = svn_element__tree_get(right, e);
+
+ if (! svn_element__content_equal(element_left, element_right,
+ scratch_pool))
+ {
+ svn_element__content_t **contents
+ = apr_palloc(result_pool, 2 * sizeof(void *));
+
+ contents[0] = element_left;
+ contents[1] = element_right;
+ svn_eid__hash_set(diff, e, contents);
+ }
+ }
+
+ *diff_p = diff;
+ return SVN_NO_ERROR;
+}
+
+/* */
+static const char *
+rev_bid_str(const svn_branch__rev_bid_t *rev_bid,
+ apr_pool_t *result_pool)
+{
+ if (!rev_bid)
+ return "<nil>";
+ return apr_psprintf(result_pool, "r%ld.%s", rev_bid->rev, rev_bid->bid);
+}
+
+/* */
+static const char *
+list_parents(svn_branch__history_t *history,
+ apr_pool_t *result_pool)
+{
+ const char *result = "";
+ apr_hash_index_t *hi;
+
+ for (hi = apr_hash_first(result_pool, history->parents);
+ hi; hi = apr_hash_next(hi))
+ {
+ svn_branch__rev_bid_t *parent = apr_hash_this_val(hi);
+ const char *parent_str = rev_bid_str(parent, result_pool);
+
+ result = apr_psprintf(result_pool, "%s%s%s",
+ result, result[0] ? ", " : "", parent_str);
+ }
+ return result;
+}
+
+/* Return a string representation of HISTORY.
+ */
+static const char *
+history_str(svn_branch__history_t *history,
+ apr_pool_t *result_pool)
+{
+ const char *result
+ = list_parents(history, result_pool);
+
+ return apr_psprintf(result_pool, "parents={%s}", result);
+}
+
+/*
+ */
+static svn_error_t *
+svn_branch__history_add_parent(svn_branch__history_t *history,
+ svn_revnum_t rev,
+ const char *branch_id,
+ apr_pool_t *scratch_pool)
+{
+ apr_pool_t *pool = apr_hash_pool_get(history->parents);
+ svn_branch__rev_bid_t *new_parent;
+
+ new_parent = svn_branch__rev_bid_create(rev, branch_id, pool);
+ svn_hash_sets(history->parents, apr_pstrdup(pool, branch_id), new_parent);
+ return SVN_NO_ERROR;
+}
+
+/* Set *DIFFERENCE_P to some sort of indication of the difference between
+ * HISTORY1 and HISTORY2, or to null if there is no difference.
+ *
+ * Inputs may be null.
+ */
+static svn_error_t *
+history_diff(const char **difference_p,
+ svn_branch__history_t *history1,
+ svn_branch__history_t *history2,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ apr_hash_t *combined;
+ apr_hash_index_t *hi;
+ svn_boolean_t different = FALSE;
+
+ if (! history1)
+ history1 = svn_branch__history_create_empty(scratch_pool);
+ if (! history2)
+ history2 = svn_branch__history_create_empty(scratch_pool);
+ combined = hash_overlay(history1->parents,
+ history2->parents);
+
+ for (hi = apr_hash_first(scratch_pool, combined);
+ hi; hi = apr_hash_next(hi))
+ {
+ const char *bid = apr_hash_this_key(hi);
+ svn_branch__rev_bid_t *parent1 = svn_hash_gets(history1->parents, bid);
+ svn_branch__rev_bid_t *parent2 = svn_hash_gets(history2->parents, bid);
+
+ if (!(parent1 && parent2
+ && svn_branch__rev_bid_equal(parent1, parent2)))
+ {
+ different = TRUE;
+ break;
+ }
+ }
+ if (different)
+ {
+ *difference_p = apr_psprintf(result_pool, "%s -> %s",
+ history_str(history1, scratch_pool),
+ history_str(history2, scratch_pool));
+ }
+ else
+ {
+ *difference_p = NULL;
+ }
+ return SVN_NO_ERROR;
+}
+
+/* Set *IS_CHANGED to true if EDIT_TXN differs from its base txn, else to
+ * false.
+ *
+ * Notice only a difference in content: branches deleted or added, or branch
+ * contents different. Ignore any differences in branch history metadata.
+ *
+ * ### At least we must ignore the "this branch" parent changing from
+ * old-revision to new-revision. However we should probably notice
+ * if a merge parent is added (which means we want to make a commit
+ * recording this merge, even if no content changed), and perhaps
+ * other cases.
+ */
+static svn_error_t *
+txn_is_changed(svn_branch__txn_t *edit_txn,
+ svn_boolean_t *is_changed,
+ apr_pool_t *scratch_pool)
+{
+ int i;
+ svn_branch__txn_t *base_txn
+ = svn_branch__repos_get_base_revision_root(edit_txn);
+ apr_array_header_t *edit_branches
+ = svn_branch__txn_get_branches(edit_txn, scratch_pool);
+ apr_array_header_t *base_branches
+ = svn_branch__txn_get_branches(base_txn, scratch_pool);
+
+ *is_changed = FALSE;
+
+ /* If any previous branch is now missing, that's a change. */
+ for (i = 0; i < base_branches->nelts; i++)
+ {
+ svn_branch__state_t *base_branch = APR_ARRAY_IDX(base_branches, i, void *);
+ svn_branch__state_t *edit_branch
+ = svn_branch__txn_get_branch_by_id(edit_txn, base_branch->bid,
+ scratch_pool);
+
+ if (! edit_branch)
+ {
+ *is_changed = TRUE;
+ return SVN_NO_ERROR;
+ }
+ }
+
+ /* If any current branch is new or changed, that's a change. */
+ for (i = 0; i < edit_branches->nelts; i++)
+ {
+ svn_branch__state_t *edit_branch = APR_ARRAY_IDX(edit_branches, i, void *);
+ svn_branch__state_t *base_branch
+ = svn_branch__txn_get_branch_by_id(base_txn, edit_branch->bid,
+ scratch_pool);
+ svn_element__tree_t *edit_branch_elements, *base_branch_elements;
+ apr_hash_t *diff;
+
+ if (! base_branch)
+ {
+ *is_changed = TRUE;
+ return SVN_NO_ERROR;
+ }
+
+#if 0
+ /* Compare histories */
+ /* ### No, don't. Ignore any differences in branch history metadata. */
+ {
+ svn_branch__history_t *edit_branch_history;
+ svn_branch__history_t *base_branch_history;
+ const char *history_difference;
+
+ SVN_ERR(svn_branch__state_get_history(edit_branch, &edit_branch_history,
+ scratch_pool));
+ SVN_ERR(svn_branch__state_get_history(base_branch, &base_branch_history,
+ scratch_pool));
+ SVN_ERR(history_diff(&history_difference,
+ edit_branch_history,
+ base_branch_history,
+ scratch_pool, scratch_pool));
+ if (history_difference)
+ {
+ *is_changed = TRUE;
+ return SVN_NO_ERROR;
+ }
+ }
+#endif
+
+ /* Compare elements */
+ SVN_ERR(svn_branch__state_get_elements(edit_branch, &edit_branch_elements,
+ scratch_pool));
+ SVN_ERR(svn_branch__state_get_elements(base_branch, &base_branch_elements,
+ scratch_pool));
+ SVN_ERR(svnmover_element_differences(&diff,
+ edit_branch_elements,
+ base_branch_elements,
+ NULL /*all elements*/,
+ scratch_pool, scratch_pool));
+ if (apr_hash_count(diff))
+ {
+ *is_changed = TRUE;
+ return SVN_NO_ERROR;
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Replay the whole-element changes between LEFT_BRANCH and RIGHT_BRANCH
+ * into EDIT_BRANCH.
+ *
+ * Replaying means, for each element E that is changed (added, modified
+ * or deleted) between left and right branches, we set element E in
+ * EDIT_BRANCH to whole value of E in RIGHT_BRANCH. This is not like
+ * merging: each change resets an element's whole value.
+ *
+ * ELEMENTS_TO_DIFF (eid -> [anything]) says which elements to diff; if
+ * null, diff all elements in the union of left & right branches.
+ *
+ * LEFT_BRANCH and/or RIGHT_BRANCH may be null which means the equivalent
+ * of an empty branch.
+ *
+ * Non-recursive: single branch only.
+ */
+static svn_error_t *
+branch_elements_replay(svn_branch__state_t *edit_branch,
+ const svn_branch__state_t *left_branch,
+ const svn_branch__state_t *right_branch,
+ apr_hash_t *elements_to_diff,
+ apr_pool_t *scratch_pool)
+{
+ svn_element__tree_t *s_left = NULL, *s_right = NULL;
+ apr_hash_t *diff_left_right;
+ apr_hash_index_t *hi;
+
+ if (left_branch)
+ SVN_ERR(svn_branch__state_get_elements(left_branch, &s_left,
+ scratch_pool));
+ if (right_branch)
+ SVN_ERR(svn_branch__state_get_elements(right_branch, &s_right,
+ scratch_pool));
+ SVN_ERR(svnmover_element_differences(&diff_left_right,
+ s_left, s_right,
+ elements_to_diff,
+ scratch_pool, scratch_pool));
+
+ /* Go through the per-element differences. */
+ for (hi = apr_hash_first(scratch_pool, diff_left_right);
+ hi; hi = apr_hash_next(hi))
+ {
+ int eid = svn_eid__hash_this_key(hi);
+ svn_element__content_t **e_pair = apr_hash_this_val(hi);
+ svn_element__content_t *e0 = e_pair[0], *e1 = e_pair[1];
+
+ SVN_ERR_ASSERT(!e0
+ || svn_element__payload_invariants(e0->payload));
+ SVN_ERR_ASSERT(!e1
+ || svn_element__payload_invariants(e1->payload));
+ SVN_ERR(svn_branch__state_set_element(edit_branch, eid,
+ e1, scratch_pool));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* */
+static svn_error_t *
+get_union_of_subbranches(apr_hash_t **all_subbranches_p,
+ svn_branch__state_t *left_branch,
+ svn_branch__state_t *right_branch,
+ apr_pool_t *result_pool)
+{
+ apr_hash_t *all_subbranches;
+ svn_branch__subtree_t *s_left = NULL;
+ svn_branch__subtree_t *s_right = NULL;
+
+ if (left_branch)
+ SVN_ERR(svn_branch__get_subtree(left_branch, &s_left,
+ svn_branch__root_eid(left_branch),
+ result_pool));
+ if (right_branch)
+ SVN_ERR(svn_branch__get_subtree(right_branch, &s_right,
+ svn_branch__root_eid(right_branch),
+ result_pool));
+ all_subbranches
+ = (s_left && s_right) ? hash_overlay(s_left->subbranches,
+ s_right->subbranches)
+ : s_left ? s_left->subbranches
+ : s_right ? s_right->subbranches
+ : apr_hash_make(result_pool);
+
+ *all_subbranches_p = all_subbranches;
+ return SVN_NO_ERROR;
+}
+
+/* Replay differences between S_LEFT and S_RIGHT into EDITOR:EDIT_BRANCH.
+ *
+ * S_LEFT or S_RIGHT (but not both) may be null meaning an empty set.
+ *
+ * Recurse into subbranches.
+ */
+static svn_error_t *
+svn_branch__replay(svn_branch__txn_t *edit_txn,
+ svn_branch__state_t *edit_branch,
+ svn_branch__state_t *left_branch,
+ svn_branch__state_t *right_branch,
+ apr_pool_t *scratch_pool)
+{
+ assert((left_branch && right_branch)
+ ? (svn_branch__root_eid(left_branch) == svn_branch__root_eid(right_branch))
+ : (left_branch || right_branch));
+
+ if (right_branch)
+ {
+ /* Replay this branch */
+ apr_hash_t *elements_to_diff = NULL; /*means the union of left & right*/
+
+ SVN_ERR(branch_elements_replay(edit_branch, left_branch, right_branch,
+ elements_to_diff, scratch_pool));
+ }
+ else
+ {
+ /* deleted branch LEFT */
+ /* nothing to do -- it will go away because we deleted the outer-branch
+ element where it was attached */
+ }
+
+ /* Replay any change in history */
+ /* ### Actually, here we just set the output history to the right-hand-side
+ history if that differs from left-hand-side.
+ This doesn't seem right, in general. It's OK if we're just copying
+ a txn into a fresh txn, as for example we do during commit. */
+ {
+ svn_branch__history_t *left_history = NULL;
+ svn_branch__history_t *right_history = NULL;
+ const char *history_difference;
+
+ if (left_branch)
+ SVN_ERR(svn_branch__state_get_history(left_branch, &left_history,
+ scratch_pool));
+ if (right_branch)
+ SVN_ERR(svn_branch__state_get_history(right_branch, &right_history,
+ scratch_pool));
+ SVN_ERR(history_diff(&history_difference, left_history, right_history,
+ scratch_pool, scratch_pool));
+ if (history_difference)
+ {
+ SVN_ERR(svn_branch__state_set_history(edit_branch, right_history,
+ scratch_pool));
+ }
+ }
+
+ /* Replay its subbranches, recursively.
+ (If we're deleting the current branch, we don't also need to
+ explicitly delete its subbranches... do we?) */
+ if (right_branch)
+ {
+ apr_hash_t *all_subbranches;
+ apr_hash_index_t *hi;
+
+ SVN_ERR(get_union_of_subbranches(&all_subbranches,
+ left_branch, right_branch, scratch_pool));
+ for (hi = apr_hash_first(scratch_pool, all_subbranches);
+ hi; hi = apr_hash_next(hi))
+ {
+ int this_eid = svn_eid__hash_this_key(hi);
+ svn_branch__state_t *left_subbranch = NULL;
+ svn_branch__state_t *right_subbranch = NULL;
+ svn_branch__state_t *edit_subbranch = NULL;
+
+ if (left_branch)
+ SVN_ERR(svn_branch__get_subbranch_at_eid(
+ left_branch, &left_subbranch, this_eid, scratch_pool));
+ if (right_branch)
+ SVN_ERR(svn_branch__get_subbranch_at_eid(
+ right_branch, &right_subbranch, this_eid, scratch_pool));
+ /* If the subbranch is to be edited or added, first look up the
+ corresponding edit subbranch, or, if not found, create one. */
+ if (right_subbranch)
+ {
+ const char *new_branch_id
+ = svn_branch__id_nest(edit_branch->bid, this_eid, scratch_pool);
+
+ SVN_ERR(svn_branch__txn_open_branch(edit_txn, &edit_subbranch,
+ new_branch_id,
+ svn_branch__root_eid(right_subbranch),
+ NULL /*tree_ref*/,
+ scratch_pool, scratch_pool));
+ }
+
+ /* recurse */
+ if (edit_subbranch)
+ {
+ SVN_ERR(svn_branch__replay(edit_txn, edit_subbranch,
+ left_subbranch, right_subbranch,
+ scratch_pool));
+ }
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Replay differences between LEFT_BRANCH and RIGHT_BRANCH into
+ * EDIT_ROOT_BRANCH.
+ * (Recurse into subbranches.)
+ */
+static svn_error_t *
+replay(svn_branch__txn_t *edit_txn,
+ svn_branch__state_t *edit_root_branch,
+ svn_branch__state_t *left_branch,
+ svn_branch__state_t *right_branch,
+ apr_pool_t *scratch_pool)
+{
+ SVN_ERR_ASSERT(left_branch || right_branch);
+
+ SVN_ERR(svn_branch__replay(edit_txn, edit_root_branch,
+ left_branch, right_branch, scratch_pool));
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+commit_callback(const svn_commit_info_t *commit_info,
+ void *baton,
+ apr_pool_t *pool);
+
+/* Baton for commit_callback(). */
+typedef struct commit_callback_baton_t
+{
+ svn_branch__txn_t *edit_txn;
+ const char *wc_base_branch_id;
+ const char *wc_commit_branch_id;
+
+ /* just-committed revision */
+ svn_revnum_t revision;
+} commit_callback_baton_t;
+
+static svn_error_t *
+display_diff_of_commit(const commit_callback_baton_t *ccbb,
+ apr_pool_t *scratch_pool);
+
+static svn_error_t *
+do_topbranch(svn_branch__state_t **new_branch_p,
+ svn_branch__txn_t *txn,
+ svn_branch__rev_bid_eid_t *from,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool);
+
+/* Allocate the same number of new EIDs in NEW_TXN as are already
+ * allocated in OLD_TXN.
+ */
+static svn_error_t *
+allocate_eids(svn_branch__txn_t *new_txn,
+ const svn_branch__txn_t *old_txn,
+ apr_pool_t *scratch_pool)
+{
+ int num_new_eids;
+ int i;
+
+ SVN_ERR(svn_branch__txn_get_num_new_eids(old_txn, &num_new_eids,
+ scratch_pool));
+ for (i = 0; i < num_new_eids; i++)
+ {
+ SVN_ERR(svn_branch__txn_new_eid(new_txn, NULL, scratch_pool));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Update the EIDs, given that a commit has translated all new EIDs
+ * (negative numbers) to regular EIDs (positive numbers).
+ *
+ * ### TODO: This will need to take and use a new-EID-translation rule
+ * that must be returned by the commit, as we must not guess (as we
+ * presently do) what translation the server performed. This guess
+ * will fail once the server does rebasing on commit.
+ */
+static svn_error_t *
+update_wc_eids(svnmover_wc_t *wc,
+ apr_pool_t *scratch_pool)
+{
+ SVN_ERR(allocate_eids(wc->base->branch->txn, wc->working->branch->txn,
+ scratch_pool));
+ SVN_ERR(svn_branch__txn_finalize_eids(wc->base->branch->txn, scratch_pool));
+ SVN_ERR(svn_branch__txn_finalize_eids(wc->working->branch->txn, scratch_pool));
+ return SVN_NO_ERROR;
+}
+
+/* Update the WC base value of each committed element to match the
+ * corresponding WC working element value.
+ * Update the WC base revision for each committed element to NEW_REV.
+ *
+ * The committed elements are determined by diffing base against working.
+ * ### TODO: When we allow committing a subset of the WC, we'll need to
+ * pass in a list of the committed elements.
+ *
+ * BASE_BRANCH and/or WORK_BRANCH may be null.
+ */
+static svn_error_t *
+update_wc_base_r(svnmover_wc_t *wc,
+ svn_branch__state_t *base_branch,
+ svn_branch__state_t *work_branch,
+ svn_revnum_t new_rev,
+ apr_pool_t *scratch_pool)
+{
+ svn_element__tree_t *base_elements = NULL, *working_elements = NULL;
+ apr_hash_t *committed_elements;
+ apr_hash_index_t *hi;
+
+ if (base_branch)
+ SVN_ERR(svn_branch__state_get_elements(base_branch, &base_elements,
+ scratch_pool));
+ if (work_branch)
+ SVN_ERR(svn_branch__state_get_elements(work_branch, &working_elements,
+ scratch_pool));
+ SVN_ERR(svnmover_element_differences(&committed_elements,
+ base_elements, working_elements,
+ NULL /*all elements*/,
+ scratch_pool, scratch_pool));
+
+ for (hi = apr_hash_first(scratch_pool, committed_elements);
+ hi; hi = apr_hash_next(hi))
+ {
+ int eid = svn_eid__hash_this_key(hi);
+ svn_element__content_t *content = NULL;
+
+ if (work_branch)
+ SVN_ERR(svn_branch__state_get_element(work_branch, &content,
+ eid, scratch_pool));
+ SVN_ERR(svn_branch__state_set_element(base_branch, eid,
+ content, scratch_pool));
+ svnmover_wc_set_base_rev(wc, base_branch, eid, new_rev);
+
+ /* recurse into nested branches that exist in working */
+ if (content && content->payload->is_subbranch_root)
+ {
+ svn_branch__state_t *base_subbranch = NULL;
+ svn_branch__state_t *work_subbranch = NULL;
+
+ if (base_branch)
+ {
+ base_subbranch
+ = svn_branch__txn_get_branch_by_id(
+ base_branch->txn,
+ svn_branch__id_nest(base_branch->bid, eid, scratch_pool),
+ scratch_pool);
+ }
+ if (work_branch)
+ {
+ work_subbranch
+ = svn_branch__txn_get_branch_by_id(
+ work_branch->txn,
+ svn_branch__id_nest(work_branch->bid, eid, scratch_pool),
+ scratch_pool);
+ }
+ if (work_subbranch && !base_subbranch)
+ {
+ const char *new_branch_id
+ = svn_branch__id_nest(base_branch->bid, eid, scratch_pool);
+ svn_branch__history_t *history;
+
+ SVN_ERR(svn_branch__txn_open_branch(base_branch->txn,
+ &base_subbranch,
+ new_branch_id,
+ svn_branch__root_eid(work_subbranch),
+ NULL /*tree_ref*/,
+ scratch_pool, scratch_pool));
+ SVN_ERR(svn_branch__state_get_history(
+ work_subbranch, &history, scratch_pool));
+ SVN_ERR(svn_branch__state_set_history(
+ base_subbranch, history, scratch_pool));
+ }
+ SVN_ERR(update_wc_base_r(wc, base_subbranch, work_subbranch,
+ new_rev, scratch_pool));
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Update the WC base value of each committed element to match the
+ * corresponding WC working element value.
+ * Update the WC base revision for each committed element to NEW_REV.
+ *
+ * The committed elements are determined by diffing base against working.
+ * ### TODO: When we allow committing a subset of the WC, we'll need to
+ * pass in a list of the committed elements.
+ *
+ * ### This should be equivalent to 'replay(base, base, working)'. Use that
+ * instead.
+ */
+static svn_error_t *
+update_wc_base(svnmover_wc_t *wc,
+ svn_revnum_t new_rev,
+ apr_pool_t *scratch_pool)
+{
+ svn_branch__state_t *base_branch = wc->base->branch;
+ svn_branch__state_t *work_branch = wc->working->branch;
+ /* Thin wrapper: delegate to the recursive worker, starting at the
+ WC's root base/working branch pair. */
+ SVN_ERR(update_wc_base_r(wc, base_branch, work_branch,
+ new_rev, scratch_pool));
+ return SVN_NO_ERROR;
+}
+
+/* Commit the changes from WC into the repository.
+ *
+ * Open a new commit txn to the repo. Replay the changes from WC into it.
+ * Update the WC base for the committed elements.
+ *
+ * Set WC->head_revision and *NEW_REV_P to the committed revision number.
+ *
+ * If there are no changes to commit, set *NEW_REV_P to SVN_INVALID_REVNUM
+ * and do not make a commit and do not change WC->head_revision.
+ *
+ * NEW_REV_P may be null if not wanted.
+ */
+static svn_error_t *
+wc_commit(svn_revnum_t *new_rev_p,
+ svnmover_wc_t *wc,
+ apr_hash_t *revprops,
+ apr_pool_t *scratch_pool)
+{
+ const char *branch_info_dir = NULL;
+ svn_branch__txn_t *commit_txn;
+ commit_callback_baton_t ccbb;
+ svn_boolean_t change_detected;
+ const char *edit_root_branch_id;
+ svn_branch__state_t *edit_root_branch;
+
+ /* Bail out early (without contacting the repository) if the working
+ txn contains no changes relative to its base. */
+ SVN_ERR(txn_is_changed(wc->working->branch->txn, &change_detected,
+ scratch_pool));
+ if (! change_detected)
+ {
+ wc->list_of_commands = NULL;
+ if (new_rev_p)
+ *new_rev_p = SVN_INVALID_REVNUM;
+ return SVN_NO_ERROR;
+ }
+
+ /* If no log msg provided, use the list of commands */
+ if (! svn_hash_gets(revprops, SVN_PROP_REVISION_LOG) && wc->list_of_commands)
+ {
+ /* Avoid modifying the passed-in revprops hash */
+ revprops = apr_hash_copy(scratch_pool, revprops);
+
+ svn_hash_sets(revprops, SVN_PROP_REVISION_LOG,
+ svn_string_create(wc->list_of_commands, scratch_pool));
+ }
+
+ /* Choose whether to store branching info in a local dir or in revprops.
+ (For now, just to exercise the options, we choose local files for
+ RA-local and revprops for a remote repo.) */
+ if (strncmp(wc->repos_root_url, "file://", 7) == 0)
+ {
+ const char *repos_dir;
+
+ SVN_ERR(svn_uri_get_dirent_from_file_url(&repos_dir, wc->repos_root_url,
+ scratch_pool));
+ branch_info_dir = svn_dirent_join(repos_dir, "branch-info", scratch_pool);
+ }
+
+ /* Start a new editor for the commit. */
+ /* NOTE(review): &ccbb is handed to the RA layer here while its fields
+ are only filled in further below; presumably the commit callback
+ fires no earlier than svn_branch__txn_complete() — confirm. */
+ SVN_ERR(svn_ra_get_commit_txn(wc->ra_session,
+ &commit_txn,
+ revprops,
+ commit_callback, &ccbb,
+ NULL /*lock_tokens*/, FALSE /*keep_locks*/,
+ branch_info_dir,
+ scratch_pool));
+ /*SVN_ERR(svn_branch__txn_get_debug(&wc->edit_txn, wc->edit_txn, scratch_pool));*/
+
+ edit_root_branch_id = wc->working->branch->bid;
+ edit_root_branch = svn_branch__txn_get_branch_by_id(
+ commit_txn, wc->working->branch->bid, scratch_pool);
+
+ /* We might be creating a new top-level branch in this commit. That is the
+ only case in which the working branch will not be found in EDIT_TXN.
+ (Creating any other branch can only be done inside a checkout of a
+ parent branch.) So, maybe create a new top-level branch. */
+ if (! edit_root_branch)
+ {
+ /* Create a new top-level branch in the edited state. (It will have
+ an independent new top-level branch number.) */
+ svn_branch__rev_bid_eid_t *from
+ = svn_branch__rev_bid_eid_create(wc->base->revision,
+ wc->base->branch->bid,
+ svn_branch__root_eid(wc->base->branch),
+ scratch_pool);
+
+ SVN_ERR(do_topbranch(&edit_root_branch, commit_txn,
+ from, scratch_pool, scratch_pool));
+ edit_root_branch_id = edit_root_branch->bid;
+ }
+ /* Allocate all the new eids we'll need in this new txn */
+ SVN_ERR(allocate_eids(commit_txn, wc->working->branch->txn, scratch_pool));
+ /* Replay the local (base -> working) changes into the commit txn. */
+ SVN_ERR(replay(commit_txn, edit_root_branch,
+ wc->base->branch,
+ wc->working->branch,
+ scratch_pool));
+
+ ccbb.edit_txn = commit_txn;
+ ccbb.wc_base_branch_id = wc->base->branch->bid;
+ ccbb.wc_commit_branch_id = edit_root_branch_id;
+
+ /* Drive the commit to completion; ccbb.revision is set by the
+ commit callback. */
+ SVN_ERR(svn_branch__txn_complete(commit_txn, scratch_pool));
+ SVN_ERR(update_wc_eids(wc, scratch_pool));
+ SVN_ERR(update_wc_base(wc, ccbb.revision, scratch_pool));
+ SVN_ERR(display_diff_of_commit(&ccbb, scratch_pool));
+
+ wc->head_revision = ccbb.revision;
+ if (new_rev_p)
+ *new_rev_p = ccbb.revision;
+
+ /* The accumulated command log has been used (or discarded); reset it. */
+ wc->list_of_commands = NULL;
+
+ return SVN_NO_ERROR;
+}
+
+/* The code for each interactive/command-line sub-command that svnmover
+ * understands. Each value corresponds to one entry in ACTION_DEFN below. */
+typedef enum action_code_t {
+ ACTION_INFO_WC,
+ ACTION_INFO,
+ ACTION_LIST_CONFLICTS,
+ ACTION_RESOLVED_CONFLICT,
+ ACTION_DIFF,
+ ACTION_LOG,
+ ACTION_LIST_BRANCHES,
+ ACTION_LIST_BRANCHES_R,
+ ACTION_LS,
+ ACTION_TBRANCH,
+ ACTION_BRANCH,
+ ACTION_BRANCH_INTO,
+ ACTION_MKBRANCH,
+ ACTION_MERGE3,
+ ACTION_AUTO_MERGE,
+ ACTION_MV,
+ ACTION_MKDIR,
+ ACTION_PUT_FILE,
+ ACTION_CAT,
+ ACTION_CP,
+ ACTION_RM,
+ ACTION_CP_RM,
+ ACTION_BR_RM,
+ ACTION_BR_INTO_RM,
+ ACTION_COMMIT,
+ ACTION_UPDATE,
+ ACTION_SWITCH,
+ ACTION_STATUS,
+ ACTION_REVERT,
+ ACTION_MIGRATE
+} action_code_t;
+
+/* The static definition of one sub-command: its code, the command word
+ * the user types, its argument count, and its help text. */
+typedef struct action_defn_t {
+ enum action_code_t code;
+ const char *name;      /* the command word */
+ int num_args;          /* number of arguments the command expects */
+ const char *args_help; /* one-line argument synopsis for help output */
+ const char *help;      /* description shown in help output */
+} action_defn_t;
+
+/* NL: line-break-plus-indent used to continue a help string onto the
+ next line of the help display. */
+#define NL "\n "
+/* The table of all sub-commands, one action_defn_t per command.
+ * NOTE(review): "cp" declares num_args=2 but its args_help lists three
+ * tokens ("REV SRC DST") — confirm against the argument parser. */
+static const action_defn_t action_defn[] =
+{
+ {ACTION_INFO_WC, "info-wc", 0, "",
+ "print information about the WC"},
+ {ACTION_INFO, "info", 1, "PATH",
+ "show info about the element at PATH"},
+ {ACTION_LIST_CONFLICTS, "conflicts", 0, "",
+ "list unresolved conflicts"},
+ {ACTION_RESOLVED_CONFLICT,"resolved", 1, "CONFLICT_ID",
+ "mark conflict as resolved"},
+ {ACTION_LIST_BRANCHES, "branches", 1, "PATH",
+ "list all branches rooted at the same element as PATH"},
+ {ACTION_LIST_BRANCHES_R, "ls-br-r", 0, "",
+ "list all branches, recursively"},
+ {ACTION_LS, "ls", 1, "PATH",
+ "list elements in the branch found at PATH"},
+ {ACTION_LOG, "log", 2, "FROM@REV TO@REV",
+ "show per-revision diffs between FROM and TO"},
+ {ACTION_TBRANCH, "tbranch", 1, "SRC",
+ "branch the branch-root or branch-subtree at SRC" NL
+ "to make a new top-level branch"},
+ {ACTION_BRANCH, "branch", 2, "SRC DST",
+ "branch the branch-root or branch-subtree at SRC" NL
+ "to make a new branch at DST"},
+ {ACTION_BRANCH_INTO, "branch-into", 2, "SRC DST",
+ "make a branch of the existing subtree SRC appear at" NL
+ "DST as part of the existing branch that contains DST" NL
+ "(like merging the creation of SRC to DST)"},
+ {ACTION_MKBRANCH, "mkbranch", 1, "ROOT",
+ "make a directory that's the root of a new subbranch"},
+ {ACTION_DIFF, "diff", 2, "LEFT@REV RIGHT@REV",
+ "show differences from subtree LEFT to subtree RIGHT"},
+ {ACTION_MERGE3, "merge", 3, "FROM TO YCA@REV",
+ "3-way merge YCA->FROM into TO"},
+ {ACTION_AUTO_MERGE, "automerge", 2, "FROM TO",
+ "automatic merge FROM into TO"},
+ {ACTION_CP, "cp", 2, "REV SRC DST",
+ "copy SRC@REV to DST"},
+ {ACTION_MV, "mv", 2, "SRC DST",
+ "move SRC to DST"},
+ {ACTION_RM, "rm", 1, "PATH",
+ "delete PATH"},
+ {ACTION_CP_RM, "copy-and-delete", 2, "SRC DST",
+ "copy-and-delete SRC to DST"},
+ {ACTION_BR_RM, "branch-and-delete", 2, "SRC DST",
+ "branch-and-delete SRC to DST"},
+ {ACTION_BR_INTO_RM, "branch-into-and-delete", 2, "SRC DST",
+ "merge-and-delete SRC to DST"},
+ {ACTION_MKDIR, "mkdir", 1, "PATH",
+ "create new directory PATH"},
+ {ACTION_PUT_FILE, "put", 2, "LOCAL_FILE PATH",
+ "add or modify file PATH with text copied from" NL
+ "LOCAL_FILE (use \"-\" to read from standard input)"},
+ {ACTION_CAT, "cat", 1, "PATH",
+ "display text (for a file) and props (if any) of PATH"},
+ {ACTION_COMMIT, "commit", 0, "",
+ "commit the changes"},
+ {ACTION_UPDATE, "update", 1, ".@REV",
+ "update to revision REV, keeping local changes"},
+ {ACTION_SWITCH, "switch", 1, "TARGET[@REV]",
+ "switch to another branch and/or revision, keeping local changes"},
+ {ACTION_STATUS, "status", 0, "",
+ "same as 'diff .@base .'"},
+ {ACTION_REVERT, "revert", 0, "",
+ "revert all uncommitted changes"},
+ {ACTION_MIGRATE, "migrate", 1, ".@REV",
+ "migrate changes from non-move-tracking revision"},
+};
+
+/* One parsed invocation of a sub-command: which action it is, plus the
+ * (up to three) parsed revision/branch/path arguments. */
+typedef struct action_t {
+ /* The original command words (const char *) by which the action was
+ specified */
+ apr_array_header_t *action_args;
+
+ action_code_t action;
+
+ /* argument revisions */
+ svn_opt_revision_t rev_spec[3];
+
+ /* argument branch ids, parallel to relpath[] below */
+ const char *branch_id[3];
+
+ /* argument paths */
+ const char *relpath[3];
+} action_t;
+
+/* ====================================================================== */
+
+/* Find the deepest branch in the repository of which REVNUM:BRANCH_ID:RELPATH
+ * is either the root element or a normal, non-sub-branch element.
+ *
+ * RELPATH is a repository-relative path. REVNUM is a revision number, or
+ * SVN_INVALID_REVNUM meaning the current txn.
+ *
+ * Return the location of the element in that branch, or with
+ * EID=-1 if no element exists there.
+ *
+ * If BRANCH_ID is null, the default is the WC base branch when REVNUM is
+ * specified, and the WC working branch when REVNUM is SVN_INVALID_REVNUM.
+ *
+ * Return an error if branch BRANCH_ID does not exist in r<REVNUM>; otherwise,
+ * the result will never be NULL, as every path is within at least the root
+ * branch.
+ */
+static svn_error_t *
+find_el_rev_by_rrpath_rev(svn_branch__el_rev_id_t **el_rev_p,
+ svnmover_wc_t *wc,
+ const svn_opt_revision_t *rev_spec,
+ const char *branch_id,
+ const char *relpath,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ /* Case 1: a committed revision ("N" or "head") — look up in the repos. */
+ if (rev_spec->kind == svn_opt_revision_number
+ || rev_spec->kind == svn_opt_revision_head)
+ {
+ svn_revnum_t revnum
+ = (rev_spec->kind == svn_opt_revision_number)
+ ? rev_spec->value.number : wc->head_revision;
+ const svn_branch__repos_t *repos = wc->working->branch->txn->repos;
+
+ if (! branch_id)
+ branch_id = wc->base->branch->bid;
+ SVN_ERR(svn_branch__repos_find_el_rev_by_path_rev(el_rev_p, repos,
+ revnum,
+ branch_id,
+ relpath,
+ result_pool,
+ scratch_pool));
+ }
+ /* Case 2: an uncommitted state ("working"/"base"/"committed" or no
+ specifier) — resolve against the WC's working txn. */
+ else if (rev_spec->kind == svn_opt_revision_unspecified
+ || rev_spec->kind == svn_opt_revision_working
+ || rev_spec->kind == svn_opt_revision_base
+ || rev_spec->kind == svn_opt_revision_committed)
+ {
+ svn_branch__state_t *branch
+ = branch_id ? svn_branch__txn_get_branch_by_id(
+ wc->working->branch->txn, branch_id, scratch_pool)
+ : wc->working->branch;
+ svn_branch__el_rev_id_t *el_rev = apr_palloc(result_pool, sizeof(*el_rev));
+
+ if (! branch)
+ return svn_error_createf(SVN_BRANCH__ERR, NULL,
+ _("Branch %s not found in working state"),
+ branch_id);
+ SVN_ERR(svn_branch__find_nested_branch_element_by_relpath(
+ &el_rev->branch, &el_rev->eid,
+ branch, relpath, scratch_pool));
+ if (rev_spec->kind == svn_opt_revision_unspecified
+ || rev_spec->kind == svn_opt_revision_working)
+ {
+ el_rev->rev = SVN_INVALID_REVNUM;
+ }
+ else
+ {
+ /* "base"/"committed": report the element's WC base revision. */
+ el_rev->rev = svnmover_wc_get_base_rev(wc, el_rev->branch,
+ el_rev->eid, scratch_pool);
+ }
+ *el_rev_p = el_rev;
+ }
+ else
+ {
+ return svn_error_createf(SVN_ERR_INCORRECT_PARAMS, NULL,
+ "'%s@...': revision specifier "
+ "must be a number or 'head', 'base' "
+ "or 'committed'",
+ relpath);
+ }
+ SVN_ERR_ASSERT(*el_rev_p);
+ return SVN_NO_ERROR;
+}
+
+/* Return a string suitable for appending to a displayed element name or
+ * element id to indicate that it is a subbranch root element for SUBBRANCH.
+ * Return "" if SUBBRANCH is null.
+ */
+static const char *
+branch_str(svn_branch__state_t *subbranch,
+ apr_pool_t *result_pool)
+{
+ if (subbranch)
+ return apr_psprintf(result_pool,
+ " (branch %s)",
+ svn_branch__get_id(subbranch, result_pool));
+ return "";
+}
+
+/* Return a string suitable for appending to a displayed element name or
+ * element id to indicate that BRANCH:EID is a subbranch root element.
+ * Return "" if the element is not a subbranch root element.
+ */
+static const char *
+subbranch_str(svn_branch__state_t *branch,
+ int eid,
+ apr_pool_t *result_pool)
+{
+ svn_branch__state_t *subbranch;
+
+ /* Any error is deliberately discarded: a failed lookup simply means
+ "no subbranch here", leaving SUBBRANCH as set by the callee. */
+ svn_error_clear(svn_branch__get_subbranch_at_eid(branch, &subbranch,
+ eid, result_pool));
+ return branch_str(subbranch, result_pool);
+}
+
+/* Like subbranch_str() but for a subtree: return " (branch ...)" if
+ * EID is a subbranch root within SUBTREE (BID being the subtree's
+ * branch id), else "". */
+static const char *
+subtree_subbranch_str(svn_branch__subtree_t *subtree,
+ const char *bid,
+ int eid,
+ apr_pool_t *result_pool)
+{
+ svn_branch__subtree_t *subbranch
+ = svn_branch__subtree_get_subbranch_at_eid(subtree, eid, result_pool);
+
+ if (subbranch)
+ return apr_psprintf(result_pool,
+ " (branch %s)",
+ svn_branch__id_nest(bid, eid, result_pool));
+ return "";
+}
+
+/* Return the repository-relative path of the element EL_REV,
+ * allocated in RESULT_POOL. */
+static const char *
+el_rev_id_to_path(svn_branch__el_rev_id_t *el_rev,
+ apr_pool_t *result_pool)
+{
+ const char *path
+ = svn_branch__get_rrpath_by_eid(el_rev->branch, el_rev->eid, result_pool);
+
+ return path;
+}
+
+/* Return the repository-relative path that the element named TO_NAME
+ * under parent element TO_PARENT_EID in TO_BRANCH would have,
+ * allocated in RESULT_POOL. */
+static const char *
+branch_peid_name_to_path(svn_branch__state_t *to_branch,
+ int to_parent_eid,
+ const char *to_name,
+ apr_pool_t *result_pool)
+{
+ const char *path
+ = svn_relpath_join(svn_branch__get_rrpath_by_eid(to_branch, to_parent_eid,
+ result_pool),
+ to_name, result_pool);
+
+ return path;
+}
+
+/* Sort-comparison callback: order two (eid -> relpath) hash items by
+ * their relpath values, using Subversion path ordering. */
+static int
+sort_compare_eid_mappings_by_path(const svn_sort__item_t *a,
+ const svn_sort__item_t *b)
+{
+ const char *astr = a->value, *bstr = b->value;
+
+ return svn_path_compare_paths(astr, bstr);
+}
+
+/* List the elements in BRANCH, in path notation.
+ *
+ * List only the elements for which a relpath is known -- that is, elements
+ * whose parents exist all the way up to the branch root.
+ */
+static svn_error_t *
+list_branch_elements(svn_branch__state_t *branch,
+ apr_pool_t *scratch_pool)
+{
+ apr_hash_t *eid_to_path = apr_hash_make(scratch_pool);
+ svn_element__tree_t *elements;
+ apr_hash_index_t *hi;
+ svn_eid__hash_iter_t *ei;
+
+ /* First pass: build an (eid -> relpath) mapping. */
+ SVN_ERR(svn_branch__state_get_elements(branch, &elements, scratch_pool));
+ for (hi = apr_hash_first(scratch_pool, elements->e_map);
+ hi; hi = apr_hash_next(hi))
+ {
+ int eid = svn_eid__hash_this_key(hi);
+ const char *relpath = svn_branch__get_path_by_eid(branch, eid,
+ scratch_pool);
+
+ svn_eid__hash_set(eid_to_path, eid, relpath);
+ }
+ /* Second pass: print the mapping, sorted by path. */
+ for (SVN_EID__HASH_ITER_SORTED(ei, eid_to_path,
+ sort_compare_eid_mappings_by_path,
+ scratch_pool))
+ {
+ int eid = ei->eid;
+ const char *relpath = ei->val;
+
+ /* Show the branch root as "." rather than an empty path. */
+ svnmover_notify(" %-20s%s",
+ relpath[0] ? relpath : ".",
+ subbranch_str(branch, eid, scratch_pool));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Sort-comparison callback: order two hash items numerically by their
+ * integer eid keys. */
+static int
+sort_compare_items_by_eid(const svn_sort__item_t *a,
+ const svn_sort__item_t *b)
+{
+ int eid_a = *(const int *)a->key;
+ int eid_b = *(const int *)b->key;
+
+ return eid_a - eid_b;
+}
+
+/* Return a fixed-width "parent-eid/name" display string for ELEMENT,
+ * or a placeholder "." when the element has no parent (parent_eid == -1,
+ * i.e. it is a branch root). Allocated in SCRATCH_POOL. */
+static const char *
+peid_name(const svn_element__content_t *element,
+ apr_pool_t *scratch_pool)
+{
+ if (element->parent_eid == -1)
+ return apr_psprintf(scratch_pool, "%3s %-10s", "", ".");
+
+ return apr_psprintf(scratch_pool, "%3d/%-10s",
+ element->parent_eid, element->name);
+}
+
+/* Column header printed above the per-eid element listing; the column
+ widths match the "%-3d %21s" formatting used by the listing itself. */
+static const char elements_by_eid_header[]
+ = " eid parent-eid/name\n"
+ " --- ----------/----";
+
+/* List all elements in branch BRANCH, in element notation.
+ */
+static svn_error_t *
+list_branch_elements_by_eid(svn_branch__state_t *branch,
+ apr_pool_t *scratch_pool)
+{
+ svn_element__tree_t *elements;
+ svn_eid__hash_iter_t *ei;
+
+ svnmover_notify_v("%s", elements_by_eid_header);
+ SVN_ERR(svn_branch__state_get_elements(branch, &elements, scratch_pool));
+ /* Iterate the element map in ascending eid order. */
+ for (SVN_EID__HASH_ITER_SORTED_BY_EID(ei, elements->e_map, scratch_pool))
+ {
+ int eid = ei->eid;
+ svn_element__content_t *element = ei->val;
+
+ /* Skip null entries; print only eids that have content. */
+ if (element)
+ {
+ svnmover_notify(" e%-3d %21s%s",
+ eid,
+ peid_name(element, scratch_pool),
+ subbranch_str(branch, eid, scratch_pool));
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Return the two-line column header for a branch listing, prefixed on
+ * each line by PREFIX. The columns depend on THE_UI_MODE (paths vs eids),
+ * matching the rows produced by branch_id_str(). */
+static const char *
+branch_id_header_str(const char *prefix,
+ apr_pool_t *result_pool)
+{
+ if (the_ui_mode == UI_MODE_PATHS)
+ {
+ return apr_psprintf(result_pool,
+ "%sbranch-id root-path\n"
+ "%s--------- ---------",
+ prefix, prefix);
+ }
+ else
+ {
+ return apr_psprintf(result_pool,
+ "%sbranch-id branch-name root-eid\n"
+ "%s--------- ----------- --------",
+ prefix, prefix);
+ }
+}
+
+/* Show the id and path or root-eid of BRANCH.
+ */
+static const char *
+branch_id_str(svn_branch__state_t *branch,
+ apr_pool_t *result_pool)
+{
+ apr_pool_t *scratch_pool = result_pool;
+
+ if (the_ui_mode == UI_MODE_PATHS)
+ {
+ return apr_psprintf(result_pool, "%-10s /%s",
+ svn_branch__get_id(branch, scratch_pool),
+ svn_branch__get_root_rrpath(branch, scratch_pool));
+ }
+ else
+ {
+ svn_element__content_t *outer_el = NULL;
+ svn_branch__state_t *outer_branch;
+ int outer_eid;
+
+ /* In eid mode, show the branch's name in its outer (parent) branch,
+ if it has one; a top-level branch is displayed as "/". */
+ svn_branch__get_outer_branch_and_eid(&outer_branch, &outer_eid,
+ branch, scratch_pool);
+
+ /* Lookup errors are discarded: OUTER_EL just stays NULL. */
+ if (outer_branch)
+ svn_error_clear(svn_branch__state_get_element(outer_branch, &outer_el,
+ outer_eid, scratch_pool));
+
+ return apr_psprintf(result_pool, "%-10s %-12s root=e%d",
+ svn_branch__get_id(branch, scratch_pool),
+ outer_el ? outer_el->name : "/",
+ svn_branch__root_eid(branch));
+ }
+}
+
+/* List the branch BRANCH.
+ *
+ * If WITH_ELEMENTS is true, also list the elements in it.
+ */
+static svn_error_t *
+list_branch(svn_branch__state_t *branch,
+ svn_boolean_t with_elements,
+ apr_pool_t *scratch_pool)
+{
+ svnmover_notify(" %s", branch_id_str(branch, scratch_pool));
+
+ if (with_elements)
+ {
+ /* Element notation follows the global UI mode. */
+ if (the_ui_mode == UI_MODE_PATHS)
+ {
+ SVN_ERR(list_branch_elements(branch, scratch_pool));
+ }
+ else
+ {
+ SVN_ERR(list_branch_elements_by_eid(branch, scratch_pool));
+ }
+ }
+ return SVN_NO_ERROR;
+}
+
+/* List all branches rooted at EID.
+ *
+ * If WITH_ELEMENTS is true, also list the elements in each branch.
+ *
+ * Two passes over TXN's branches: first those rooted at EID, then
+ * (under a separate header) those that merely contain EID.
+ */
+static svn_error_t *
+list_branches(svn_branch__txn_t *txn,
+ int eid,
+ svn_boolean_t with_elements,
+ apr_pool_t *scratch_pool)
+{
+ const apr_array_header_t *branches;
+ int i;
+ svn_boolean_t printed_header = FALSE;
+
+ svnmover_notify_v("%s", branch_id_header_str(" ", scratch_pool));
+
+ branches = svn_branch__txn_get_branches(txn, scratch_pool);
+
+ /* Pass 1: branches whose root element is EID. */
+ for (i = 0; i < branches->nelts; i++)
+ {
+ svn_branch__state_t *branch = APR_ARRAY_IDX(branches, i, void *);
+
+ if (svn_branch__root_eid(branch) != eid)
+ continue;
+
+ SVN_ERR(list_branch(branch, with_elements, scratch_pool));
+ if (with_elements) /* separate branches by a blank line */
+ svnmover_notify("%s", "");
+ }
+
+ /* Pass 2: branches that contain element EID but are not rooted at it. */
+ for (i = 0; i < branches->nelts; i++)
+ {
+ svn_branch__state_t *branch = APR_ARRAY_IDX(branches, i, void *);
+ svn_element__content_t *element;
+
+ SVN_ERR(svn_branch__state_get_element(branch, &element,
+ eid, scratch_pool));
+ if (! element
+ || svn_branch__root_eid(branch) == eid)
+ continue;
+
+ /* Emit the secondary header once, lazily, only if pass 2 matches. */
+ if (! printed_header)
+ {
+ if (the_ui_mode == UI_MODE_PATHS)
+ svnmover_notify_v("branches containing but not rooted at that element:");
+ else
+ svnmover_notify_v("branches containing but not rooted at e%d:", eid);
+ printed_header = TRUE;
+ }
+ SVN_ERR(list_branch(branch, with_elements, scratch_pool));
+ if (with_elements) /* separate branches by a blank line */
+ svnmover_notify("%s", "");
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* List all branches. If WITH_ELEMENTS is true, also list the elements
+ * in each branch.
+ */
+static svn_error_t *
+list_all_branches(svn_branch__txn_t *txn,
+ svn_boolean_t with_elements,
+ apr_pool_t *scratch_pool)
+{
+ const apr_array_header_t *branches;
+ int i;
+
+ branches = svn_branch__txn_get_branches(txn, scratch_pool);
+
+ svnmover_notify_v("branches:");
+
+ /* Print every branch in TXN, in the order the txn returns them. */
+ for (i = 0; i < branches->nelts; i++)
+ {
+ svn_branch__state_t *branch = APR_ARRAY_IDX(branches, i, void *);
+
+ SVN_ERR(list_branch(branch, with_elements, scratch_pool));
+ if (with_elements) /* separate branches by a blank line */
+ svnmover_notify("%s", "");
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Switch the WC to revision REVISION (SVN_INVALID_REVNUM means HEAD)
+ * and branch TARGET_BRANCH.
+ *
+ * Merge any changes in the existing txn into the new txn.
+ */
+static svn_error_t *
+do_switch(svnmover_wc_t *wc,
+ svn_revnum_t revision,
+ svn_branch__state_t *target_branch,
+ apr_pool_t *scratch_pool)
+{
+ const char *target_branch_id
+ = svn_branch__get_id(target_branch, scratch_pool);
+ /* Keep hold of the previous WC txn */
+ svn_branch__state_t *previous_base_br = wc->base->branch;
+ svn_branch__state_t *previous_working_br = wc->working->branch;
+ svn_boolean_t has_local_changes;
+
+ SVN_ERR(txn_is_changed(previous_working_br->txn,
+ &has_local_changes, scratch_pool));
+
+ /* Usually one would switch the WC to another branch (or just another
+ revision) rooted at the same element. Switching to a branch rooted
+ at a different element is well defined, but give a warning. */
+ if (has_local_changes
+ && svn_branch__root_eid(target_branch)
+ != svn_branch__root_eid(previous_base_br))
+ {
+ svnmover_notify(_("Warning: you are switching from %s rooted at e%d "
+ "to %s rooted at e%d, a different root element, "
+ "while there are local changes. "),
+ svn_branch__get_id(previous_base_br, scratch_pool),
+ svn_branch__root_eid(previous_base_br),
+ target_branch_id,
+ svn_branch__root_eid(target_branch));
+ }
+
+ /* Complete the old edit drive into the 'WC' txn */
+ SVN_ERR(svn_branch__txn_sequence_point(wc->edit_txn, scratch_pool));
+
+ /* Check out a new WC, re-using the same data object */
+ SVN_ERR(wc_checkout(wc, revision, target_branch_id, scratch_pool));
+
+ if (has_local_changes)
+ {
+ svn_branch__el_rev_id_t *yca, *src, *tgt;
+
+ /* Merge changes from the old into the new WC.
+ YCA = old base, SRC = old working, TGT = new working: a standard
+ 3-way merge carrying the local (base->working) delta across. */
+ yca = svn_branch__el_rev_id_create(previous_base_br,
+ svn_branch__root_eid(previous_base_br),
+ previous_base_br->txn->rev,
+ scratch_pool);
+ src = svn_branch__el_rev_id_create(previous_working_br,
+ svn_branch__root_eid(previous_working_br),
+ SVN_INVALID_REVNUM, scratch_pool);
+ tgt = svn_branch__el_rev_id_create(wc->working->branch,
+ svn_branch__root_eid(wc->working->branch),
+ SVN_INVALID_REVNUM, scratch_pool);
+ SVN_ERR(svnmover_branch_merge(wc->edit_txn, tgt->branch,
+ &wc->conflicts,
+ src, tgt, yca, wc->pool, scratch_pool));
+
+ if (svnmover_any_conflicts(wc->conflicts))
+ {
+ SVN_ERR(svnmover_display_conflicts(wc->conflicts, scratch_pool));
+ }
+
+ /* ### TODO: If the merge raises conflicts, allow the user to revert
+ to the pre-update state or resolve the conflicts. Currently
+ this leaves the merge partially done and the pre-update state
+ is lost. */
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Perform a 3-way merge of the changes YCA->SRC into TGT, then record
+ * SRC as a new merge parent in TGT's history metadata.
+ *
+ * Warn (but proceed) if the three root elements differ. Display any
+ * conflicts that the merge raises.
+ */
+static svn_error_t *
+do_merge(svnmover_wc_t *wc,
+ svn_branch__el_rev_id_t *src,
+ svn_branch__el_rev_id_t *tgt,
+ svn_branch__el_rev_id_t *yca,
+ apr_pool_t *scratch_pool)
+{
+ svn_branch__history_t *history;
+
+ if (src->eid != tgt->eid || src->eid != yca->eid)
+ {
+ svnmover_notify(_("Warning: root elements differ in the requested merge "
+ "(from: e%d, to: e%d, yca: e%d)"),
+ src->eid, tgt->eid, yca->eid);
+ }
+
+ SVN_ERR(svnmover_branch_merge(wc->edit_txn, tgt->branch,
+ &wc->conflicts,
+ src, tgt, yca,
+ wc->pool, scratch_pool));
+
+ /* Update the history */
+ SVN_ERR(svn_branch__state_get_history(tgt->branch, &history, scratch_pool));
+ /* ### Assume this was a complete merge -- i.e. all changes up to YCA were
+ previously merged, so now SRC is a new parent. */
+ SVN_ERR(svn_branch__history_add_parent(history, src->rev, src->branch->bid,
+ scratch_pool));
+ SVN_ERR(svn_branch__state_set_history(tgt->branch, history, scratch_pool));
+ svnmover_notify_v(_("--- recorded merge parent as: %ld.%s"),
+ src->rev, src->branch->bid);
+
+ if (svnmover_any_conflicts(wc->conflicts))
+ {
+ SVN_ERR(svnmover_display_conflicts(wc->conflicts, scratch_pool));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Automatically merge SRC into TGT, using the Youngest Common Ancestor
+ * of the two branches as the merge base.
+ *
+ * ### YCA discovery is not yet implemented (YCA is hard-coded to null
+ * below), so currently this always returns an SVN_BRANCH__ERR error.
+ */
+static svn_error_t *
+do_auto_merge(svnmover_wc_t *wc,
+ svn_branch__el_rev_id_t *src,
+ svn_branch__el_rev_id_t *tgt,
+ apr_pool_t *scratch_pool)
+{
+ svn_branch__rev_bid_t *yca;
+
+ /* Find the Youngest Common Ancestor.
+ ### TODO */
+ yca = NULL;
+
+ if (yca)
+ {
+ svn_branch__repos_t *repos = wc->working->branch->txn->repos;
+ svn_branch__state_t *yca_branch;
+ svn_branch__el_rev_id_t *_yca;
+
+ /* Resolve the (rev, bid) YCA into a concrete branch root element
+ and delegate to the ordinary 3-way merge. */
+ SVN_ERR(svn_branch__repos_get_branch_by_id(&yca_branch, repos,
+ yca->rev, yca->bid,
+ scratch_pool));
+ _yca = svn_branch__el_rev_id_create(yca_branch,
+ svn_branch__root_eid(yca_branch),
+ yca->rev, scratch_pool);
+
+ SVN_ERR(do_merge(wc, src, tgt, _yca, scratch_pool));
+ }
+ else
+ {
+ return svn_error_create(SVN_BRANCH__ERR, NULL,
+ _("Cannot perform automatic merge: "
+ "no YCA found"));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Print the history metadata of BRANCH, each output line prefixed by
+ * PREFIX, and recurse into each of BRANCH's immediate subbranches.
+ *
+ * BRANCH may be null, in which case do nothing.
+ *
+ * (The previous comment here described a two-branch "difference" with a
+ * HEADER parameter; that was stale — this function takes one branch and
+ * a prefix, and prints, not diffs.)
+ */
+static svn_error_t *
+show_history_r(svn_branch__state_t *branch,
+ const char *prefix,
+ apr_pool_t *scratch_pool)
+{
+ svn_branch__history_t *history = NULL;
+ svn_branch__subtree_t *subtree = NULL;
+ apr_hash_index_t *hi;
+
+ if (! branch)
+ return SVN_NO_ERROR;
+
+ SVN_ERR(svn_branch__state_get_history(branch, &history, scratch_pool));
+ svnmover_notify("%s%s: %s", prefix,
+ branch->bid, history_str(history, scratch_pool));
+
+ /* recurse into each subbranch */
+ SVN_ERR(svn_branch__get_subtree(branch, &subtree,
+ svn_branch__root_eid(branch),
+ scratch_pool));
+ for (hi = apr_hash_first(scratch_pool, subtree->subbranches);
+ hi; hi = apr_hash_next(hi))
+ {
+ int e = svn_eid__hash_this_key(hi);
+ svn_branch__state_t *subbranch = NULL;
+
+ SVN_ERR(svn_branch__get_subbranch_at_eid(branch, &subbranch, e,
+ scratch_pool));
+ if (subbranch)
+ {
+ SVN_ERR(show_history_r(subbranch, prefix, scratch_pool));
+ }
+ }
+ return SVN_NO_ERROR;
+}
+
+/* One changed element in a subtree diff: its eid, its content on each
+ * side (null if absent on that side), its relpath on each side, and
+ * flags describing the kind of change. */
+typedef struct diff_item_t
+{
+ int eid;
+ svn_element__content_t *e0, *e1;   /* left/right content; null = absent */
+ const char *relpath0, *relpath1;   /* left/right path; null = absent */
+ svn_boolean_t modified, reparented, renamed;
+} diff_item_t;
+
+/* Return differences between branch subtrees S_LEFT and S_RIGHT.
+ * Diff the union of S_LEFT's and S_RIGHT's elements.
+ *
+ * Set *DIFF_CHANGES to a hash of (eid -> diff_item_t).
+ *
+ * ### This requires 'subtrees' only in order to produce the 'relpath'
+ * fields in the output. Other than that, it would work with arbitrary
+ * sets of elements.
+ */
+static svn_error_t *
+subtree_diff(apr_hash_t **diff_changes,
+ svn_branch__subtree_t *s_left,
+ svn_branch__subtree_t *s_right,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool)
+{
+ apr_hash_t *diff_left_right;
+ apr_hash_index_t *hi;
+
+ *diff_changes = apr_hash_make(result_pool);
+
+ SVN_ERR(svnmover_element_differences(&diff_left_right,
+ s_left->tree, s_right->tree,
+ NULL /*union of s_left & s_right*/,
+ result_pool, scratch_pool));
+
+ /* Convert each (eid -> [left, right] content pair) into a diff_item_t. */
+ for (hi = apr_hash_first(scratch_pool, diff_left_right);
+ hi; hi = apr_hash_next(hi))
+ {
+ int eid = svn_eid__hash_this_key(hi);
+ svn_element__content_t **e_pair = apr_hash_this_val(hi);
+ svn_element__content_t *e0 = e_pair[0], *e1 = e_pair[1];
+
+ if (e0 || e1)
+ {
+ diff_item_t *item = apr_palloc(result_pool, sizeof(*item));
+
+ item->eid = eid;
+ item->e0 = e0;
+ item->e1 = e1;
+ item->relpath0 = e0 ? svn_element__tree_get_path_by_eid(
+ s_left->tree, eid, result_pool) : NULL;
+ item->relpath1 = e1 ? svn_element__tree_get_path_by_eid(
+ s_right->tree, eid, result_pool) : NULL;
+ item->reparented = (e0 && e1 && e0->parent_eid != e1->parent_eid);
+ item->renamed = (e0 && e1 && strcmp(e0->name, e1->name) != 0);
+ /* NOTE(review): item->modified is never assigned here, so that
+ field is left uninitialized in the palloc'd struct; callers in
+ view derive A/D/M status from e0/e1 instead — confirm no reader
+ of 'modified' exists, or initialize it. */
+
+ svn_eid__hash_set(*diff_changes, eid, item);
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* Find the relative order of diff items A and B, according to the
+ * "major path" of each. The major path means its right-hand relpath, if
+ * it exists on the right-hand side of the diff, else its left-hand relpath.
+ *
+ * Return negative/zero/positive when A sorts before/equal-to/after B.
+ */
+static int
+diff_ordering_major_paths(const struct svn_sort__item_t *a,
+ const struct svn_sort__item_t *b)
+{
+ const diff_item_t *item_a = a->value, *item_b = b->value;
+ int deleted_a = (item_a->e0 && ! item_a->e1);
+ int deleted_b = (item_b->e0 && ! item_b->e1);
+ const char *major_path_a = (item_a->e1 ? item_a->relpath1 : item_a->relpath0);
+ const char *major_path_b = (item_b->e1 ? item_b->relpath1 : item_b->relpath0);
+
+ /* Sort deleted items before all others */
+ if (deleted_a != deleted_b)
+ return deleted_b - deleted_a;
+
+ /* Sort by path */
+ return svn_path_compare_paths(major_path_a, major_path_b);
+}
+
+/* Display differences between subtrees LEFT and RIGHT, which are subtrees
+ * of branches LEFT_BID and RIGHT_BID respectively.
+ *
+ * Diff the union of LEFT's and RIGHT's elements.
+ *
+ * Use EDITOR to fetch content when needed.
+ *
+ * Write a line containing HEADER before any other output, if it is not
+ * null. Write PREFIX at the start of each line of output, including any
+ * header line. PREFIX and HEADER should contain no end-of-line characters.
+ *
+ * The output refers to paths or to elements according to THE_UI_MODE.
+ */
+static svn_error_t *
+show_subtree_diff(svn_branch__subtree_t *left,
+ const char *left_bid,
+ svn_branch__subtree_t *right,
+ const char *right_bid,
+ const char *prefix,
+ const char *header,
+ apr_pool_t *scratch_pool)
+{
+ apr_hash_t *diff_changes;
+ svn_eid__hash_iter_t *ei;
+
+ SVN_ERR_ASSERT(left && left->tree->root_eid != -1
+ && right && right->tree->root_eid != -1);
+
+ SVN_ERR(subtree_diff(&diff_changes, left, right,
+ scratch_pool, scratch_pool));
+
+ /* Suppress the header entirely when there is nothing to show. */
+ if (header && apr_hash_count(diff_changes))
+ svnmover_notify("%s%s", prefix, header);
+
+ /* In eid mode, order by eid; in path mode, order by "major path"
+ (deleted items first, then path order). */
+ for (SVN_EID__HASH_ITER_SORTED(ei, diff_changes,
+ (the_ui_mode == UI_MODE_EIDS)
+ ? sort_compare_items_by_eid
+ : diff_ordering_major_paths,
+ scratch_pool))
+ {
+ diff_item_t *item = ei->val;
+ svn_element__content_t *e0 = item->e0, *e1 = item->e1;
+ /* Status: present both sides = Modified; left only = Deleted;
+ right only = Added. */
+ char status_mod = (e0 && e1) ? 'M' : e0 ? 'D' : 'A';
+
+ /* For a deleted element whose parent was also deleted, mark it is
+ less interesting, somehow. (Or we could omit it entirely.) */
+ if (status_mod == 'D')
+ {
+ diff_item_t *parent_item
+ = svn_eid__hash_get(diff_changes, e0->parent_eid);
+
+ if (parent_item && ! parent_item->e1)
+ status_mod = 'd';
+ }
+
+ if (the_ui_mode == UI_MODE_PATHS)
+ {
+ const char *major_path = (e1 ? item->relpath1 : item->relpath0);
+ const char *from = "";
+
+ /* Describe a move and/or rename as a "(... from ...)" suffix. */
+ if (item->reparented || item->renamed)
+ {
+ if (! item->reparented)
+ from = apr_psprintf(scratch_pool,
+ " (renamed from .../%s)",
+ e0->name);
+ else if (! item->renamed)
+ from = apr_psprintf(scratch_pool,
+ " (moved from %s/...)",
+ svn_relpath_dirname(item->relpath0,
+ scratch_pool));
+ else
+ from = apr_psprintf(scratch_pool,
+ " (moved+renamed from %s)",
+ item->relpath0);
+ }
+ svnmover_notify("%s%c%c%c %s%s%s",
+ prefix,
+ status_mod,
+ item->reparented ? 'v' : ' ',
+ item->renamed ? 'r' : ' ',
+ major_path,
+ subtree_subbranch_str(e0 ? left : right,
+ e0 ? left_bid : right_bid,
+ item->eid, scratch_pool),
+ from);
+ }
+ else
+ {
+ svnmover_notify("%s%c%c%c e%-3d %s%s%s%s%s",
+ prefix,
+ status_mod,
+ item->reparented ? 'v' : ' ',
+ item->renamed ? 'r' : ' ',
+ item->eid,
+ e1 ? peid_name(e1, scratch_pool) : "",
+ subtree_subbranch_str(e0 ? left : right,
+ e0 ? left_bid : right_bid,
+ item->eid, scratch_pool),
+ e0 && e1 ? " (from " : "",
+ e0 ? peid_name(e0, scratch_pool) : "",
+ e0 && e1 ? ")" : "");
+ }
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/* The type of a subtree-diff display callback such as show_subtree_diff()
+ * above; used by subtree_diff_r() to parameterize how each branch pair
+ * is rendered. */
+typedef svn_error_t *
+svn_branch__diff_func_t(svn_branch__subtree_t *left,
+ const char *left_bid,
+ svn_branch__subtree_t *right,
+ const char *right_bid,
+ const char *prefix,
+ const char *header,
+ apr_pool_t *scratch_pool);
+
/* Display differences between the subtree of LEFT_BRANCH rooted at
 * LEFT_ROOT_EID and the subtree of RIGHT_BRANCH rooted at RIGHT_ROOT_EID,
 * by calling DIFF_FUNC for each pair of corresponding (sub)branches.
 * Either branch may be null, meaning that side does not exist.
 *
 * Each notification line is prefixed with PREFIX.
 *
 * Recurse into sub-branches.
 */
static svn_error_t *
subtree_diff_r(svn_branch__state_t *left_branch,
               int left_root_eid,
               svn_branch__state_t *right_branch,
               int right_root_eid,
               svn_branch__diff_func_t diff_func,
               const char *prefix,
               apr_pool_t *scratch_pool)
{
  svn_branch__subtree_t *left = NULL;
  svn_branch__subtree_t *right = NULL;
  /* Human-readable "BID:eEID at /PATH" labels for each side, or NULL when
     the corresponding branch does not exist. */
  const char *left_str
    = left_branch
        ? apr_psprintf(scratch_pool, "%s:e%d at /%s",
                       left_branch->bid, left_root_eid,
                       svn_branch__get_root_rrpath(left_branch, scratch_pool))
        : NULL;
  const char *right_str
    = right_branch
        ? apr_psprintf(scratch_pool, "%s:e%d at /%s",
                       right_branch->bid, right_root_eid,
                       svn_branch__get_root_rrpath(right_branch, scratch_pool))
        : NULL;
  const char *header;
  apr_hash_t *subbranches_l, *subbranches_r, *subbranches_all;
  apr_hash_index_t *hi;

  if (left_branch)
    {
      SVN_ERR(svn_branch__get_subtree(left_branch, &left, left_root_eid,
                                      scratch_pool));
    }
  if (right_branch)
    {
      SVN_ERR(svn_branch__get_subtree(right_branch, &right, right_root_eid,
                                      scratch_pool));
    }

  /* A branch present on only one side is reported as added/deleted;
     DIFF_FUNC is invoked only when both sides exist. */
  if (!left)
    {
      header = apr_psprintf(scratch_pool,
                            "--- added branch %s",
                            right_str);
      svnmover_notify("%s%s", prefix, header);
    }
  else if (!right)
    {
      header = apr_psprintf(scratch_pool,
                            "--- deleted branch %s",
                            left_str);
      svnmover_notify("%s%s", prefix, header);
    }
  else
    {
      if (strcmp(left_str, right_str) == 0)
        {
          header = apr_psprintf(
                     scratch_pool, "--- diff branch %s",
                     left_str);
        }
      else
        {
          header = apr_psprintf(
                     scratch_pool, "--- diff branch %s : %s",
                     left_str, right_str);
        }
      SVN_ERR(diff_func(left, left_branch->bid, right, right_branch->bid,
                        prefix, header,
                        scratch_pool));
    }

  /* recurse into each subbranch that exists in LEFT and/or in RIGHT */
  subbranches_l = left ? left->subbranches : apr_hash_make(scratch_pool);
  subbranches_r = right ? right->subbranches : apr_hash_make(scratch_pool);
  subbranches_all = hash_overlay(subbranches_l, subbranches_r);

  for (hi = apr_hash_first(scratch_pool, subbranches_all);
       hi; hi = apr_hash_next(hi))
    {
      int e = svn_eid__hash_this_key(hi);
      svn_branch__state_t *left_subbranch = NULL, *right_subbranch = NULL;
      /* -1 means "no subbranch on this side"; subtree_diff_r then reports
         it as an added or deleted branch. */
      int left_subbranch_eid = -1, right_subbranch_eid = -1;

      /* recurse */
      if (left_branch)
        {
          SVN_ERR(svn_branch__get_subbranch_at_eid(left_branch, &left_subbranch, e,
                                                   scratch_pool));
          if (left_subbranch)
            {
              left_subbranch_eid = svn_branch__root_eid(left_subbranch);
            }
        }
      if (right_branch)
        {
          SVN_ERR(svn_branch__get_subbranch_at_eid(right_branch, &right_subbranch, e,
                                                   scratch_pool));
          if (right_subbranch)
            {
              right_subbranch_eid = svn_branch__root_eid(right_subbranch);
            }
        }
      SVN_ERR(subtree_diff_r(left_subbranch, left_subbranch_eid,
                             right_subbranch, right_subbranch_eid,
                             diff_func, prefix, scratch_pool));
    }
  return SVN_NO_ERROR;
}
+
/* Display differences between branch subtrees LEFT and RIGHT, calling
 * DIFF_FUNC for each pair of corresponding (sub)branches and prefixing
 * each output line with PREFIX.
 *
 * Recurse into sub-branches.
 */
static svn_error_t *
branch_diff_r(svn_branch__el_rev_id_t *left,
              svn_branch__el_rev_id_t *right,
              svn_branch__diff_func_t diff_func,
              const char *prefix,
              apr_pool_t *scratch_pool)
{
  /* Thin wrapper: unpack the (branch, eid) pairs and delegate. */
  SVN_ERR(subtree_diff_r(left->branch, left->eid,
                         right->branch, right->eid,
                         diff_func, prefix, scratch_pool));
  return SVN_NO_ERROR;
}
+
/* Copy the subtree at FROM_EL_REV into TO_BRANCH, placing its root at
 * TO_PARENT_EID with the name NEW_NAME.  Notify with an "A+" line. */
static svn_error_t *
do_copy(svn_branch__el_rev_id_t *from_el_rev,
        svn_branch__state_t *to_branch,
        svn_branch__eid_t to_parent_eid,
        const char *new_name,
        apr_pool_t *scratch_pool)
{
  const char *from_branch_id = svn_branch__get_id(from_el_rev->branch,
                                                  scratch_pool);
  /* Package the source as a (rev, branch-id, eid) triple for the copy API. */
  svn_branch__rev_bid_eid_t *src_el_rev
    = svn_branch__rev_bid_eid_create(from_el_rev->rev, from_branch_id,
                                     from_el_rev->eid, scratch_pool);
  const char *from_path = el_rev_id_to_path(from_el_rev, scratch_pool);
  const char *to_path = branch_peid_name_to_path(to_branch, to_parent_eid,
                                                 new_name, scratch_pool);

  SVN_ERR(svn_branch__state_copy_tree(to_branch,
                                      src_el_rev, to_parent_eid, new_name,
                                      scratch_pool));
  svnmover_notify_v("A+   %s (from %s)",
                    to_path, from_path);

  return SVN_NO_ERROR;
}
+
/* Delete the element EID (and thereby its subtree) from BRANCH.
 * Notify with a "D" line showing the path it had before deletion. */
static svn_error_t *
do_delete(svn_branch__state_t *branch,
          svn_branch__eid_t eid,
          apr_pool_t *scratch_pool)
{
  /* Capture the path first: it is no longer available after the delete. */
  const char *path = svn_branch__get_rrpath_by_eid(branch, eid, scratch_pool);

  SVN_ERR(svn_branch__state_delete_one(branch, eid, scratch_pool));
  svnmover_notify_v("D    %s", path);
  return SVN_NO_ERROR;
}
+
/* Create a new directory element named NEW_NAME under TO_PARENT_EID in
 * TO_BRANCH, with empty properties.  A new element id is allocated from
 * TXN.  Notify with an "A" line. */
static svn_error_t *
do_mkdir(svn_branch__txn_t *txn,
         svn_branch__state_t *to_branch,
         svn_branch__eid_t to_parent_eid,
         const char *new_name,
         apr_pool_t *scratch_pool)
{
  apr_hash_t *props = apr_hash_make(scratch_pool);
  svn_element__payload_t *payload
    = svn_element__payload_create_dir(props, scratch_pool);
  int new_eid;
  const char *path = branch_peid_name_to_path(to_branch, to_parent_eid,
                                              new_name, scratch_pool);

  SVN_ERR(svn_branch__txn_new_eid(txn, &new_eid, scratch_pool));
  SVN_ERR(svn_branch__state_alter_one(to_branch, new_eid,
                                      to_parent_eid, new_name, payload,
                                      scratch_pool));
  svnmover_notify_v("A    %s",
                    path);
  return SVN_NO_ERROR;
}
+
+/* */
+static svn_error_t *
+do_put_file(svn_branch__txn_t *txn,
+ const char *local_file_path,
+ svn_branch__el_rev_id_t *file_el_rev,
+ svn_branch__el_rev_id_t *parent_el_rev,
+ const char *file_name,
+ apr_pool_t *scratch_pool)
+{
+ apr_hash_t *props;
+ svn_stringbuf_t *text;
+ int parent_eid;
+ const char *name;
+ svn_element__payload_t *payload;
+
+ if (file_el_rev->eid != -1)
+ {
+ /* get existing props */
+ svn_element__content_t *existing_element;
+
+ SVN_ERR(svn_branch__state_get_element(file_el_rev->branch,
+ &existing_element,
+ file_el_rev->eid, scratch_pool));
+ props = existing_element->payload->props;
+ }
+ else
+ {
+ props = apr_hash_make(scratch_pool);
+ }
+ /* read new text from file */
+ {
+ svn_stream_t *src;
+
+ if (strcmp(local_file_path, "-") != 0)
+ SVN_ERR(svn_stream_open_readonly(&src, local_file_path,
+ scratch_pool, scratch_pool));
+ else
+ SVN_ERR(svn_stream_for_stdin2(&src, FALSE, scratch_pool));
+
+ svn_stringbuf_from_stream(&text, src, 0, scratch_pool);
+ }
+ payload = svn_element__payload_create_file(props, text, scratch_pool);
+
+ if (is_branch_root_element(file_el_rev->branch,
+ file_el_rev->eid))
+ {
+ parent_eid = -1;
+ name = "";
+ }
+ else
+ {
+ parent_eid = parent_el_rev->eid;
+ name = file_name;
+ }
+
+ if (file_el_rev->eid != -1)
+ {
+ const char *path = el_rev_id_to_path(file_el_rev, scratch_pool);
+
+ SVN_ERR(svn_branch__state_alter_one(file_el_rev->branch, file_el_rev->eid,
+ parent_eid, name, payload,
+ scratch_pool));
+ svnmover_notify_v("M %s",
+ path);
+ }
+ else
+ {
+ int new_eid;
+ const char *path
+ = branch_peid_name_to_path(parent_el_rev->branch, parent_eid, name,
+ scratch_pool);
+
+ SVN_ERR(svn_branch__txn_new_eid(txn, &new_eid, scratch_pool));
+ SVN_ERR(svn_branch__state_alter_one(parent_el_rev->branch, new_eid,
+ parent_eid, name, payload,
+ scratch_pool));
+ file_el_rev->eid = new_eid;
+ svnmover_notify_v("A %s",
+ path);
+ }
+ return SVN_NO_ERROR;
+}
+
/* Print the properties and the text content of the file element at
 * FILE_EL_REV, one "property 'NAME': 'VALUE'" line per property followed
 * by the text (if any). */
static svn_error_t *
do_cat(svn_branch__el_rev_id_t *file_el_rev,
       apr_pool_t *scratch_pool)
{
  apr_hash_t *props;
  svn_stringbuf_t *text;
  svn_element__content_t *existing_element;
  apr_hash_index_t *hi;

  /* get existing props */
  SVN_ERR(svn_branch__state_get_element(file_el_rev->branch, &existing_element,
                                        file_el_rev->eid, scratch_pool));

  props = existing_element->payload->props;
  text = existing_element->payload->text;

  for (hi = apr_hash_first(scratch_pool, props); hi; hi = apr_hash_next(hi))
    {
      const char *pname = apr_hash_this_key(hi);
      svn_string_t *pval = apr_hash_this_val(hi);

      svnmover_notify("property '%s': '%s'", pname, pval->data);
    }
  /* TEXT is null for a non-file element; print nothing in that case. */
  if (text)
    {
      svnmover_notify("%s", text->data);
    }
  return SVN_NO_ERROR;
}
+
/* Find the main parent of branch-state BRANCH. That means:
 *   - the only parent (in the case of straight history or branching), else
 *   - the parent with the same branch id (in the case of normal merging), else
 *   - none (in the case of a new unrelated branch, or a new branch formed
 *     by merging two or more other branches).
 *
 * Set *PREDECESSOR_P (if PREDECESSOR_P is non-null) to the main parent,
 * or to null if there is none.
 */
static svn_error_t *
find_branch_main_parent(svn_branch__state_t *branch,
                        svn_branch__rev_bid_t **predecessor_p,
                        apr_pool_t *result_pool)
{
  svn_branch__history_t *history;
  svn_branch__rev_bid_t *our_own_history;
  svn_branch__rev_bid_t *predecessor = NULL;

  SVN_ERR(svn_branch__state_get_history(branch, &history, result_pool));
  if (apr_hash_count(history->parents) == 1)
    {
      /* Exactly one parent: that is the main parent by definition. */
      apr_hash_index_t *hi = apr_hash_first(result_pool, history->parents);

      predecessor = apr_hash_this_val(hi);
    }
  else if ((our_own_history = svn_hash_gets(history->parents, branch->bid)))
    {
      /* Multiple parents: prefer the one with our own branch id. */
      predecessor = our_own_history;
    }

  if (predecessor_p)
    *predecessor_p = predecessor;
  return SVN_NO_ERROR;
}
+
/* Set *NEW_EL_REV_P to the location where OLD_EL_REV was in the previous
 * revision. Follow the "main line" of any branching in its history.
 *
 * The same EID is carried over into the predecessor location unchanged.
 * (NOTE(review): nothing here checks that the element existed in the
 * predecessor branch/revision — presumably the caller tolerates that;
 * confirm.)
 *
 * Set *NEW_EL_REV_P to null if the branch has no main parent.
 */
static svn_error_t *
svn_branch__find_predecessor_el_rev(svn_branch__el_rev_id_t **new_el_rev_p,
                                    svn_branch__el_rev_id_t *old_el_rev,
                                    apr_pool_t *result_pool)
{
  const svn_branch__repos_t *repos = old_el_rev->branch->txn->repos;
  svn_branch__rev_bid_t *predecessor;
  svn_branch__state_t *branch;

  SVN_ERR(find_branch_main_parent(old_el_rev->branch,
                                  &predecessor, result_pool));
  if (! predecessor)
    {
      *new_el_rev_p = NULL;
      return SVN_NO_ERROR;
    }

  SVN_ERR(svn_branch__repos_get_branch_by_id(&branch,
                                             repos, predecessor->rev,
                                             predecessor->bid, result_pool));
  *new_el_rev_p = svn_branch__el_rev_id_create(branch, old_el_rev->eid,
                                               predecessor->rev, result_pool);

  return SVN_NO_ERROR;
}
+
/* Similar to 'svn log -v', this iterates over the revisions between
 * LEFT and RIGHT (currently excluding LEFT), printing a single-rev diff
 * for each.
 *
 * For each revision, walking backwards from RIGHT: print a separator and
 * the revision number, the branch history, and the changed elements as a
 * diff against the predecessor location.
 */
static svn_error_t *
do_log(svn_branch__el_rev_id_t *left,
       svn_branch__el_rev_id_t *right,
       apr_pool_t *scratch_pool)
{
  svn_revnum_t first_rev = left->rev;

  while (right->rev > first_rev)
    {
      svn_branch__el_rev_id_t *el_rev_left;

      /* NOTE(review): svn_branch__find_predecessor_el_rev can set
         EL_REV_LEFT to null (no main parent); that case is not guarded
         against here before the branch_diff_r call — confirm it cannot
         occur while right->rev > first_rev. */
      SVN_ERR(svn_branch__find_predecessor_el_rev(&el_rev_left, right, scratch_pool));

      svnmover_notify(SVN_CL__LOG_SEP_STRING "r%ld | ...",
                      right->rev);
      svnmover_notify("History:");
      SVN_ERR(show_history_r(right->branch, "   ", scratch_pool));
      svnmover_notify("Changed elements:");
      SVN_ERR(branch_diff_r(el_rev_left, right,
                            show_subtree_diff, "   ",
                            scratch_pool));
      /* Step back one revision. */
      right = el_rev_left;
    }

  return SVN_NO_ERROR;
}
+
/* Make a subbranch at OUTER_BRANCH : OUTER_PARENT_EID : OUTER_NAME.
 *
 * The subbranch will consist of a single element given by PAYLOAD.
 *
 * Two new eids are allocated from TXN: one for the subbranch-root element
 * in the outer branch, one for the root element inside the new branch.
 * Set *NEW_BRANCH_ID_P (if non-null) to the new branch's id.
 */
static svn_error_t *
do_mkbranch(const char **new_branch_id_p,
            svn_branch__txn_t *txn,
            svn_branch__state_t *outer_branch,
            int outer_parent_eid,
            const char *outer_name,
            svn_element__payload_t *payload,
            apr_pool_t *scratch_pool)
{
  const char *outer_branch_id = svn_branch__get_id(outer_branch, scratch_pool);
  int new_outer_eid, new_inner_eid;
  const char *new_branch_id;
  svn_branch__state_t *new_branch;
  const char *path = branch_peid_name_to_path(outer_branch, outer_parent_eid,
                                              outer_name, scratch_pool);

  /* Create the subbranch-root element in the outer branch. */
  SVN_ERR(svn_branch__txn_new_eid(txn, &new_outer_eid, scratch_pool));
  SVN_ERR(svn_branch__state_alter_one(outer_branch, new_outer_eid,
                                      outer_parent_eid, outer_name,
                                      svn_element__payload_create_subbranch(
                                        scratch_pool), scratch_pool));

  /* Open the new nested branch and give it a single root element. */
  SVN_ERR(svn_branch__txn_new_eid(txn, &new_inner_eid, scratch_pool));
  new_branch_id = svn_branch__id_nest(outer_branch_id, new_outer_eid,
                                      scratch_pool);
  SVN_ERR(svn_branch__txn_open_branch(txn, &new_branch,
                                      new_branch_id, new_inner_eid,
                                      NULL /*tree_ref*/,
                                      scratch_pool, scratch_pool));
  SVN_ERR(svn_branch__state_alter_one(new_branch, new_inner_eid,
                                      -1, "", payload, scratch_pool));

  svnmover_notify_v("A    %s (branch %s)",
                    path,
                    new_branch->bid);
  if (new_branch_id_p)
    *new_branch_id_p = new_branch->bid;
  return SVN_NO_ERROR;
}
+
/* Branch all or part of an existing branch, making a new branch.
 *
 * Branch the subtree of FROM_BRANCH found at FROM_EID, to create
 * a new branch at TO_OUTER_BRANCH:TO_OUTER_PARENT_EID:NEW_NAME.
 *
 * FROM_BRANCH:FROM_EID must be an existing element. It may be the
 * root of FROM_BRANCH. It must not be the root of a subbranch of
 * FROM_BRANCH.
 *
 * TO_OUTER_BRANCH:TO_OUTER_PARENT_EID must be an existing directory
 * and NEW_NAME must be nonexistent in that directory.
 *
 * Record FROM as the sole history parent of the new branch.  Set
 * *NEW_BRANCH_P (if non-null) to the new branch, allocated in
 * RESULT_POOL.
 */
static svn_error_t *
do_branch(svn_branch__state_t **new_branch_p,
          svn_branch__txn_t *txn,
          svn_branch__rev_bid_eid_t *from,
          svn_branch__state_t *to_outer_branch,
          svn_branch__eid_t to_outer_parent_eid,
          const char *new_name,
          apr_pool_t *result_pool,
          apr_pool_t *scratch_pool)
{
  const char *to_outer_branch_id
    = to_outer_branch ? svn_branch__get_id(to_outer_branch, scratch_pool) : NULL;
  int to_outer_eid;
  const char *new_branch_id;
  svn_branch__state_t *new_branch;
  svn_branch__history_t *history;
  const char *to_path
    = branch_peid_name_to_path(to_outer_branch,
                               to_outer_parent_eid, new_name, scratch_pool);

  /* assign new eid to root element (outer branch) */
  SVN_ERR(svn_branch__txn_new_eid(txn, &to_outer_eid, scratch_pool));

  new_branch_id = svn_branch__id_nest(to_outer_branch_id, to_outer_eid,
                                      scratch_pool);
  /* Open the new branch populated from FROM's subtree (tree_ref = FROM). */
  SVN_ERR(svn_branch__txn_open_branch(txn, &new_branch,
                                      new_branch_id, from->eid, from,
                                      result_pool, scratch_pool));
  /* History: the new branch has exactly one parent, FROM. */
  history = svn_branch__history_create_empty(scratch_pool);
  SVN_ERR(svn_branch__history_add_parent(history, from->rev, from->bid,
                                         scratch_pool));
  SVN_ERR(svn_branch__state_set_history(new_branch, history, scratch_pool));
  /* Create the subbranch-root element in the outer branch. */
  SVN_ERR(svn_branch__state_alter_one(to_outer_branch, to_outer_eid,
                                      to_outer_parent_eid, new_name,
                                      svn_element__payload_create_subbranch(
                                        scratch_pool), scratch_pool));

  svnmover_notify_v("A+   %s (branch %s)",
                    to_path,
                    new_branch->bid);

  if (new_branch_p)
    *new_branch_p = new_branch;
  return SVN_NO_ERROR;
}
+
/* Make a new top-level branch (one with no outer branch) populated from
 * the subtree FROM.  Set *NEW_BRANCH_P (if non-null) to the new branch,
 * allocated in RESULT_POOL.
 *
 * Unlike do_branch(), no subbranch-root element is created anywhere and
 * no history parent is recorded.
 */
static svn_error_t *
do_topbranch(svn_branch__state_t **new_branch_p,
             svn_branch__txn_t *txn,
             svn_branch__rev_bid_eid_t *from,
             apr_pool_t *result_pool,
             apr_pool_t *scratch_pool)
{
  int outer_eid;
  const char *new_branch_id;
  svn_branch__state_t *new_branch;

  SVN_ERR(svn_branch__txn_new_eid(txn, &outer_eid, scratch_pool));
  new_branch_id = svn_branch__id_nest(NULL /*outer_branch*/, outer_eid,
                                      scratch_pool);
  SVN_ERR(svn_branch__txn_open_branch(txn, &new_branch,
                                      new_branch_id, from->eid, from,
                                      result_pool, scratch_pool));

  svnmover_notify_v("A+   (branch %s)",
                    new_branch->bid);

  if (new_branch_p)
    *new_branch_p = new_branch;
  return SVN_NO_ERROR;
}
+
/* Branch the subtree of FROM_BRANCH found at FROM_EID, to appear
 * in the existing branch TO_BRANCH at TO_PARENT_EID:NEW_NAME.
 *
 * This is like merging the creation of the source subtree into TO_BRANCH.
 *
 * Any elements of the source subtree that already exist in TO_BRANCH
 * are altered. This is like resolving any merge conflicts as 'theirs'.
 *
 * (### Sometimes the user might prefer that we throw an error if any
 * element of the source subtree already exists in TO_BRANCH.)
 *
 * Return an SVN_BRANCH__ERR error if FROM_BRANCH:FROM_EID does not exist.
 */
static svn_error_t *
do_branch_into(svn_branch__state_t *from_branch,
               int from_eid,
               svn_branch__state_t *to_branch,
               svn_branch__eid_t to_parent_eid,
               const char *new_name,
               apr_pool_t *scratch_pool)
{
  svn_branch__subtree_t *from_subtree;
  svn_element__content_t *new_root_content;
  const char *to_path = branch_peid_name_to_path(to_branch, to_parent_eid,
                                                 new_name, scratch_pool);

  /* Source element must exist */
  if (! svn_branch__get_path_by_eid(from_branch, from_eid, scratch_pool))
    {
      return svn_error_createf(SVN_BRANCH__ERR, NULL,
                               _("Cannot branch from %s e%d: "
                                 "does not exist"),
                               svn_branch__get_id(
                                 from_branch, scratch_pool), from_eid);
    }

  SVN_ERR(svn_branch__get_subtree(from_branch, &from_subtree, from_eid,
                                  scratch_pool));

  /* Change this subtree's root element to TO_PARENT_EID/NEW_NAME, keeping
     the root's existing payload. */
  new_root_content
    = svn_element__tree_get(from_subtree->tree, from_subtree->tree->root_eid);
  new_root_content
    = svn_element__content_create(to_parent_eid, new_name,
                                  new_root_content->payload, scratch_pool);
  svn_element__tree_set(from_subtree->tree, from_subtree->tree->root_eid,
                        new_root_content);

  /* Populate the new branch mapping */
  SVN_ERR(svn_branch__instantiate_elements_r(to_branch, *from_subtree,
                                             scratch_pool));
  svnmover_notify_v("A+   %s (subtree)",
                    to_path);

  return SVN_NO_ERROR;
}
+
/* Copy-and-delete: one way to emulate a cross-branch move.
 *
 *      copy the subtree at EL_REV to TO_BRANCH:TO_PARENT_EID:TO_NAME
 *      delete the subtree at EL_REV
 *
 * The copied elements get new eids, so element identity is not preserved.
 * EL_REV must not be a branch-root element (asserted).
 */
static svn_error_t *
do_copy_and_delete(svn_branch__el_rev_id_t *el_rev,
                   svn_branch__state_t *to_branch,
                   int to_parent_eid,
                   const char *to_name,
                   apr_pool_t *scratch_pool)
{
  /* Capture the source path before the delete makes it unavailable. */
  const char *from_path
    = svn_branch__get_rrpath_by_eid(el_rev->branch, el_rev->eid, scratch_pool);

  SVN_ERR_ASSERT(! is_branch_root_element(el_rev->branch, el_rev->eid));

  SVN_ERR(do_copy(el_rev, to_branch, to_parent_eid, to_name,
                  scratch_pool));

  SVN_ERR(svn_branch__state_delete_one(el_rev->branch, el_rev->eid,
                                       scratch_pool));
  svnmover_notify_v("D    %s", from_path);

  return SVN_NO_ERROR;
}
+
/* Branch-and-delete: another way to emulate a cross-branch move.
 *
 *      branch the subtree at EL_REV creating a new nested branch at
 *        TO_BRANCH:TO_PARENT_EID:TO_NAME,
 *        or creating a new top-level branch if TO_BRANCH is null;
 *      delete the subtree at EL_REV
 *
 * EL_REV must not be a branch-root element (asserted).
 */
static svn_error_t *
do_branch_and_delete(svn_branch__txn_t *edit_txn,
                     svn_branch__el_rev_id_t *el_rev,
                     svn_branch__state_t *to_outer_branch,
                     int to_outer_parent_eid,
                     const char *to_name,
                     apr_pool_t *scratch_pool)
{
  const char *from_branch_id = svn_branch__get_id(el_rev->branch,
                                                  scratch_pool);
  svn_branch__rev_bid_eid_t *from
    = svn_branch__rev_bid_eid_create(el_rev->rev, from_branch_id,
                                     el_rev->eid, scratch_pool);
  svn_branch__state_t *new_branch;
  /* Capture the source path before the delete makes it unavailable. */
  const char *from_path
    = svn_branch__get_rrpath_by_eid(el_rev->branch, el_rev->eid, scratch_pool);

  SVN_ERR_ASSERT(! is_branch_root_element(el_rev->branch, el_rev->eid));

  SVN_ERR(do_branch(&new_branch, edit_txn, from,
                    to_outer_branch, to_outer_parent_eid, to_name,
                    scratch_pool, scratch_pool));

  SVN_ERR(svn_branch__state_delete_one(el_rev->branch, el_rev->eid,
                                       scratch_pool));
  svnmover_notify_v("D    %s", from_path);

  return SVN_NO_ERROR;
}
+
/* Branch-into-and-delete: a third way to emulate a cross-branch move.
 *
 * (Previously, confusingly, called 'branch-and-delete'.)
 *
 * The target branch is different from the source branch (asserted).
 *
 *      delete elements from source branch
 *      instantiate (or update) same elements in target branch
 *
 * For each element being moved, if the element already exists in TO_BRANCH,
 * the effect is as if the existing element in TO_BRANCH was first deleted.
 *
 * Unlike copy-and-delete, this preserves element ids across branches.
 * EL_REV must not be a branch-root element (asserted).
 */
static svn_error_t *
do_branch_into_and_delete(svn_branch__el_rev_id_t *el_rev,
                          svn_branch__state_t *to_branch,
                          int to_parent_eid,
                          const char *to_name,
                          apr_pool_t *scratch_pool)
{
  /* Capture the source path before the delete makes it unavailable. */
  const char *from_path
    = svn_branch__get_rrpath_by_eid(el_rev->branch, el_rev->eid, scratch_pool);

  SVN_ERR_ASSERT(! is_branch_root_element(el_rev->branch, el_rev->eid));

  /* This is supposed to be used for moving to a *different* branch.
     In fact, this method would also work for moving within one
     branch, but we don't currently want to use it for that purpose. */
  SVN_ERR_ASSERT(! BRANCH_IS_SAME_BRANCH(el_rev->branch, to_branch,
                                         scratch_pool));

  /* Merge the "creation of the source" to the target (aka branch-into) */
  SVN_ERR(do_branch_into(el_rev->branch, el_rev->eid,
                         to_branch, to_parent_eid, to_name,
                         scratch_pool));

  SVN_ERR(svn_branch__state_delete_one(el_rev->branch, el_rev->eid,
                                       scratch_pool));
  svnmover_notify_v("D    %s", from_path);

  return SVN_NO_ERROR;
}
+
/* Interactive options for moving to another branch.
 *
 * Prompt the user to choose one of copy-and-delete ('c'),
 * branch-and-delete ('b'), or branch-into-and-delete ('i') to move
 * EL_REV to TO_PARENT_EL_REV:TO_NAME, and perform the chosen action.
 * Pressing just <enter> (or cancelling / EOF) does nothing.
 */
static svn_error_t *
do_interactive_cross_branch_move(svn_branch__txn_t *txn,
                                 svn_branch__el_rev_id_t *el_rev,
                                 svn_branch__el_rev_id_t *to_parent_el_rev,
                                 const char *to_name,
                                 apr_pool_t *scratch_pool)
{
  svn_error_t *err;
  const char *input;

  /* ### Non-interactive mode is not yet wired up; when it is, we should
     error out instead of prompting. */
  if (0 /*### if non-interactive*/)
    {
      return svn_error_createf(SVN_BRANCH__ERR, NULL,
        _("mv: The source and target are in different branches. "
          "Some ways to move content to a different branch are, "
          "depending on the effect you want to achieve: "
          "copy-and-delete, branch-and-delete, branch-into-and-delete"));
    }

  svnmover_notify_v(
    _("mv: The source and target are in different branches. "
      "Some ways to move content to a different branch are, "
      "depending on the effect you want to achieve:\n"
      "  c: copy-and-delete: cp SOURCE TARGET; rm SOURCE\n"
      "  b: branch-and-delete: branch SOURCE TARGET; rm SOURCE\n"
      "  i: branch-into-and-delete: branch-into SOURCE TARGET; rm SOURCE\n"
      "We can do one of these for you now if you wish.\n"
    ));

  settext_stderr(TEXT_FG_YELLOW);
  err = svn_cmdline_prompt_user2(
          &input,
          "Your choice (c, b, i, or just <enter> to do nothing): ",
          NULL, scratch_pool);
  settext(TEXT_RESET);
  /* Treat cancel/EOF as "do nothing", not as a failure. */
  if (err && (err->apr_err == SVN_ERR_CANCELLED || err->apr_err == APR_EOF))
    {
      svn_error_clear(err);
      return SVN_NO_ERROR;
    }
  SVN_ERR(err);

  if (input[0] == 'c' || input[0] == 'C')
    {
      svnmover_notify_v("Performing 'copy-and-delete SOURCE TARGET'");

      SVN_ERR(do_copy_and_delete(el_rev,
                                 to_parent_el_rev->branch,
                                 to_parent_el_rev->eid, to_name,
                                 scratch_pool));
    }
  else if (input[0] == 'b' || input[0] == 'B')
    {
      svnmover_notify_v("Performing 'branch-and-delete SOURCE TARGET'");

      SVN_ERR(do_branch_and_delete(txn, el_rev,
                                   to_parent_el_rev->branch,
                                   to_parent_el_rev->eid, to_name,
                                   scratch_pool));
    }
  else if (input[0] == 'i' || input[0] == 'I')
    {
      svnmover_notify_v("Performing 'branch-into-and-delete SOURCE TARGET'");
      svnmover_notify_v(
        "In the current implementation of this experimental UI, each element "
        "instance from the source branch subtree will overwrite any instance "
        "of the same element that already exists in the target branch."
        );
      /* We could instead either throw an error or fall back to copy-and-delete
         if any moved element already exists in target branch. */

      SVN_ERR(do_branch_into_and_delete(el_rev,
                                        to_parent_el_rev->branch,
                                        to_parent_el_rev->eid, to_name,
                                        scratch_pool));
    }

  return SVN_NO_ERROR;
}
+
/* Move (within one branch): re-parent and/or rename the element at EL_REV
 * to TO_PARENT_EL_REV:TO_NAME, keeping its payload unchanged.
 * Notify with a "V" line. */
static svn_error_t *
do_move(svn_branch__el_rev_id_t *el_rev,
        svn_branch__el_rev_id_t *to_parent_el_rev,
        const char *to_name,
        apr_pool_t *scratch_pool)
{
  const char *from_path = el_rev_id_to_path(el_rev, scratch_pool);
  const char *to_path
    = branch_peid_name_to_path(to_parent_el_rev->branch,
                               to_parent_el_rev->eid, to_name, scratch_pool);
  /* New payload shall be the same as before */
  svn_element__content_t *existing_element;

  SVN_ERR(svn_branch__state_get_element(el_rev->branch, &existing_element,
                                        el_rev->eid, scratch_pool));
  SVN_ERR(svn_branch__state_alter_one(el_rev->branch, el_rev->eid,
                                      to_parent_el_rev->eid, to_name,
                                      existing_element->payload, scratch_pool));
  svnmover_notify_v("V    %s (from %s)",
                    to_path, from_path);
  return SVN_NO_ERROR;
}
+
/* This commit callback prints not only a commit summary line but also
 * a log-style summary of the changes.
 *
 * Implements the svn_commit_callback2_t contract: record the new revision
 * number in the commit_callback_baton_t for the caller's later use.
 */
static svn_error_t *
commit_callback(const svn_commit_info_t *commit_info,
                void *baton,
                apr_pool_t *pool)
{
  commit_callback_baton_t *b = baton;

  svnmover_notify("Committed r%ld:", commit_info->revision);

  b->revision = commit_info->revision;
  return SVN_NO_ERROR;
}
+
/* Display a diff of the commit described by CCBB: the changes between
 * the WC base branch in the previous head txn and the committed branch
 * in the edit txn, including sub-branches. */
static svn_error_t *
display_diff_of_commit(const commit_callback_baton_t *ccbb,
                       apr_pool_t *scratch_pool)
{
  /* Left side: the WC base branch as of the revision the edit was based on. */
  svn_branch__txn_t *previous_head_txn
    = svn_branch__repos_get_base_revision_root(ccbb->edit_txn);
  svn_branch__state_t *base_branch
    = svn_branch__txn_get_branch_by_id(previous_head_txn,
                                       ccbb->wc_base_branch_id,
                                       scratch_pool);
  /* Right side: the branch that was just committed. */
  svn_branch__state_t *committed_branch
    = svn_branch__txn_get_branch_by_id(ccbb->edit_txn,
                                       ccbb->wc_commit_branch_id,
                                       scratch_pool);
  svn_branch__el_rev_id_t *el_rev_left
    = svn_branch__el_rev_id_create(base_branch, svn_branch__root_eid(base_branch),
                                   base_branch->txn->rev,
                                   scratch_pool);
  svn_branch__el_rev_id_t *el_rev_right
    = svn_branch__el_rev_id_create(committed_branch,
                                   svn_branch__root_eid(committed_branch),
                                   committed_branch->txn->rev,
                                   scratch_pool);

  SVN_ERR(branch_diff_r(el_rev_left, el_rev_right,
                        show_subtree_diff, "   ",
                        scratch_pool));
  return SVN_NO_ERROR;
}
+
/* Commit the WC working state, refusing to do so while unresolved
 * conflicts exist.  Set *NEW_REV_P to the committed revision (see
 * wc_commit for the no-changes case). */
static svn_error_t *
commit(svn_revnum_t *new_rev_p,
       svnmover_wc_t *wc,
       apr_hash_t *revprops,
       apr_pool_t *scratch_pool)
{
  if (svnmover_any_conflicts(wc->conflicts))
    {
      return svn_error_createf(SVN_BRANCH__ERR, NULL,
                               _("Cannot commit because there are "
                                 "unresolved conflicts"));
    }

  /* Complete the old edit drive (editing the WC working state) */
  SVN_ERR(svn_branch__txn_sequence_point(wc->edit_txn, scratch_pool));

  /* Just as in execute() the pool must be a subpool of wc->pool. */
  SVN_ERR(wc_commit(new_rev_p, wc, revprops, wc->pool));

  return SVN_NO_ERROR;
}
+
/* Commit.
 *
 * Set *NEW_REV_P to the committed revision number. Update the WC base of
 * each committed element to that revision.
 *
 * If there are no changes to commit, set *NEW_REV_P to SVN_INVALID_REVNUM
 * and do not make a commit.
 *
 * NEW_REV_P may be null if not wanted.
 */
static svn_error_t *
do_commit(svn_revnum_t *new_rev_p,
          svnmover_wc_t *wc,
          apr_hash_t *revprops,
          apr_pool_t *scratch_pool)
{
  svn_revnum_t new_rev;

  /* commit() requires a non-null output parameter, hence the local. */
  SVN_ERR(commit(&new_rev, wc, revprops, scratch_pool));

  if (new_rev_p)
    *new_rev_p = new_rev;
  return SVN_NO_ERROR;
}
+
/* Revert all uncommitted changes in WC, by replaying the inverse of the
 * current edit (working -> base) onto the working branch, and discard any
 * recorded conflicts. */
static svn_error_t *
do_revert(svnmover_wc_t *wc,
          apr_pool_t *scratch_pool)
{
  /* Replay the inverse of the current edit txn, into the current edit txn */
  SVN_ERR(replay(wc->edit_txn, wc->working->branch,
                 wc->working->branch,
                 wc->base->branch,
                 scratch_pool));
  /* Nothing uncommitted remains, so no conflicts remain either. */
  wc->conflicts = NULL;

  return SVN_NO_ERROR;
}
+
/* Migration replay baton: state shared by the svn_ra_replay_range
 * callbacks used by do_migrate(). */
typedef struct migrate_replay_baton_t {
  /* The branching txn the replayed changes are edited into. */
  svn_branch__txn_t *edit_txn;
  /* RA session the replay reports are fetched from. */
  svn_ra_session_t *from_session;
  /* Hash (by revnum) of array of svn_repos_move_info_t. */
  apr_hash_t *moves;
} migrate_replay_baton_t;
+
/* Callback function for svn_ra_replay_range, invoked when starting to parse
 * a replay report.
 *
 * Set *EDITOR/*EDIT_BATON to a migration editor that edits REVISION into
 * the branching txn, wrapped in a debug editor that traces each call with
 * a "migrate: " prefix.
 */
static svn_error_t *
migrate_replay_rev_started(svn_revnum_t revision,
                           void *replay_baton,
                           const svn_delta_editor_t **editor,
                           void **edit_baton,
                           apr_hash_t *rev_props,
                           apr_pool_t *pool)
{
  migrate_replay_baton_t *rb = replay_baton;
  const svn_delta_editor_t *old_editor;
  void *old_edit_baton;

  svnmover_notify("migrate: start r%ld", revision);

  SVN_ERR(svn_branch__compat_get_migration_editor(&old_editor, &old_edit_baton,
                                                  rb->edit_txn,
                                                  rb->from_session, revision,
                                                  pool));
  /* Wrap in a tracing editor so each edit operation is logged. */
  SVN_ERR(svn_delta__get_debug_editor(&old_editor, &old_edit_baton,
                                      old_editor, old_edit_baton,
                                      "migrate: ", pool));

  *editor = old_editor;
  *edit_baton = old_edit_baton;

  return SVN_NO_ERROR;
}
+
/* Callback function for svn_ra_replay_range, invoked when finishing parsing
 * a replay report.
 *
 * Close the edit for REVISION and print any move chains that were detected
 * in that revision (from the baton's MOVES hash).
 */
static svn_error_t *
migrate_replay_rev_finished(svn_revnum_t revision,
                            void *replay_baton,
                            const svn_delta_editor_t *editor,
                            void *edit_baton,
                            apr_hash_t *rev_props,
                            apr_pool_t *pool)
{
  migrate_replay_baton_t *rb = replay_baton;
  /* MOVES is keyed by revnum value, not by pointer. */
  apr_array_header_t *moves_in_revision
    = apr_hash_get(rb->moves, &revision, sizeof(revision));

  SVN_ERR(editor->close_edit(edit_baton, pool));

  svnmover_notify("migrate: moves in revision r%ld:", revision);

  if (moves_in_revision)
    {
      int i;

      for (i = 0; i < moves_in_revision->nelts; i++)
        {
          svn_repos_move_info_t *this_move
            = APR_ARRAY_IDX(moves_in_revision, i, void *);

          if (this_move)
            {
              svnmover_notify("%s",
                              svn_client__format_move_chain_for_display(this_move,
                                                                        "", pool));
            }
        }
    }

  return SVN_NO_ERROR;
}
+
/* Migrate changes from non-move-tracking revisions.
 *
 * Replay revisions START_REVISION to END_REVISION (inclusive) from the
 * WC's RA session into the WC's edit txn, after scanning the repository
 * log for move information over the same range.
 *
 * Return an SVN_ERR_INCORRECT_PARAMS error if the range is not within
 * [1, head].
 */
static svn_error_t *
do_migrate(svnmover_wc_t *wc,
           svn_revnum_t start_revision,
           svn_revnum_t end_revision,
           apr_pool_t *scratch_pool)
{
  migrate_replay_baton_t *rb = apr_pcalloc(scratch_pool, sizeof(*rb));

  if (start_revision < 1 || end_revision < 1
      || start_revision > end_revision
      || end_revision > wc->head_revision)
    {
      return svn_error_createf(SVN_ERR_INCORRECT_PARAMS, NULL,
                               _("migrate: Bad revision range (%ld to %ld); "
                                 "minimum is 1 and maximum (head) is %ld"),
                               start_revision, end_revision,
                               wc->head_revision);
    }

  /* Scan the repository log for move info */
  SVN_ERR(svn_client__get_repos_moves(&rb->moves,
                                      "" /*(unused)*/,
                                      wc->ra_session,
                                      start_revision, end_revision,
                                      wc->ctx, scratch_pool, scratch_pool));

  rb->edit_txn = wc->edit_txn;
  rb->from_session = wc->ra_session;
  /* low_water_mark=0, send_deltas=TRUE */
  SVN_ERR(svn_ra_replay_range(rb->from_session,
                              start_revision, end_revision,
                              0, TRUE,
                              migrate_replay_rev_started,
                              migrate_replay_rev_finished,
                              rb, scratch_pool));
  return SVN_NO_ERROR;
}
+
/* Print the history parents of BRANCH: first the main parent (labelled
 * differently depending on whether it is the same branch or the branch
 * it was branched from), then every other parent as a merge parent. */
static svn_error_t *
show_branch_history(svn_branch__state_t *branch,
                    apr_pool_t *scratch_pool)
{
  svn_branch__history_t *history;
  svn_branch__rev_bid_t *main_parent;
  apr_hash_index_t *hi;

  SVN_ERR(svn_branch__state_get_history(branch, &history, scratch_pool));

  SVN_ERR(find_branch_main_parent(branch, &main_parent, scratch_pool));
  if (main_parent)
    {
      if (strcmp(main_parent->bid, branch->bid) == 0)
        {
          /* Same branch id: straight-line history. */
          svnmover_notify("  main parent: r%ld.%s",
                          main_parent->rev, main_parent->bid);
        }
      else
        {
          svnmover_notify("  main parent (branched from): r%ld.%s",
                          main_parent->rev, main_parent->bid);
        }
    }
  for (hi = apr_hash_first(scratch_pool, history->parents);
       hi; hi = apr_hash_next(hi))
    {
      svn_branch__rev_bid_t *parent = apr_hash_this_val(hi);

      /* The main parent was already printed above; skip it here. */
      if (! svn_branch__rev_bid_equal(parent, main_parent))
        {
          svnmover_notify("  other parent (complete merge): r%ld.%s",
                          parent->rev, parent->bid);
        }
    }

  return SVN_NO_ERROR;
}
+
/* Show info about element E.
 *
 * TODO: Show different info for a repo element versus a WC element.
 *
 * A WC working element is identified by E->rev == SVN_INVALID_REVNUM;
 * for it, show base/working branch info and whether it is modified.
 * For a repo element, show its revision and branch.
 */
static svn_error_t *
do_info(svnmover_wc_t *wc,
        svn_branch__el_rev_id_t *e,
        apr_pool_t *scratch_pool)
{
  svnmover_notify("Element Id: %d%s",
                  e->eid,
                  is_branch_root_element(e->branch, e->eid)
                    ? " (branch root)" : "");

  /* Show WC info for a WC working element, or repo info for a repo element */
  if (e->rev == SVN_INVALID_REVNUM)
    {
      svn_branch__state_t *base_branch, *work_branch;
      svn_revnum_t base_rev;
      svn_element__content_t *e_base, *e_work;
      svn_boolean_t is_modified;

      /* Look up the same branch id in the WC's base and working txns. */
      base_branch = svn_branch__txn_get_branch_by_id(
                      wc->base->branch->txn, e->branch->bid, scratch_pool);
      work_branch = svn_branch__txn_get_branch_by_id(
                      wc->working->branch->txn, e->branch->bid, scratch_pool);
      base_rev = svnmover_wc_get_base_rev(wc, base_branch, e->eid, scratch_pool);
      SVN_ERR(svn_branch__state_get_element(base_branch, &e_base,
                                            e->eid, scratch_pool));
      SVN_ERR(svn_branch__state_get_element(work_branch, &e_work,
                                            e->eid, scratch_pool));
      /* Modified means: base and working content differ in any way. */
      is_modified = !svn_element__content_equal(e_base, e_work,
                                                scratch_pool);

      svnmover_notify("Base Revision: %ld", base_rev);
      svnmover_notify("Base Branch:    %s", base_branch->bid);
      svnmover_notify("Working Branch: %s", work_branch->bid);
      svnmover_notify("Modified:      %s", is_modified ? "yes" : "no");
    }
  else
    {
      svnmover_notify("Revision: %ld", e->rev);
      svnmover_notify("Branch:    %s", e->branch->bid);
    }

  return SVN_NO_ERROR;
}
+
+
/* A parsed command-line argument: the path/name it denotes, the element
 * it resolves to, and the element of its parent directory. */
typedef struct arg_t
{
  const char *path_name;
  svn_branch__el_rev_id_t *el_rev, *parent_el_rev;
} arg_t;
+
/* Argument-validation macros for subcommand implementations.  Each checks
 * argument I of the current ACTION (expecting locals `arg`, `action`, and
 * where noted `pool` in scope) and returns an SVN_BRANCH__ERR error naming
 * the operation OP when the check fails. */

/* Error unless argument I carries an explicit revision number. */
#define VERIFY_REV_SPECIFIED(op, i)                                     \
  if (arg[i]->el_rev->rev == SVN_INVALID_REVNUM)                        \
    return svn_error_createf(SVN_BRANCH__ERR, NULL,                     \
                             _("%s: '%s': revision number required"),   \
                             op, action->relpath[i]);

/* Error if argument I carries an explicit revision number. */
#define VERIFY_REV_UNSPECIFIED(op, i)                                   \
  if (arg[i]->el_rev->rev != SVN_INVALID_REVNUM)                        \
    return svn_error_createf(SVN_BRANCH__ERR, NULL,                     \
                             _("%s: '%s@...': revision number not allowed"), \
                             op, action->relpath[i]);

/* Error if an element already exists at argument I's path. */
#define VERIFY_EID_NONEXISTENT(op, i)                                   \
  if (arg[i]->el_rev->eid != -1)                                        \
    return svn_error_createf(SVN_BRANCH__ERR, NULL,                     \
                             _("%s: Element already exists at path '%s'"), \
                             op, action->relpath[i]);

/* Error unless an element exists at argument I's path. */
#define VERIFY_EID_EXISTS(op, i)                                        \
  if (arg[i]->el_rev->eid == -1)                                        \
    return svn_error_createf(SVN_BRANCH__ERR, NULL,                     \
                             _("%s: Element not found at path '%s%s'"), \
                             op, action->relpath[i],                    \
                             action->rev_spec[i].kind == svn_opt_revision_unspecified \
                               ? "" : "@...");

/* Error unless the parent directory of argument I's path exists. */
#define VERIFY_PARENT_EID_EXISTS(op, i)                                 \
  if (arg[i]->parent_el_rev->eid == -1)                                 \
    return svn_error_createf(SVN_BRANCH__ERR, NULL,                     \
                             _("%s: Element not found at path '%s'"),   \
                             op, svn_relpath_dirname(action->relpath[i], pool));

/* Error if argument J's target parent lies inside argument I's subtree
 * (i.e. the move/copy target would be nested inside the source). */
#define VERIFY_NOT_CHILD_OF_SELF(op, i, j, pool)                        \
  if (svn_relpath_skip_ancestor(                                        \
        svn_branch__get_rrpath_by_eid(arg[i]->el_rev->branch,           \
                                      arg[i]->el_rev->eid, pool),       \
        svn_branch__get_rrpath_by_eid(arg[j]->parent_el_rev->branch,    \
                                      arg[j]->parent_el_rev->eid, pool))) \
    return svn_error_createf(SVN_BRANCH__ERR, NULL,                     \
                             _("%s: The specified target is nested "    \
                               "inside the source"), op);
+
+/* If EL_REV refers to the root element of a nested branch, rewrite it
+ * in place to refer to the corresponding subbranch-root element of the
+ * outer branch.
+ *
+ * If EL_REV is the root element of a top-level branch (no outer branch
+ * exists), return an error; OP names the user command for the message.
+ */
+static svn_error_t *
+point_to_outer_element_instead(svn_branch__el_rev_id_t *el_rev,
+                               const char *op,
+                               apr_pool_t *scratch_pool)
+{
+  svn_branch__state_t *containing_branch;
+  int eid_in_outer;
+
+  /* Nothing to do unless EL_REV is a branch root. */
+  if (! is_branch_root_element(el_rev->branch, el_rev->eid))
+    return SVN_NO_ERROR;
+
+  svn_branch__get_outer_branch_and_eid(&containing_branch, &eid_in_outer,
+                                       el_rev->branch, scratch_pool);
+
+  if (! containing_branch)
+    return svn_error_createf(SVN_BRANCH__ERR, NULL, "%s: %s", op,
+                             _("svnmover cannot delete or move a "
+                               "top-level branch"));
+
+  el_rev->eid = eid_in_outer;
+  el_rev->branch = containing_branch;
+
+  return SVN_NO_ERROR;
+}
+
+/* Execute the sequence of subcommands ACTIONS against working copy WC.
+ *
+ * Interpret each action's relative paths as relative to ANCHOR_URL,
+ * which must be at or below WC's repository root.  REVPROPS are the
+ * revision properties to attach to any revision created by a 'commit'
+ * action.  CTX is the client context (not referenced directly here).
+ *
+ * Stop and return the error of the first failing action.  After each
+ * action other than a commit, append the action's words to
+ * WC->list_of_commands, the replayable log of the uncommitted change.
+ */
+static svn_error_t *
+execute(svnmover_wc_t *wc,
+        const apr_array_header_t *actions,
+        const char *anchor_url,
+        apr_hash_t *revprops,
+        svn_client_ctx_t *ctx,
+        apr_pool_t *pool)
+{
+  const char *base_relpath;
+  apr_pool_t *iterpool = svn_pool_create(pool);
+  int i;
+
+  base_relpath = svn_uri_skip_ancestor(wc->repos_root_url, anchor_url, pool);
+
+  for (i = 0; i < actions->nelts; ++i)
+    {
+      action_t *action = APR_ARRAY_IDX(actions, i, action_t *);
+      int j;
+      arg_t *arg[3] = { NULL, NULL, NULL };
+
+      svn_pool_clear(iterpool);
+
+      /* Before translating paths to/from elements, need a sequence point */
+      SVN_ERR(svn_branch__txn_sequence_point(wc->edit_txn, iterpool));
+
+      /* Convert each ACTION[j].{relpath, rev_spec} to
+         (EL_REV[j], PARENT_EL_REV[j], PATH_NAME[j], REVNUM[j]),
+         except for the local-path argument of a 'put' command. */
+      for (j = 0; j < 3; j++)
+        {
+          if (action->relpath[j]
+              && ! (action->action == ACTION_PUT_FILE && j == 0))
+            {
+              const char *rrpath, *parent_rrpath;
+
+              arg[j] = apr_palloc(iterpool, sizeof(*arg[j]));
+
+              rrpath = svn_relpath_join(base_relpath, action->relpath[j],
+                                        iterpool);
+              parent_rrpath = svn_relpath_dirname(rrpath, iterpool);
+
+              arg[j]->path_name = svn_relpath_basename(rrpath, NULL);
+              SVN_ERR(find_el_rev_by_rrpath_rev(&arg[j]->el_rev, wc,
+                                                &action->rev_spec[j],
+                                                action->branch_id[j],
+                                                rrpath,
+                                                iterpool, iterpool));
+              SVN_ERR(find_el_rev_by_rrpath_rev(&arg[j]->parent_el_rev, wc,
+                                                &action->rev_spec[j],
+                                                action->branch_id[j],
+                                                parent_rrpath,
+                                                iterpool, iterpool));
+            }
+        }
+
+      switch (action->action)
+        {
+        case ACTION_INFO_WC:
+          {
+            svn_boolean_t is_modified;
+            svn_revnum_t base_rev_min, base_rev_max;
+
+            SVN_ERR(txn_is_changed(wc->working->branch->txn, &is_modified,
+                                   iterpool));
+            SVN_ERR(svnmover_wc_get_base_revs(wc, &base_rev_min, &base_rev_max,
+                                              iterpool));
+
+            svnmover_notify("Repository Root: %s", wc->repos_root_url);
+            if (base_rev_min == base_rev_max)
+              svnmover_notify("Base Revision: %ld", base_rev_min);
+            else
+              svnmover_notify("Base Revisions: %ld to %ld",
+                              base_rev_min, base_rev_max);
+            svnmover_notify("Base Branch: %s", wc->base->branch->bid);
+            svnmover_notify("Working Branch: %s", wc->working->branch->bid);
+            SVN_ERR(show_branch_history(wc->working->branch, iterpool));
+            svnmover_notify("Modified: %s", is_modified ? "yes" : "no");
+          }
+          break;
+
+        case ACTION_INFO:
+          VERIFY_EID_EXISTS("info", 0);
+          {
+            /* If it's a nested branch root, show info for the outer element
+               first, and then for the inner element. */
+            if (is_branch_root_element(arg[0]->el_rev->branch,
+                                       arg[0]->el_rev->eid))
+              {
+                svn_branch__state_t *outer_branch;
+                int outer_eid;
+
+                svn_branch__get_outer_branch_and_eid(&outer_branch, &outer_eid,
+                                                     arg[0]->el_rev->branch,
+                                                     iterpool);
+                if (outer_branch)
+                  {
+                    svn_branch__el_rev_id_t *outer_e
+                      = svn_branch__el_rev_id_create(outer_branch, outer_eid,
+                                                     arg[0]->el_rev->rev,
+                                                     iterpool);
+                    SVN_ERR(do_info(wc, outer_e, iterpool));
+                  }
+              }
+            SVN_ERR(do_info(wc, arg[0]->el_rev, iterpool));
+          }
+          break;
+
+        case ACTION_LIST_CONFLICTS:
+          {
+            if (svnmover_any_conflicts(wc->conflicts))
+              {
+                SVN_ERR(svnmover_display_conflicts(wc->conflicts, iterpool));
+              }
+          }
+          break;
+
+        case ACTION_RESOLVED_CONFLICT:
+          {
+            if (svnmover_any_conflicts(wc->conflicts))
+              {
+                SVN_ERR(svnmover_conflict_resolved(wc->conflicts,
+                                                   action->relpath[0],
+                                                   iterpool));
+              }
+            else
+              {
+                return svn_error_create(SVN_BRANCH__ERR, NULL,
+                                        _("No conflicts are currently flagged"));
+              }
+          }
+          break;
+
+        case ACTION_DIFF:
+          VERIFY_EID_EXISTS("diff", 0);
+          VERIFY_EID_EXISTS("diff", 1);
+          {
+            SVN_ERR(branch_diff_r(arg[0]->el_rev /*from*/,
+                                  arg[1]->el_rev /*to*/,
+                                  show_subtree_diff, "",
+                                  iterpool));
+          }
+          break;
+
+        case ACTION_STATUS:
+          {
+            /* Status is a diff of the whole WC: base root vs working root. */
+            svn_branch__el_rev_id_t *from, *to;
+
+            from = svn_branch__el_rev_id_create(wc->base->branch,
+                                                svn_branch__root_eid(wc->base->branch),
+                                                SVN_INVALID_REVNUM, iterpool);
+            to = svn_branch__el_rev_id_create(wc->working->branch,
+                                              svn_branch__root_eid(wc->working->branch),
+                                              SVN_INVALID_REVNUM, iterpool);
+            SVN_ERR(branch_diff_r(from, to,
+                                  show_subtree_diff, "",
+                                  iterpool));
+          }
+          break;
+
+        case ACTION_LOG:
+          VERIFY_EID_EXISTS("log", 0);
+          VERIFY_EID_EXISTS("log", 1);
+          {
+            SVN_ERR(do_log(arg[0]->el_rev /*from*/,
+                           arg[1]->el_rev /*to*/,
+                           iterpool));
+          }
+          break;
+
+        case ACTION_LIST_BRANCHES:
+          {
+            VERIFY_EID_EXISTS("branches", 0);
+            if (the_ui_mode == UI_MODE_PATHS)
+              {
+                svnmover_notify_v("branches rooted at same element as '%s':",
+                                  action->relpath[0]);
+              }
+            else
+              {
+                svnmover_notify_v("branches rooted at e%d:",
+                                  arg[0]->el_rev->eid);
+              }
+            SVN_ERR(list_branches(
+                      arg[0]->el_rev->branch->txn,
+                      arg[0]->el_rev->eid,
+                      FALSE, iterpool));
+          }
+          break;
+
+        case ACTION_LIST_BRANCHES_R:
+          {
+            if (the_ui_mode == UI_MODE_SERIAL)
+              {
+                svn_stream_t *stream;
+                SVN_ERR(svn_stream_for_stdout(&stream, iterpool));
+                SVN_ERR(svn_branch__txn_serialize(wc->working->branch->txn,
+                                                  stream,
+                                                  iterpool));
+              }
+            else
+              {
+                /* Note: BASE_REVISION is always a real revision number, here */
+                SVN_ERR(list_all_branches(wc->working->branch->txn, TRUE,
+                                          iterpool));
+              }
+          }
+          break;
+
+        case ACTION_LS:
+          {
+            VERIFY_EID_EXISTS("ls", 0);
+            if (the_ui_mode == UI_MODE_PATHS)
+              {
+                SVN_ERR(list_branch_elements(arg[0]->el_rev->branch, iterpool));
+              }
+            else if (the_ui_mode == UI_MODE_EIDS)
+              {
+                SVN_ERR(list_branch_elements_by_eid(arg[0]->el_rev->branch,
+                                                    iterpool));
+              }
+            else
+              {
+                svn_stream_t *stream;
+                SVN_ERR(svn_stream_for_stdout(&stream, iterpool));
+                SVN_ERR(svn_branch__state_serialize(stream,
+                                                    arg[0]->el_rev->branch,
+                                                    iterpool));
+              }
+          }
+          break;
+
+        case ACTION_TBRANCH:
+          VERIFY_EID_EXISTS("tbranch", 0);
+          {
+            const char *from_branch_id = svn_branch__get_id(arg[0]->el_rev->branch,
+                                                            iterpool);
+            svn_branch__rev_bid_eid_t *from
+              = svn_branch__rev_bid_eid_create(arg[0]->el_rev->rev, from_branch_id,
+                                               arg[0]->el_rev->eid, iterpool);
+            svn_branch__state_t *new_branch;
+
+            SVN_ERR(do_topbranch(&new_branch, wc->edit_txn,
+                                 from,
+                                 iterpool, iterpool));
+            /* Switch the WC working state to this new branch */
+            wc->working->branch = new_branch;
+          }
+          break;
+
+        case ACTION_BRANCH:
+          VERIFY_EID_EXISTS("branch", 0);
+          VERIFY_REV_UNSPECIFIED("branch", 1);
+          VERIFY_EID_NONEXISTENT("branch", 1);
+          VERIFY_PARENT_EID_EXISTS("branch", 1);
+          {
+            const char *from_branch_id = svn_branch__get_id(arg[0]->el_rev->branch,
+                                                            iterpool);
+            svn_branch__rev_bid_eid_t *from
+              = svn_branch__rev_bid_eid_create(arg[0]->el_rev->rev, from_branch_id,
+                                               arg[0]->el_rev->eid, iterpool);
+            svn_branch__state_t *new_branch;
+
+            SVN_ERR(do_branch(&new_branch, wc->edit_txn,
+                              from,
+                              arg[1]->el_rev->branch, arg[1]->parent_el_rev->eid,
+                              arg[1]->path_name,
+                              iterpool, iterpool));
+          }
+          break;
+
+        case ACTION_BRANCH_INTO:
+          VERIFY_EID_EXISTS("branch-into", 0);
+          VERIFY_REV_UNSPECIFIED("branch-into", 1);
+          VERIFY_EID_NONEXISTENT("branch-into", 1);
+          VERIFY_PARENT_EID_EXISTS("branch-into", 1);
+          {
+            SVN_ERR(do_branch_into(arg[0]->el_rev->branch, arg[0]->el_rev->eid,
+                                   arg[1]->el_rev->branch,
+                                   arg[1]->parent_el_rev->eid, arg[1]->path_name,
+                                   iterpool));
+          }
+          break;
+
+        case ACTION_MKBRANCH:
+          VERIFY_REV_UNSPECIFIED("mkbranch", 0);
+          VERIFY_EID_NONEXISTENT("mkbranch", 0);
+          VERIFY_PARENT_EID_EXISTS("mkbranch", 0);
+          {
+            apr_hash_t *props = apr_hash_make(iterpool);
+            svn_element__payload_t *payload
+              = svn_element__payload_create_dir(props, iterpool);
+
+            SVN_ERR(do_mkbranch(NULL, wc->edit_txn,
+                                arg[0]->parent_el_rev->branch,
+                                arg[0]->parent_el_rev->eid, arg[0]->path_name,
+                                payload, iterpool));
+          }
+          break;
+
+        case ACTION_MERGE3:
+          {
+            VERIFY_EID_EXISTS("merge", 0);
+            VERIFY_EID_EXISTS("merge", 1);
+            VERIFY_REV_UNSPECIFIED("merge", 1);
+            VERIFY_EID_EXISTS("merge", 2);
+
+            SVN_ERR(do_merge(wc,
+                             arg[0]->el_rev /*from*/,
+                             arg[1]->el_rev /*to*/,
+                             arg[2]->el_rev /*yca*/,
+                             iterpool));
+          }
+          break;
+
+        case ACTION_AUTO_MERGE:
+          {
+            VERIFY_EID_EXISTS("merge", 0);
+            VERIFY_EID_EXISTS("merge", 1);
+            VERIFY_REV_UNSPECIFIED("merge", 1);
+
+            SVN_ERR(do_auto_merge(wc,
+                                  arg[0]->el_rev /*from*/,
+                                  arg[1]->el_rev /*to*/,
+                                  iterpool));
+          }
+          break;
+
+        case ACTION_MV:
+          SVN_ERR(point_to_outer_element_instead(arg[0]->el_rev, "mv",
+                                                 iterpool));
+
+          VERIFY_REV_UNSPECIFIED("mv", 0);
+          VERIFY_EID_EXISTS("mv", 0);
+          VERIFY_REV_UNSPECIFIED("mv", 1);
+          VERIFY_EID_NONEXISTENT("mv", 1);
+          VERIFY_PARENT_EID_EXISTS("mv", 1);
+          VERIFY_NOT_CHILD_OF_SELF("mv", 0, 1, iterpool);
+
+          /* Simple move/rename within same branch, if possible */
+          if (BRANCH_IS_SAME_BRANCH(arg[1]->parent_el_rev->branch,
+                                    arg[0]->el_rev->branch,
+                                    iterpool))
+            {
+              SVN_ERR(do_move(arg[0]->el_rev,
+                              arg[1]->parent_el_rev, arg[1]->path_name,
+                              iterpool));
+            }
+          else
+            {
+              SVN_ERR(do_interactive_cross_branch_move(wc->edit_txn,
+                                                       arg[0]->el_rev,
+                                                       arg[1]->parent_el_rev,
+                                                       arg[1]->path_name,
+                                                       iterpool));
+            }
+          break;
+
+        case ACTION_CP:
+          VERIFY_REV_SPECIFIED("cp", 0);
+          /* (Or do we want to support copying from "this txn" too?) */
+          VERIFY_EID_EXISTS("cp", 0);
+          VERIFY_REV_UNSPECIFIED("cp", 1);
+          VERIFY_EID_NONEXISTENT("cp", 1);
+          VERIFY_PARENT_EID_EXISTS("cp", 1);
+          SVN_ERR(do_copy(arg[0]->el_rev,
+                          arg[1]->parent_el_rev->branch,
+                          arg[1]->parent_el_rev->eid, arg[1]->path_name,
+                          iterpool));
+          break;
+
+        case ACTION_RM:
+          SVN_ERR(point_to_outer_element_instead(arg[0]->el_rev, "rm",
+                                                 iterpool));
+
+          VERIFY_REV_UNSPECIFIED("rm", 0);
+          VERIFY_EID_EXISTS("rm", 0);
+          SVN_ERR(do_delete(arg[0]->el_rev->branch, arg[0]->el_rev->eid,
+                            iterpool));
+          break;
+
+        case ACTION_CP_RM:
+          SVN_ERR(point_to_outer_element_instead(arg[0]->el_rev,
+                                                 "copy-and-delete", iterpool));
+
+          VERIFY_REV_UNSPECIFIED("copy-and-delete", 0);
+          VERIFY_EID_EXISTS("copy-and-delete", 0);
+          VERIFY_REV_UNSPECIFIED("copy-and-delete", 1);
+          VERIFY_EID_NONEXISTENT("copy-and-delete", 1);
+          VERIFY_PARENT_EID_EXISTS("copy-and-delete", 1);
+          VERIFY_NOT_CHILD_OF_SELF("copy-and-delete", 0, 1, iterpool);
+
+          SVN_ERR(do_copy_and_delete(arg[0]->el_rev,
+                                     arg[1]->parent_el_rev->branch,
+                                     arg[1]->parent_el_rev->eid,
+                                     arg[1]->path_name,
+                                     iterpool));
+          break;
+
+        case ACTION_BR_RM:
+          SVN_ERR(point_to_outer_element_instead(arg[0]->el_rev,
+                                                 "branch-and-delete",
+                                                 iterpool));
+
+          VERIFY_REV_UNSPECIFIED("branch-and-delete", 0);
+          VERIFY_EID_EXISTS("branch-and-delete", 0);
+          VERIFY_REV_UNSPECIFIED("branch-and-delete", 1);
+          VERIFY_EID_NONEXISTENT("branch-and-delete", 1);
+          VERIFY_PARENT_EID_EXISTS("branch-and-delete", 1);
+          VERIFY_NOT_CHILD_OF_SELF("branch-and-delete", 0, 1, iterpool);
+
+          SVN_ERR(do_branch_and_delete(wc->edit_txn,
+                                       arg[0]->el_rev,
+                                       arg[1]->parent_el_rev->branch,
+                                       arg[1]->parent_el_rev->eid,
+                                       arg[1]->path_name,
+                                       iterpool));
+          break;
+
+        case ACTION_BR_INTO_RM:
+          SVN_ERR(point_to_outer_element_instead(arg[0]->el_rev,
+                                                 "branch-into-and-delete",
+                                                 iterpool));
+
+          VERIFY_REV_UNSPECIFIED("branch-into-and-delete", 0);
+          VERIFY_EID_EXISTS("branch-into-and-delete", 0);
+          VERIFY_REV_UNSPECIFIED("branch-into-and-delete", 1);
+          VERIFY_EID_NONEXISTENT("branch-into-and-delete", 1);
+          VERIFY_PARENT_EID_EXISTS("branch-into-and-delete", 1);
+          VERIFY_NOT_CHILD_OF_SELF("branch-into-and-delete", 0, 1, iterpool);
+
+          SVN_ERR(do_branch_into_and_delete(arg[0]->el_rev,
+                                            arg[1]->parent_el_rev->branch,
+                                            arg[1]->parent_el_rev->eid,
+                                            arg[1]->path_name,
+                                            iterpool));
+          break;
+
+        case ACTION_MKDIR:
+          VERIFY_REV_UNSPECIFIED("mkdir", 0);
+          VERIFY_EID_NONEXISTENT("mkdir", 0);
+          VERIFY_PARENT_EID_EXISTS("mkdir", 0);
+          SVN_ERR(do_mkdir(wc->edit_txn,
+                           arg[0]->parent_el_rev->branch,
+                           arg[0]->parent_el_rev->eid, arg[0]->path_name,
+                           iterpool));
+          break;
+
+        case ACTION_PUT_FILE:
+          VERIFY_REV_UNSPECIFIED("put", 1);
+          VERIFY_PARENT_EID_EXISTS("put", 1);
+          SVN_ERR(do_put_file(wc->edit_txn,
+                              action->relpath[0],
+                              arg[1]->el_rev,
+                              arg[1]->parent_el_rev,
+                              arg[1]->path_name,
+                              iterpool));
+          break;
+
+        case ACTION_CAT:
+          /* Fixed: the op name passed here was "rm", so a failing 'cat'
+             reported itself as 'rm' in the error message. */
+          VERIFY_EID_EXISTS("cat", 0);
+          SVN_ERR(do_cat(arg[0]->el_rev,
+                         iterpool));
+          break;
+
+        case ACTION_COMMIT:
+          {
+            svn_revnum_t new_rev;
+
+            SVN_ERR(do_commit(&new_rev, wc, revprops, iterpool));
+            if (! SVN_IS_VALID_REVNUM(new_rev))
+              {
+                svnmover_notify_v("There are no changes to commit.");
+              }
+          }
+          break;
+
+        case ACTION_UPDATE:
+          /* ### If current WC branch doesn't exist in target rev, should
+             'update' follow to a different branch? By following merge graph?
+             Presently it would try to update to a state of nonexistence. */
+          /* path (or eid) is currently required for syntax, but ignored */
+          VERIFY_EID_EXISTS("update", 0);
+          /* We require a rev to be specified because an unspecified rev
+             currently always means 'working version', whereas we would
+             want it to mean 'head' for this subcommand. */
+          VERIFY_REV_SPECIFIED("update", 0);
+          {
+            SVN_ERR(do_switch(wc, arg[0]->el_rev->rev, wc->base->branch,
+                              iterpool));
+          }
+          break;
+
+        case ACTION_SWITCH:
+          VERIFY_EID_EXISTS("switch", 0);
+          {
+            SVN_ERR(do_switch(wc, arg[0]->el_rev->rev, arg[0]->el_rev->branch,
+                              iterpool));
+          }
+          break;
+
+        case ACTION_REVERT:
+          {
+            SVN_ERR(do_revert(wc, iterpool));
+          }
+          break;
+
+        case ACTION_MIGRATE:
+          /* path (or eid) is currently required for syntax, but ignored */
+          VERIFY_EID_EXISTS("migrate", 0);
+          VERIFY_REV_SPECIFIED("migrate", 0);
+          {
+            /* NOTE(review): the same rev is passed as both the start and
+               end of the range — confirm against do_migrate's contract. */
+            SVN_ERR(do_migrate(wc,
+                               arg[0]->el_rev->rev, arg[0]->el_rev->rev,
+                               iterpool));
+          }
+          break;
+
+        default:
+          SVN_ERR_MALFUNCTION();
+        }
+
+      /* Record the command for later replay, except commits. */
+      if (action->action != ACTION_COMMIT)
+        {
+          wc->list_of_commands
+            = apr_psprintf(pool, "%s%s\n",
+                           wc->list_of_commands ? wc->list_of_commands : "",
+                           svn_cstring_join2(action->action_args, " ",
+                                             TRUE, pool));
+        }
+    }
+  svn_pool_destroy(iterpool);
+  return SVN_NO_ERROR;
+}
+
+/* Perform the typical suite of manipulations for user-provided URLs
+   on URL, returning the result (allocated from POOL): IRI-to-URI
+   conversion, then auto-escaping, then canonicalization. */
+static const char *
+sanitize_url(const char *url,
+             apr_pool_t *pool)
+{
+  const char *as_uri = svn_path_uri_from_iri(url, pool);
+  const char *escaped = svn_path_uri_autoescape(as_uri, pool);
+
+  return svn_uri_canonicalize(escaped, pool);
+}
+
+/* Return a one-line usage/help string for ACTION, of the form
+ * " NAME ARGS : HELP\n", allocated in POOL. */
+static const char *
+help_for_subcommand(const action_defn_t *action, apr_pool_t *pool)
+{
+  return apr_psprintf(pool, " %-22s : %s\n",
+                      apr_psprintf(pool, "%s %s",
+                                   action->name, action->args_help),
+                      action->help);
+}
+
+/* Print a usage message on STREAM, listing only the actions (one
+ * help line per entry of ACTION_DEFN). */
+static void
+usage_actions_only(FILE *stream, apr_pool_t *pool)
+{
+  const int n_actions = sizeof(action_defn) / sizeof(action_defn[0]);
+  int i;
+
+  for (i = 0; i < n_actions; i++)
+    {
+      const char *line = help_for_subcommand(&action_defn[i], pool);
+
+      /* Ignore any write error; this is best-effort help output. */
+      svn_error_clear(svn_cmdline_fputs(line, stream, pool));
+    }
+}
+
+/* Print a usage message on STREAM: the general header and argument
+ * syntax, then the per-action help lines, then the global options.
+ * Write errors are deliberately ignored (best-effort output). */
+static void
+usage(FILE *stream, apr_pool_t *pool)
+{
+  /* Header: invocation synopsis and the action-argument syntax. */
+  svn_error_clear(svn_cmdline_fputs(
+    _("usage: svnmover -U REPO_URL [ACTION...]\n"
+      "A client for experimenting with move tracking.\n"
+      "\n"
+      " Commit a batch of ACTIONs to a Subversion repository, as a single\n"
+      " new revision. With no ACTIONs specified, read actions interactively\n"
+      " from standard input, until EOF or ^C, and then commit the result.\n"
+      "\n"
+      " Action arguments are of the form\n"
+      " [^B<branch-id>/]<path>[@<revnum>]\n"
+      " where\n"
+      " <branch-id> defaults to the working branch or, when <revnum> is\n"
+      " given, to the base branch\n"
+      " <path> is a path relative to the branch\n"
+      " <revnum> is the revision number, when making a historic reference\n"
+      "\n"
+      " Move tracking metadata is stored in the repository, in on-disk files\n"
+      " for RA-local or in revprops otherwise.\n"
+      "\n"
+      "Actions:\n"),
+    stream, pool));
+  /* One help line per subcommand. */
+  usage_actions_only(stream, pool);
+  /* Global options. */
+  svn_error_clear(svn_cmdline_fputs(
+    _("\n"
+      "Valid options:\n"
+      " --ui={eids|e|paths|p} : display information as elements or as paths\n"
+      " --colo[u]r={always|never|auto}\n"
+      " : use coloured output; 'auto' means when standard\n"
+      " output goes to a terminal; default: never\n"
+      " -h, -? [--help] : display this text\n"
+      " -v [--verbose] : display debugging messages\n"
+      " -q [--quiet] : suppress notifications\n"
+      " -m [--message] ARG : use ARG as a log message\n"
+      " -F [--file] ARG : read log message from file ARG\n"
+      " -u [--username] ARG : commit the changes as username ARG\n"
+      " -p [--password] ARG : use ARG as the password\n"
+      " -U [--root-url] ARG : interpret all action URLs relative to ARG\n"
+      " -r [--revision] ARG : use revision ARG as baseline for changes\n"
+      " -B [--branch-id] ARG : work on the branch identified by ARG\n"
+      " --with-revprop ARG : set revision property in the following format:\n"
+      " NAME[=VALUE]\n"
+      " --non-interactive : do no interactive prompting (default is to\n"
+      " prompt only if standard input is a terminal)\n"
+      " --force-interactive : do interactive prompting even if standard\n"
+      " input is not a terminal\n"
+      " --trust-server-cert : accept SSL server certificates from unknown\n"
+      " certificate authorities without prompting (but\n"
+      " only with '--non-interactive')\n"
+      " -X [--extra-args] ARG : append arguments from file ARG (one per line;\n"
+      " use \"-\" to read from standard input)\n"
+      " --config-dir ARG : use ARG to override the config directory\n"
+      " --config-option ARG : use ARG to override a configuration option\n"
+      " --no-auth-cache : do not cache authentication tokens\n"
+      " --version : print version information\n"),
+    stream, pool));
+}
+
+static svn_error_t *
+insufficient(int i, apr_pool_t *pool)
+{
+ return svn_error_createf(SVN_ERR_INCORRECT_PARAMS, NULL,
+ "insufficient arguments:\n"
+ "%s",
+ help_for_subcommand(&action_defn[i], pool));
+}
+
+/* Print version information for svnmover, followed by the list of
+ * available repository-access (RA) modules.  _QUIET selects the terse
+ * form; OS is accepted for signature compatibility but not used. */
+static svn_error_t *
+display_version(apr_getopt_t *os, svn_boolean_t _quiet, apr_pool_t *pool)
+{
+  svn_stringbuf_t *footer
+    = svn_stringbuf_create("The following repository access (RA) modules are available:\n\n",
+                           pool);
+
+  SVN_ERR(svn_ra_print_modules(footer, pool));
+
+  SVN_ERR(svn_opt_print_help4(NULL, "svnmover", TRUE, _quiet, FALSE,
+                              footer->data,
+                              NULL, NULL, NULL, NULL, NULL, pool));
+
+  return SVN_NO_ERROR;
+}
+
+/* Return an error about the mutual exclusivity of the -m, -F, and
+   --with-revprop=svn:log command-line options.  (At most one source of
+   a log message may be supplied; see get_log_message().) */
+static svn_error_t *
+mutually_exclusive_logs_error(void)
+{
+  return svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
+                          _("--message (-m), --file (-F), and "
+                            "--with-revprop=svn:log are mutually "
+                            "exclusive"));
+}
+
+/* Obtain the log message from its possible sources, in order of
+ * precedence: an svn:log entry already present in REVPROPS, the -F
+ * file contents FILEDATA, or the -m argument MESSAGE.  Produce an
+ * error if more than one source was supplied.
+ *
+ * Set *FINAL_MESSAGE to the message normalized to internal form
+ * (allocated in RESULT_POOL), or to NULL if no message was given. */
+static svn_error_t *
+get_log_message(const char **final_message,
+                const char *message,
+                apr_hash_t *revprops,
+                svn_stringbuf_t *filedata,
+                apr_pool_t *result_pool,
+                apr_pool_t *scratch_pool)
+{
+  svn_string_t *log;
+
+  *final_message = NULL;
+
+  /* If we already have a log message in the revprop hash, then just
+     make sure the user didn't try to also use -m or -F. Otherwise,
+     we need to consult -m or -F to find a log message, if any. */
+  log = svn_hash_gets(revprops, SVN_PROP_REVISION_LOG);
+  if (log)
+    {
+      if (filedata || message)
+        return mutually_exclusive_logs_error();
+
+      /* Remove it from the revprops; it will be re-added later */
+      svn_hash_sets(revprops, SVN_PROP_REVISION_LOG, NULL);
+    }
+  else if (filedata)
+    {
+      if (message)
+        return mutually_exclusive_logs_error();
+
+      log = svn_string_create(filedata->data, scratch_pool);
+    }
+  else if (message)
+    {
+      log = svn_string_create(message, scratch_pool);
+    }
+
+  if (! log)
+    return SVN_NO_ERROR;
+
+  SVN_ERR_W(svn_subst_translate_string2(&log, NULL, NULL,
+                                        log, NULL, FALSE,
+                                        result_pool, scratch_pool),
+            _("Error normalizing log message to internal format"));
+
+  *final_message = log->data;
+
+  return SVN_NO_ERROR;
+}
+
+/* Command words that parse_actions() handles specially (they take
+ * effect immediately and produce no action_t); also offered by the
+ * interactive tab-completion callback. */
+static const char *const special_commands[] =
+{
+  "help",
+  "--verbose",
+  "--ui=paths", "--ui=eids", "--ui=serial",
+};
+
+/* Parse the action arguments ACTION_ARGS (an array of const char *
+ * words) into *ACTIONS, an array of action_t *, allocated in POOL.
+ *
+ * Words matching the special commands ("help", "-v"/"--verbose",
+ * "--ui=...") take effect immediately here and produce no action_t.
+ * Every other word must be a subcommand name from ACTION_DEFN,
+ * followed by the number of path arguments that subcommand declares;
+ * 'cp' additionally consumes a leading source-revision argument. */
+static svn_error_t *
+parse_actions(apr_array_header_t **actions,
+              apr_array_header_t *action_args,
+              apr_pool_t *pool)
+{
+  int i;
+
+  *actions = apr_array_make(pool, 1, sizeof(action_t *));
+
+  for (i = 0; i < action_args->nelts; ++i)
+    {
+      int j, k, num_url_args;
+      const char *action_string = APR_ARRAY_IDX(action_args, i, const char *);
+      action_t *action = apr_pcalloc(pool, sizeof(*action));
+      const char *cp_from_rev = NULL;
+
+      /* First, parse the action. Handle some special actions immediately;
+         handle normal subcommands by looking them up in the table. */
+      if (! strcmp(action_string, "?") || ! strcmp(action_string, "h")
+          || ! strcmp(action_string, "help"))
+        {
+          usage_actions_only(stdout, pool);
+          return SVN_NO_ERROR;
+        }
+      if (! strncmp(action_string, "--ui=", 5))
+        {
+          SVN_ERR(svn_token__from_word_err(&the_ui_mode, ui_mode_map,
+                                           action_string + 5));
+          continue;
+        }
+      if (! strcmp(action_string, "--verbose")
+          || ! strcmp(action_string, "-v"))
+        {
+          /* Toggle verbosity rather than setting it. */
+          quiet = !quiet;
+          svnmover_notify("verbose mode %s", quiet ? "off" : "on");
+          continue;
+        }
+      /* Look the word up in the subcommand table. */
+      for (j = 0; j < sizeof(action_defn) / sizeof(action_defn[0]); j++)
+        {
+          if (strcmp(action_string, action_defn[j].name) == 0)
+            {
+              action->action = action_defn[j].code;
+              num_url_args = action_defn[j].num_args;
+              break;
+            }
+        }
+      if (j == sizeof(action_defn) / sizeof(action_defn[0]))
+        return svn_error_createf(SVN_ERR_INCORRECT_PARAMS, NULL,
+                                 "'%s' is not an action; try 'help'.",
+                                 action_string);
+
+      /* Record the words making up this action, for later replay. */
+      action->action_args = apr_array_make(pool, 0, sizeof(const char *));
+      APR_ARRAY_PUSH(action->action_args, const char *) = action_string;
+
+      if (action->action == ACTION_CP)
+        {
+          /* next argument is the copy source revision */
+          if (++i == action_args->nelts)
+            return svn_error_trace(insufficient(j, pool));
+          cp_from_rev = APR_ARRAY_IDX(action_args, i, const char *);
+          APR_ARRAY_PUSH(action->action_args, const char *) = cp_from_rev;
+        }
+
+      /* Parse the required number of URLs. */
+      for (k = 0; k < num_url_args; ++k)
+        {
+          const char *path;
+
+          if (++i == action_args->nelts)
+            return svn_error_trace(insufficient(j, pool));
+          path = APR_ARRAY_IDX(action_args, i, const char *);
+          APR_ARRAY_PUSH(action->action_args, const char *) = path;
+
+          /* For 'cp', fold the separately-given source revision into the
+             first path as an "@REV" suffix before parsing it. */
+          if (cp_from_rev && k == 0)
+            {
+              path = apr_psprintf(pool, "%s@%s", path, cp_from_rev);
+            }
+
+          SVN_ERR(svn_opt_parse_path(&action->rev_spec[k], &path, path, pool));
+
+          /* If there's an ANCHOR_URL, we expect URL to be a path
+             relative to ANCHOR_URL (and we build a full url from the
+             combination of the two). Otherwise, it should be a full
+             url. */
+          if (svn_path_is_url(path))
+            {
+              return svn_error_createf(SVN_ERR_INCORRECT_PARAMS, NULL,
+                                       "Argument '%s' is a URL; use "
+                                       "--root-url (-U) instead", path);
+            }
+          /* Parse "^B<branch-id>/path" syntax. */
+          if (strncmp("^B", path, 2) == 0)
+            {
+              const char *slash = strchr(path, '/');
+
+              action->branch_id[k]
+                = slash ? apr_pstrndup(pool, path + 1, slash - (path + 1))
+                        : path + 1;
+              path = slash ? slash + 1 : "";
+            }
+          /* These args must be relpaths, except for the 'local file' arg
+             of a 'put' command. */
+          if (! svn_relpath_is_canonical(path)
+              && ! (action->action == ACTION_PUT_FILE && k == 0))
+            {
+              return svn_error_createf(SVN_ERR_INCORRECT_PARAMS, NULL,
+                                       "Argument '%s' is not a relative path "
+                                       "or a URL", path);
+            }
+          action->relpath[k] = path;
+        }
+
+      APR_ARRAY_PUSH(*actions, action_t *) = action;
+    }
+
+  return SVN_NO_ERROR;
+}
+
+#ifdef HAVE_LINENOISE
+/* Offer CANDIDATE as a completion of the partial input BUF if it
+ * starts with, and is strictly longer than, BUF.  A trailing space is
+ * appended so the user can continue with the next word immediately.
+ *
+ * NOTE(review): like the original code, this reuses one static buffer
+ * across calls, which presumes linenoiseAddCompletion copies its
+ * argument — confirm against the linenoise API. */
+static void
+add_completion_if_match(const char *buf,
+                        linenoiseCompletions *lc,
+                        const char *candidate)
+{
+  if (strncmp(buf, candidate, strlen(buf)) == 0
+      && strlen(candidate) > strlen(buf))
+    {
+      static char completion[100];
+
+      apr_cpystrn(completion, candidate, 99);
+      strcat(completion, " ");
+      linenoiseAddCompletion(lc, completion);
+    }
+}
+
+/* A command-line completion callback for the 'Line Noise' interactive
+ * prompting.
+ *
+ * This is called when the user presses the Tab key. It calculates the
+ * possible completions for the partial line BUF: every special command
+ * and every subcommand name that extends what the user has typed.
+ *
+ * ### So far, this only works on a single command keyword at the start
+ * of the line.
+ */
+static void
+linenoise_completion(const char *buf, linenoiseCompletions *lc)
+{
+  int i;
+
+  for (i = 0; i < sizeof(special_commands) / sizeof(special_commands[0]); i++)
+    add_completion_if_match(buf, lc, special_commands[i]);
+
+  for (i = 0; i < sizeof(action_defn) / sizeof(action_defn[0]); i++)
+    add_completion_if_match(buf, lc, action_defn[i].name);
+}
+#endif
+
+/* Display PROMPT (highlighted in yellow), read one line of input, and
+ * split it on spaces into *WORDS, allocated in RESULT_POOL.
+ *
+ * Set *WORDS to null if input is cancelled (by ctrl-C for example).
+ */
+static svn_error_t *
+read_words(apr_array_header_t **words,
+           const char *prompt,
+           apr_pool_t *result_pool)
+{
+  const char *line;
+  svn_error_t *err;
+
+  settext(TEXT_FG_YELLOW);
+  err = svnmover_prompt_user(&line, prompt, result_pool);
+  settext(TEXT_RESET);
+
+  if (err)
+    {
+      /* Cancellation and EOF mean "no input", not failure. */
+      if (err->apr_err == SVN_ERR_CANCELLED || err->apr_err == APR_EOF)
+        {
+          svn_error_clear(err);
+          *words = NULL;
+          return SVN_NO_ERROR;
+        }
+      return svn_error_trace(err);
+    }
+
+  *words = svn_cstring_split(line, " ", TRUE /*chop_whitespace*/,
+                             result_pool);
+
+  return SVN_NO_ERROR;
+}
+
+/*
+ * On success, leave *EXIT_CODE untouched and return SVN_NO_ERROR. On error,
+ * either return an error to be displayed, or set *EXIT_CODE to non-zero and
+ * return SVN_NO_ERROR.
+ */
+static svn_error_t *
+sub_main(int *exit_code, int argc, const char *argv[], apr_pool_t *pool)
+{
+ apr_array_header_t *actions;
+ svn_error_t *err = SVN_NO_ERROR;
+ apr_getopt_t *opts;
+ enum {
+ config_dir_opt = SVN_OPT_FIRST_LONGOPT_ID,
+ config_inline_opt,
+ no_auth_cache_opt,
+ version_opt,
+ with_revprop_opt,
+ non_interactive_opt,
+ force_interactive_opt,
+ trust_server_cert_opt,
+ trust_server_cert_failures_opt,
+ ui_opt,
+ colour_opt,
+ auth_password_from_stdin_opt
+ };
+ static const apr_getopt_option_t options[] = {
+ {"verbose", 'v', 0, ""},
+ {"quiet", 'q', 0, ""},
+ {"message", 'm', 1, ""},
+ {"file", 'F', 1, ""},
+ {"username", 'u', 1, ""},
+ {"password", 'p', 1, ""},
+ {"password-from-stdin", auth_password_from_stdin_opt, 1, ""},
+ {"root-url", 'U', 1, ""},
+ {"revision", 'r', 1, ""},
+ {"branch-id", 'B', 1, ""},
+ {"with-revprop", with_revprop_opt, 1, ""},
+ {"extra-args", 'X', 1, ""},
+ {"help", 'h', 0, ""},
+ {NULL, '?', 0, ""},
+ {"non-interactive", non_interactive_opt, 0, ""},
+ {"force-interactive", force_interactive_opt, 0, ""},
+ {"trust-server-cert", trust_server_cert_opt, 0, ""},
+ {"trust-server-cert-failures", trust_server_cert_failures_opt, 1, ""},
+ {"config-dir", config_dir_opt, 1, ""},
+ {"config-option", config_inline_opt, 1, ""},
+ {"no-auth-cache", no_auth_cache_opt, 0, ""},
+ {"version", version_opt, 0, ""},
+ {"ui", ui_opt, 1, ""},
+ {"colour", colour_opt, 1, ""},
+ {"color", colour_opt, 1, ""},
+ {NULL, 0, 0, NULL}
+ };
+ const char *message = NULL;
+ svn_stringbuf_t *filedata = NULL;
+ const char *username = NULL, *password = NULL;
+ const char *anchor_url = NULL, *extra_args_file = NULL;
+ const char *config_dir = NULL;
+ apr_array_header_t *config_options;
+ svn_boolean_t show_version = FALSE;
+ svn_boolean_t non_interactive = FALSE;
+ svn_boolean_t force_interactive = FALSE;
+ svn_boolean_t interactive_actions;
+ svn_boolean_t trust_unknown_ca = FALSE;
+ svn_boolean_t trust_cn_mismatch = FALSE;
+ svn_boolean_t trust_expired = FALSE;
+ svn_boolean_t trust_not_yet_valid = FALSE;
+ svn_boolean_t trust_other_failure = FALSE;
+ svn_boolean_t no_auth_cache = FALSE;
+ svn_revnum_t base_revision = SVN_INVALID_REVNUM;
+ const char *branch_id = "B0"; /* default branch */
+ apr_array_header_t *action_args;
+ apr_hash_t *revprops = apr_hash_make(pool);
+ apr_hash_t *cfg_hash;
+ svn_config_t *cfg_config;
+ svn_client_ctx_t *ctx;
+ const char *log_msg;
+ svn_tristate_t coloured_output = svn_tristate_false;
+ svnmover_wc_t *wc;
+ svn_boolean_t read_pass_from_stdin = FALSE;
+
+ /* Check library versions */
+ SVN_ERR(check_lib_versions());
+
+ config_options = apr_array_make(pool, 0,
+ sizeof(svn_cmdline__config_argument_t*));
+
+ apr_getopt_init(&opts, pool, argc, argv);
+ opts->interleave = 1;
+ while (1)
+ {
+ int opt;
+ const char *arg;
+ const char *opt_arg;
+
+ apr_status_t status = apr_getopt_long(opts, options, &opt, &arg);
+ if (APR_STATUS_IS_EOF(status))
+ break;
+ if (status != APR_SUCCESS)
+ return svn_error_wrap_apr(status, "getopt failure");
+ switch(opt)
+ {
+ case 'v':
+ quiet = FALSE;
+ break;
+ case 'q':
+ quiet = TRUE;
+ break;
+ case 'm':
+ SVN_ERR(svn_utf_cstring_to_utf8(&message, arg, pool));
+ break;
+ case 'F':
+ {
+ const char *filename;
+ SVN_ERR(svn_utf_cstring_to_utf8(&filename, arg, pool));
+ SVN_ERR(svn_stringbuf_from_file2(&filedata, filename, pool));
+ }
+ break;
+ case 'u':
+ username = apr_pstrdup(pool, arg);
+ break;
+ case 'p':
+ password = apr_pstrdup(pool, arg);
+ break;
+ case auth_password_from_stdin_opt:
+ read_pass_from_stdin = TRUE;
+ break;
+ case 'U':
+ SVN_ERR(svn_utf_cstring_to_utf8(&anchor_url, arg, pool));
+ if (! svn_path_is_url(anchor_url))
+ return svn_error_createf(SVN_ERR_INCORRECT_PARAMS, NULL,
+ "'%s' is not a URL", anchor_url);
+ anchor_url = sanitize_url(anchor_url, pool);
+ break;
+ case 'r':
+ {
+ const char *saved_arg = arg;
+ char *digits_end = NULL;
+ while (*arg == 'r')
+ arg++;
+ base_revision = strtol(arg, &digits_end, 10);
+ if ((! SVN_IS_VALID_REVNUM(base_revision))
+ || (! digits_end)
+ || *digits_end)
+ return svn_error_createf(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
+ _("Invalid revision number '%s'"),
+ saved_arg);
+ }
+ break;
+ case 'B':
+ branch_id = (arg[0] == 'B') ? apr_pstrdup(pool, arg)
+ : apr_psprintf(pool, "B%s", arg);
+ break;
+ case with_revprop_opt:
+ SVN_ERR(svn_opt_parse_revprop(&revprops, arg, pool));
+ break;
+ case 'X':
+ SVN_ERR(svn_utf_cstring_to_utf8(&extra_args_file, arg, pool));
+ break;
+ case non_interactive_opt:
+ non_interactive = TRUE;
+ break;
+ case force_interactive_opt:
+ force_interactive = TRUE;
+ break;
+ case trust_server_cert_opt:
+ trust_unknown_ca = TRUE;
+ break;
+ case trust_server_cert_failures_opt:
+ SVN_ERR(svn_utf_cstring_to_utf8(&opt_arg, arg, pool));
+ SVN_ERR(svn_cmdline__parse_trust_options(
+ &trust_unknown_ca,
+ &trust_cn_mismatch,
+ &trust_expired,
+ &trust_not_yet_valid,
+ &trust_other_failure,
+ opt_arg, pool));
+ break;
+ case config_dir_opt:
+ SVN_ERR(svn_utf_cstring_to_utf8(&config_dir, arg, pool));
+ break;
+ case config_inline_opt:
+ SVN_ERR(svn_utf_cstring_to_utf8(&opt_arg, arg, pool));
+ SVN_ERR(svn_cmdline__parse_config_option(config_options, opt_arg,
+ "svnmover: ", pool));
+ break;
+ case no_auth_cache_opt:
+ no_auth_cache = TRUE;
+ break;
+ case version_opt:
+ show_version = TRUE;
+ break;
+ case ui_opt:
+ SVN_ERR(svn_utf_cstring_to_utf8(&opt_arg, arg, pool));
+ SVN_ERR(svn_token__from_word_err(&the_ui_mode, ui_mode_map, opt_arg));
+ break;
+ case colour_opt:
+ if (strcmp(arg, "always") == 0)
+ coloured_output = svn_tristate_true;
+ else if (strcmp(arg, "never") == 0)
+ coloured_output = svn_tristate_false;
+ else if (strcmp(arg, "auto") == 0)
+ coloured_output = svn_tristate_unknown;
+ else
+ return svn_error_createf(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
+ _("Bad argument in '--colour=%s': "
+ "use one of 'always', 'never', 'auto'"),
+ arg);
+ break;
+ case 'h':
+ case '?':
+ usage(stdout, pool);
+ return SVN_NO_ERROR;
+ }
+ }
+
+ if (show_version)
+ {
+ SVN_ERR(display_version(opts, quiet, pool));
+ return SVN_NO_ERROR;
+ }
+
+ if (coloured_output == svn_tristate_true)
+ use_coloured_output = TRUE;
+ else if (coloured_output == svn_tristate_false)
+ use_coloured_output = FALSE;
+ else
+ use_coloured_output = (svn_cmdline__stdout_is_a_terminal()
+ && svn_cmdline__stderr_is_a_terminal());
+
+ if (non_interactive && force_interactive)
+ {
+ return svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
+ _("--non-interactive and --force-interactive "
+ "are mutually exclusive"));
+ }
+ else
+ non_interactive = !svn_cmdline__be_interactive(non_interactive,
+ force_interactive);
+
+ if (!non_interactive)
+ {
+ if (trust_unknown_ca || trust_cn_mismatch || trust_expired
+ || trust_not_yet_valid || trust_other_failure)
+ return svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
+ _("--trust-server-cert-failures requires "
+ "--non-interactive"));
+ }
+
+ /* --password-from-stdin can only be used with --non-interactive */
+ if (read_pass_from_stdin && !non_interactive)
+ {
+ return svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
+ _("--password-from-stdin requires "
+ "--non-interactive"));
+ }
+
+ /* Now initialize the client context */
+
+ err = svn_config_get_config(&cfg_hash, config_dir, pool);
+ if (err)
+ {
+ /* Fallback to default config if the config directory isn't readable
+ or is not a directory. */
+ if (APR_STATUS_IS_EACCES(err->apr_err)
+ || SVN__APR_STATUS_IS_ENOTDIR(err->apr_err))
+ {
+ svn_handle_warning2(stderr, err, "svnmover: ");
+ svn_error_clear(err);
+
+ SVN_ERR(svn_config__get_default_config(&cfg_hash, pool));
+ }
+ else
+ return err;
+ }
+
+ if (config_options)
+ {
+ svn_error_clear(
+ svn_cmdline__apply_config_options(cfg_hash, config_options,
+ "svnmover: ", "--config-option"));
+ }
+
+ /* Get password from stdin if necessary */
+ if (read_pass_from_stdin)
+ {
+ SVN_ERR(svn_cmdline__stdin_readline(&password, pool, pool));
+ }
+
+ SVN_ERR(svn_client_create_context2(&ctx, cfg_hash, pool));
+
+ cfg_config = svn_hash_gets(cfg_hash, SVN_CONFIG_CATEGORY_CONFIG);
+ SVN_ERR(svn_cmdline_create_auth_baton2(&ctx->auth_baton,
+ non_interactive,
+ username,
+ password,
+ config_dir,
+ no_auth_cache,
+ trust_unknown_ca,
+ trust_cn_mismatch,
+ trust_expired,
+ trust_not_yet_valid,
+ trust_other_failure,
+ cfg_config,
+ ctx->cancel_func,
+ ctx->cancel_baton,
+ pool));
+
+ /* Get the commit log message */
+ SVN_ERR(get_log_message(&log_msg, message, revprops, filedata,
+ pool, pool));
+
+ /* Put the log message in the list of revprops, and check that the user
+ did not try to supply any other "svn:*" revprops. */
+ if (svn_prop_has_svn_prop(revprops, pool))
+ return svn_error_create(SVN_ERR_CLIENT_PROPERTY_NAME, NULL,
+ _("Standard properties can't be set "
+ "explicitly as revision properties"));
+ if (log_msg)
+ {
+ svn_hash_sets(revprops, SVN_PROP_REVISION_LOG,
+ svn_string_create(log_msg, pool));
+ }
+
+ /* Help command: if given before any actions, then display full help
+ (and ANCHOR_URL need not have been provided). */
+ if (opts->ind < opts->argc && strcmp(opts->argv[opts->ind], "help") == 0)
+ {
+ usage(stdout, pool);
+ return SVN_NO_ERROR;
+ }
+
+ if (!anchor_url)
+ return svn_error_createf(SVN_ERR_INCORRECT_PARAMS, NULL,
+ "--root-url (-U) not provided");
+
+ /* Copy the rest of our command-line arguments to an array,
+ UTF-8-ing them along the way. */
+ /* If there are extra arguments in a supplementary file, tack those
+ on, too (again, in UTF8 form). */
+ action_args = apr_array_make(pool, opts->argc, sizeof(const char *));
+ if (extra_args_file)
+ {
+ svn_stringbuf_t *contents, *contents_utf8;
+
+ SVN_ERR(svn_stringbuf_from_file2(&contents, extra_args_file, pool));
+ SVN_ERR(svn_utf_stringbuf_to_utf8(&contents_utf8, contents, pool));
+ svn_cstring_split_append(action_args, contents_utf8->data, "\n\r",
+ FALSE, pool);
+ }
+
+ interactive_actions = !(opts->ind < opts->argc
+ || extra_args_file
+ || non_interactive);
+
+ if (interactive_actions)
+ {
+#ifdef HAVE_LINENOISE
+ linenoiseSetCompletionCallback(linenoise_completion);
+#endif
+ }
+
+ SVN_ERR(wc_create(&wc,
+ anchor_url, base_revision,
+ branch_id,
+ ctx, pool, pool));
+
+ do
+ {
+ /* Parse arguments -- converting local style to internal style,
+ * repos-relative URLs to regular URLs, etc. */
+ err = svn_client_args_to_target_array2(&action_args, opts, action_args,
+ ctx, FALSE, pool);
+ if (! err)
+ err = parse_actions(&actions, action_args, pool);
+ if (! err)
+ err = execute(wc, actions, anchor_url, revprops, ctx, pool);
+ if (err)
+ {
+ if (err->apr_err == SVN_ERR_AUTHN_FAILED && non_interactive)
+ err = svn_error_quick_wrap(err,
+ _("Authentication failed and interactive"
+ " prompting is disabled; see the"
+ " --force-interactive option"));
+ if (interactive_actions)
+ {
+ /* Display the error, but don't quit */
+ settext_stderr(TEXT_FG_RED);
+ svn_handle_error2(err, stderr, FALSE, "svnmover: ");
+ settext_stderr(TEXT_RESET);
+ svn_error_clear(err);
+ }
+ else
+ SVN_ERR(err);
+ }
+
+ /* Possibly read more actions from the command line */
+ if (interactive_actions)
+ {
+ SVN_ERR(read_words(&action_args, "svnmover> ", pool));
+ }
+ }
+ while (interactive_actions && action_args);
+
+ /* Final commit */
+ err = commit(NULL, wc, revprops, pool);
+ svn_pool_destroy(wc->pool);
+ SVN_ERR(err);
+
+ return SVN_NO_ERROR;
+}
+
+/* Program entry point: initialize APR/the command line, run sub_main(),
+ * and report any error on stderr (highlighted in red) before exiting.
+ */
+int
+main(int argc, const char *argv[])
+{
+  apr_pool_t *pool;
+  int exit_code = EXIT_SUCCESS;
+  svn_error_t *err;
+
+  /* Initialize the app. */
+  if (svn_cmdline_init("svnmover", stderr) != EXIT_SUCCESS)
+    return EXIT_FAILURE;
+
+  /* Create our top-level pool.  Use a separate mutexless allocator,
+   * given this application is single threaded.
+   */
+  pool = apr_allocator_owner_get(svn_pool_create_allocator(FALSE));
+
+  /* Turn malfunctions (assertion failures) into returned errors so they
+     are reported through the normal error path below instead of aborting. */
+  svn_error_set_malfunction_handler(svn_error_raise_on_malfunction);
+
+  err = sub_main(&exit_code, argc, argv, pool);
+
+  /* Flush stdout and report if it fails. It would be flushed on exit anyway
+     but this makes sure that output is not silently lost if it fails. */
+  err = svn_error_compose_create(err, svn_cmdline_fflush(stdout));
+
+  if (err)
+    {
+      exit_code = EXIT_FAILURE;
+      /* Highlight the error message in red on the terminal. */
+      settext_stderr(TEXT_FG_RED);
+      svn_cmdline_handle_exit_error(err, NULL, "svnmover: ");
+      settext_stderr(TEXT_RESET);
+    }
+
+  svn_pool_destroy(pool);
+  return exit_code;
+}
diff --git a/tools/dev/svnmover/svnmover.h b/tools/dev/svnmover/svnmover.h
new file mode 100644
index 0000000..a2d8424
--- /dev/null
+++ b/tools/dev/svnmover/svnmover.h
@@ -0,0 +1,295 @@
+/**
+ * @copyright
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ * @endcopyright
+ *
+ * @file svnmover.h
+ * @brief Concept Demo for Move Tracking and Branching
+ */
+
+#ifndef SVNMOVER_H
+#define SVNMOVER_H
+
+#include "svn_types.h"
+#include "svn_client.h"
+#include "svn_ra.h"
+
+#include "private/svn_branch.h"
+#include "private/svn_branch_compat.h"
+
+/* Decide whether to use the 'linenoise' library for command-line input
+ editing and completion. */
+#ifndef WIN32
+#define HAVE_LINENOISE
+#endif
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+
+/* Like apr_hash_overlay() and apr_hash_merge() except allocating the
+ * result in the pool of the first input hash (OVERLAY and H1 respectively).
+ *
+ * When APR pool debugging is enabled, these functions require that the
+ * result pool does not have greater lifetime than the inputs, so passing
+ * an arbitrary result pool doesn't work well.
+ *
+ * If the second hash's pool has a shorter lifetime than that of the first,
+ * you're out of luck.
+ */
+#define hash_overlay(overlay, base) \
+  apr_hash_overlay(apr_hash_pool_get(overlay), overlay, base)
+#define hash_merge(h1, h2, merger, data) \
+  apr_hash_merge(apr_hash_pool_get(h1), h1, h2, merger, data)
+
+
+enum { UI_MODE_EIDS, UI_MODE_PATHS, UI_MODE_SERIAL };
+extern int the_ui_mode;
+
+
+/* Display PROMPT_STR, read a line of text, and set *RESULT to that line.
+ *
+ * The interface here is similar to svn_cmdline_prompt_user2().
+ */
+svn_error_t *
+svnmover_prompt_user(const char **result,
+ const char *prompt_str,
+ apr_pool_t *pool);
+
+/* Print a notification. */
+__attribute__((format(printf, 1, 2)))
+void
+svnmover_notify(const char *fmt,
+ ...);
+
+/* Print a verbose notification: in 'quiet' mode, don't print it. */
+__attribute__((format(printf, 1, 2)))
+void
+svnmover_notify_v(const char *fmt,
+ ...);
+
+/* One version (revision snapshot) of a branch as seen by the WC:
+   a revision number paired with the branch state at that revision. */
+typedef struct svnmover_wc_version_t
+{
+  svn_revnum_t revision;  /* always SVN_INVALID_REVNUM in working version */
+  svn_branch__state_t *branch;  /* the branch state at this version */
+} svnmover_wc_version_t;
+
+/* Return (left, right) pairs of element content that differ between
+ * LEFT and RIGHT.
+ *
+ * Examine only the elements listed in ELEMENTS, a hash of (eid ->
+ * [anything]). If ELEMENTS is NULL, use the union of LEFT and RIGHT.
+ *
+ * LEFT and/or RIGHT may be null, meaning an empty set of elements.
+ *
+ * Set *DIFF_P to a hash of (eid -> (svn_element__content_t *)[2]).
+ */
+svn_error_t *
+svnmover_element_differences(apr_hash_t **diff_p,
+ const svn_element__tree_t *left,
+ const svn_element__tree_t *right,
+ apr_hash_t *elements,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool);
+
+/* Storage for the conflicts raised by a merge; see the full definition
+   of struct conflict_storage_t below. */
+typedef struct conflict_storage_t conflict_storage_t;
+
+/* The in-memory "working copy" state that svnmover edits: an RA session,
+ * the branch txn being edited, base/working branch versions, and any
+ * conflicts raised by merges. */
+typedef struct svnmover_wc_t
+{
+  apr_pool_t *pool;
+  const char *repos_root_url;
+  /*const char *anchor_repos_relpath;*/
+  svn_revnum_t head_revision;
+
+  svn_ra_session_t *ra_session;
+  svn_branch__txn_t *edit_txn;
+  conflict_storage_t *conflicts;
+
+  /* The base revisions, for (at least) all EIDs in BASE:
+       branch_id -> hash { eid -> revnum } */
+  apr_hash_t *base_revs;
+
+  /* Base and working versions. */
+  svnmover_wc_version_t *base, *working;
+
+  /* Textual list of the commands that were executed, suitable
+     for putting in a log message */
+  const char *list_of_commands;
+
+  svn_client_ctx_t *ctx;
+
+} svnmover_wc_t;
+
+struct conflict_storage_t
+{
+ svn_branch__state_t *yca_branch, *src_branch, *tgt_branch, *merged_branch;
+
+ /* Single-element conflicts */
+ /* (eid -> element_merge3_conflict_t) */
+ apr_hash_t *element_merge_conflicts;
+
+ /* Name-clash conflicts */
+ /* ("%{parent_eid}d/%{name}s" -> name_clash_conflict_t) */
+ apr_hash_t *name_clash_conflicts;
+
+ /* Cycle conflicts */
+ /* (eid -> cycle_conflict_t) */
+ apr_hash_t *cycle_conflicts;
+
+ /* Orphan conflicts */
+ /* (eid -> orphan_conflict_t) */
+ apr_hash_t *orphan_conflicts;
+};
+
+/* Three-way-merge the changes from YCA to SRC and YCA to TGT. YCA is
+ * notionally the youngest common ancestor of SRC and TGT.
+ *
+ * The elements to merge are the union of the elements in the three input
+ * subtrees (SRC, TGT, YCA). For each such element, merge the two changes:
+ * YCA -> SRC and YCA -> TGT, applying the result to EDIT_TXN:EDIT_BRANCH.
+ *
+ * If conflicts arise, return them in *CONFLICT_STORAGE_P; otherwise set
+ * that to null.
+ *
+ * SRC, TGT and YCA must be existing and corresponding (same EID) elements.
+ *
+ * None of SRC, TGT and YCA is a subbranch root element.
+ *
+ * Nested subbranches will also be merged.
+ */
+svn_error_t *
+svnmover_branch_merge(svn_branch__txn_t *edit_txn,
+ svn_branch__state_t *edit_branch,
+ conflict_storage_t **conflict_storage_p,
+ svn_branch__el_rev_id_t *src,
+ svn_branch__el_rev_id_t *tgt,
+ svn_branch__el_rev_id_t *yca,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool);
+
+/* Describe the conflicts in CONFLICT_STORAGE to the user. */
+svn_error_t *
+svnmover_display_conflicts(conflict_storage_t *conflict_storage,
+                           apr_pool_t *scratch_pool);
+
+/* Mark the conflict identified by ID_STRING in CONFLICTS as resolved. */
+svn_error_t *
+svnmover_conflict_resolved(conflict_storage_t *conflicts,
+                           const char *id_string,
+                           apr_pool_t *scratch_pool);
+
+/* Return true iff CONFLICTS contains any (unresolved) conflicts. */
+svn_boolean_t
+svnmover_any_conflicts(const conflict_storage_t *conflicts);
+
+/* Load branching info.
+ */
+svn_error_t *
+svn_ra_load_branching_state(svn_branch__txn_t **branching_txn_p,
+ svn_branch__compat_fetch_func_t *fetch_func,
+ void **fetch_baton,
+ svn_ra_session_t *session,
+ const char *branch_info_dir,
+ svn_revnum_t base_revision,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool);
+
+/* Ev3 version of svn_ra_get_commit_editor().
+ *
+ * If BRANCH_INFO_DIR is non-null, store branching info in that local
+ * directory, otherwise store branching info in revprops.
+ */
+svn_error_t *
+svn_ra_get_commit_txn(svn_ra_session_t *session,
+ svn_branch__txn_t **edit_txn_p,
+ apr_hash_t *revprop_table,
+ svn_commit_callback2_t commit_callback,
+ void *commit_baton,
+ apr_hash_t *lock_tokens,
+ svn_boolean_t keep_locks,
+ const char *branch_info_dir,
+ apr_pool_t *pool);
+
+/** Describes a server-side move (really a copy+delete within the same
+ * revision) which has been identified by scanning the revision log.
+ */
+typedef struct svn_repos_move_info_t {
+  /* The repository relpath the node was moved from. */
+  const char *moved_from_repos_relpath;
+
+  /* The repository relpath the node was moved to. */
+  const char *moved_to_repos_relpath;
+
+  /* The revision in which the move happened. */
+  svn_revnum_t revision;
+
+  /* The copyfrom revision of the moved-to path. */
+  svn_revnum_t copyfrom_rev;
+
+  /* Pointers to previous or subsequent moves of the same node
+   * within interesting history. */
+  struct svn_repos_move_info_t *prev;
+  struct svn_repos_move_info_t *next;
+
+  /* @note Fields may be added to the end of this structure in future
+   * versions.  Therefore, to preserve binary compatibility, users
+   * should not directly allocate structures of this type but should use
+   * svn_repos_move_info_create(). */
+} svn_repos_move_info_t;
+
+/** Create an svn_repos_move_info_t structure, allocated in RESULT_POOL.
+ * @see svn_repos_move_info_t
+ */
+svn_repos_move_info_t *
+svn_repos_move_info_create(const char *moved_from_repos_relpath,
+                           const char *moved_to_repos_relpath,
+                           svn_revnum_t revision,
+                           svn_revnum_t copyfrom_rev,
+                           svn_repos_move_info_t *prev,
+                           svn_repos_move_info_t *next,
+                           apr_pool_t *result_pool);
+
+/* ...
+ */
+const char *
+svn_client__format_move_chain_for_display(svn_repos_move_info_t *first_move,
+ const char *indent,
+ apr_pool_t *result_pool);
+/* ...
+ */
+svn_error_t *
+svn_client__get_repos_moves(apr_hash_t **moves,
+ const char *anchor_abspath,
+ svn_ra_session_t *ra_session,
+ svn_revnum_t start,
+ svn_revnum_t end,
+ svn_client_ctx_t *ctx,
+ apr_pool_t *result_pool,
+ apr_pool_t *scratch_pool);
+
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus */
+
+#endif /* SVNMOVER_H */
+
diff --git a/tools/dev/svnmover/util.c b/tools/dev/svnmover/util.c
new file mode 100644
index 0000000..1ee018f
--- /dev/null
+++ b/tools/dev/svnmover/util.c
@@ -0,0 +1,59 @@
+/*
+ * util.c: Utility functions for 'svnmover'
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include "svnmover.h"
+
+#ifdef HAVE_LINENOISE
+#include "linenoise/linenoise.c"
+#else
+#include "svn_cmdline.h"
+#endif
+
+
+/* Display PROMPT_STR and read one line of user input into *RESULT,
+ * allocated in POOL.
+ *
+ * With linenoise: return an SVN_ERR_CANCELLED error if no line could be
+ * read (e.g. EOF / cancellation), and add any non-empty line to the
+ * recallable input history.  Without linenoise, defer to
+ * svn_cmdline_prompt_user2().
+ */
+svn_error_t *
+svnmover_prompt_user(const char **result,
+                     const char *prompt_str,
+                     apr_pool_t *pool)
+{
+#ifdef HAVE_LINENOISE
+  char *input;
+
+  input = linenoise(prompt_str);
+  if (! input)
+    {
+      return svn_error_create(SVN_ERR_CANCELLED, NULL, NULL);
+    }
+  /* Add the line to the recallable history (if non-empty).  INPUT is
+     known to be non-null here, so testing *INPUT alone suffices. */
+  if (*input)
+    {
+      linenoiseHistoryAdd(input);
+    }
+  /* Copy into POOL before releasing linenoise's malloc'd buffer. */
+  *result = apr_pstrdup(pool, input);
+  free(input);
+#else
+  SVN_ERR(svn_cmdline_prompt_user2(result, prompt_str, NULL, pool));
+#endif
+  return SVN_NO_ERROR;
+}
+
+
diff --git a/tools/dev/svnqlite3-dump b/tools/dev/svnqlite3-dump
new file mode 100755
index 0000000..1b94f38
--- /dev/null
+++ b/tools/dev/svnqlite3-dump
@@ -0,0 +1,50 @@
+#!/usr/bin/perl -lpw
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# USAGE:
+# sqlite3 .svn/wc.db .dump | $0
+# $0 /path/to/wc
+# $0 /path/to/wc/.svn/wc.db
+# DOES:
+# decodes blobs (eg, property skels) and dates to human-readable form
+# REQUIRES:
+# sqlite3(1) (second and third usage forms only)
+
+BEGIN {
+ # locate sqlite3
+ my $sqlite3 = $ENV{SQLITE3} || "sqlite3";
+ # set stdin
+ my $file = shift;
+ $file = "." if -t and not $file;
+ if ($file) {
+ $file .= "/.svn/wc.db" if -e "$file/.svn/wc.db";
+ close STDIN;
+ open STDIN, "-|", $sqlite3, $file, '.dump';
+ } else {
+ # filter stdin to stdout
+ }
+}
+
+# X'68656C6C6F' => "hello"
+1 while s/X'([0-9A-F]{2})/chr(hex $1) . q[X']/e;
+s/X''//g;
+s/\n/\\n/g; # multiline props
+
+# 1288312835000000 => "Fri Oct 29 02:40:35 2010"
+s/(?<=,)(\d\d\d\d\d\d\d\d\d\d)\d\d\d\d\d\d(?=,)/sprintf '"%s"', scalar localtime $1/eg;
diff --git a/tools/dev/svnraisetreeconflict/svnraisetreeconflict.c b/tools/dev/svnraisetreeconflict/svnraisetreeconflict.c
new file mode 100644
index 0000000..65825d5
--- /dev/null
+++ b/tools/dev/svnraisetreeconflict/svnraisetreeconflict.c
@@ -0,0 +1,415 @@
+/* svnraisetreeconflict
+ *
+ * This is a crude command line tool that publishes API to create
+ * tree-conflict markings in a working copy.
+ *
+ * To compile this, go to the root of the Subversion source tree and
+ * call `make svnraisetreeconflict'. You will find the executable file
+ * next to this source file.
+ *
+ * If you want to "install" svnraisetreeconflict, you may call
+ * `make install-tools' in the Subversion source tree root.
+ * (Note: This also installs any other installable tools.)
+ *
+ * svnraisetreeconflict cannot be compiled separate from a Subversion
+ * source tree.
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include "svn_cmdline.h"
+#include "svn_pools.h"
+#include "svn_wc.h"
+#include "svn_utf.h"
+#include "svn_path.h"
+#include "svn_opt.h"
+#include "svn_version.h"
+
+#include "private/svn_wc_private.h"
+#include "private/svn_cmdline_private.h"
+
+#include "svn_private_config.h"
+
+#define OPT_VERSION SVN_OPT_FIRST_LONGOPT_ID
+
+/* Print version information for svnraisetreeconflict (via the
+ * svn_opt_print_help4() version path). */
+static svn_error_t *
+version(apr_pool_t *pool)
+{
+  return svn_opt_print_help4(NULL, "svnraisetreeconflict", TRUE, FALSE, FALSE,
+                             NULL, NULL, NULL, NULL, NULL, NULL, pool);
+}
+
+/* Print a brief hint on stderr directing the user to '--help'. */
+static void
+usage(apr_pool_t *pool)
+{
+  svn_error_clear(svn_cmdline_fprintf
+                  (stderr, pool,
+                   _("Type 'svnraisetreeconflict --help' for usage.\n")));
+}
+
+/***************************************************************************
+ * "enum mapping" functions copied from subversion/libsvn_wc/tree_conflicts.c
+ **************************************************************************/
+
+/* A mapping between a string STR and an enumeration value VAL. */
+typedef struct enum_mapping_t
+{
+ const char *str;
+ int val;
+} enum_mapping_t;
+
+/* A map for svn_node_kind_t values. */
+static const enum_mapping_t node_kind_map[] =
+{
+ { "none", svn_node_none },
+ { "file", svn_node_file },
+ { "dir", svn_node_dir },
+ { "unknown", svn_node_unknown },
+ { NULL, 0 }
+};
+
+/* A map for svn_wc_operation_t values. */
+static const enum_mapping_t operation_map[] =
+{
+ { "update", svn_wc_operation_update },
+ { "switch", svn_wc_operation_switch },
+ { "merge", svn_wc_operation_merge },
+ { NULL, 0 }
+};
+
+/* A map for svn_wc_conflict_action_t values. */
+static const enum_mapping_t action_map[] =
+{
+ { "edit", svn_wc_conflict_action_edit },
+ { "delete", svn_wc_conflict_action_delete },
+ { "add", svn_wc_conflict_action_add },
+ { NULL, 0 }
+};
+
+/* A map for svn_wc_conflict_reason_t values. */
+static const enum_mapping_t reason_map[] =
+{
+ { "edited", svn_wc_conflict_reason_edited },
+ { "deleted", svn_wc_conflict_reason_deleted },
+ { "missing", svn_wc_conflict_reason_missing },
+ { "obstructed", svn_wc_conflict_reason_obstructed },
+ { "added", svn_wc_conflict_reason_added },
+ { NULL, 0 }
+};
+
+/* Set *RESULT to the enumeration value (as a plain 'int') that MAP
+ * associates with the string STR.
+ * In MAP, a null STR field marks the end of the map.
+ * Return an SVN_ERR_CL_ARG_PARSING_ERROR error if STR is not found in MAP.
+ * POOL is currently unused.  (Adapted from
+ * subversion/libsvn_wc/tree_conflicts.c; the original parsed a field
+ * between START and END pointers, which no longer applies here.)
+ */
+static svn_error_t *
+read_enum_field(int *result,
+                const enum_mapping_t *map,
+                const char *str,
+                apr_pool_t *pool)
+{
+  int i;
+
+  /* Find STR in MAP; error if not found. */
+  for (i = 0; ; i++)
+    {
+      if (map[i].str == NULL)
+        return svn_error_createf(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
+                                 "Unrecognised parameter value: '%s'", str);
+      if (strcmp(str, map[i].str) == 0)
+        break;
+    }
+
+  *result = map[i].val;
+  return SVN_NO_ERROR;
+}
+
+/* Return the string that MAP associates with ENUM_VAL, or NULL if
+ * ENUM_VAL does not appear in MAP. */
+static const char*
+get_enum_str(const enum_mapping_t *map,
+             int enum_val)
+{
+  const enum_mapping_t *entry;
+
+  for (entry = map; entry->str != NULL; entry++)
+    if (entry->val == enum_val)
+      return entry->str;
+
+  return NULL;
+}
+
+/* Print the valid string values of MAP to stdout, each preceded by a
+ * space, on a single line. */
+static void
+print_enum_map(const enum_mapping_t *map,
+               apr_pool_t *pool)
+{
+  int i;
+  for (i = 0; map[i].str != NULL; i++)
+    svn_error_clear(svn_cmdline_fprintf(stdout, pool,
+                                        " %s", map[i].str));
+}
+
+/* Raise a tree conflict on the working-copy node described by the 13
+ * positional arguments in ARGV, in this order (matching the usage text
+ * printed by help()):
+ *   WC_PATH NODE_KIND OPERATION ACTION REASON
+ *   REPOS_URL1 PATH_IN_REPOS1 PEG_REV1 NODE_KIND1
+ *   REPOS_URL2 PATH_IN_REPOS2 PEG_REV2 NODE_KIND2
+ * Return SVN_ERR_CL_ARG_PARSING_ERROR if ARGC != 13 or an enum argument
+ * is unrecognised.
+ */
+static svn_error_t *
+raise_tree_conflict(int argc, const char **argv, apr_pool_t *pool)
+{
+  int i = 0;
+  svn_wc_conflict_version_t *left, *right;
+  svn_wc_conflict_description2_t *c;
+  svn_wc_context_t *wc_ctx;
+
+  /* Conflict description parameters */
+  const char *wc_path, *wc_abspath;
+  const char *repos_url1, *repos_url2, *path_in_repos1, *path_in_repos2;
+  int operation, action, reason;
+  long peg_rev1, peg_rev2;
+  int kind, kind1, kind2;
+
+  if (argc != 13)
+    return svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
+                            "Wrong number of arguments");
+
+  /* Read the parameters */
+  wc_path = svn_dirent_internal_style(argv[i++], pool);
+  SVN_ERR(read_enum_field(&kind, node_kind_map, argv[i++], pool));
+  SVN_ERR(read_enum_field(&operation, operation_map, argv[i++], pool));
+  SVN_ERR(read_enum_field(&action, action_map, argv[i++], pool));
+  SVN_ERR(read_enum_field(&reason, reason_map, argv[i++], pool));
+  repos_url1 = argv[i++];
+  path_in_repos1 = argv[i++];
+  /* NOTE(review): atol() silently yields 0 for non-numeric input. */
+  peg_rev1 = atol(argv[i++]);
+  SVN_ERR(read_enum_field(&kind1, node_kind_map, argv[i++], pool));
+  repos_url2 = argv[i++];
+  path_in_repos2 = argv[i++];
+  peg_rev2 = atol(argv[i++]);
+  SVN_ERR(read_enum_field(&kind2, node_kind_map, argv[i++], pool));
+
+
+  /* Allocate and fill in the description data structures */
+  SVN_ERR(svn_dirent_get_absolute(&wc_abspath, wc_path, pool));
+  left = svn_wc_conflict_version_create2(repos_url1, NULL, path_in_repos1,
+                                         peg_rev1, kind1, pool);
+  right = svn_wc_conflict_version_create2(repos_url2, NULL, path_in_repos2,
+                                          peg_rev2, kind2, pool);
+  c = svn_wc_conflict_description_create_tree2(wc_abspath, kind,
+                                               operation, left, right, pool);
+  c->action = (svn_wc_conflict_action_t)action;
+  c->reason = (svn_wc_conflict_reason_t)reason;
+
+  /* Raise the conflict */
+  SVN_ERR(svn_wc_context_create(&wc_ctx, NULL, pool, pool));
+  SVN_ERR(svn_wc__add_tree_conflict(wc_ctx, c, pool));
+
+  return SVN_NO_ERROR;
+}
+
+
+static void
+help(const apr_getopt_option_t *options, apr_pool_t *pool)
+{
+ svn_error_clear
+ (svn_cmdline_fprintf
+ (stdout, pool,
+ _("usage: svnraisetreeconflict [OPTIONS] WC_PATH NODE_KIND OPERATION ACTION REASON REPOS_URL1 PATH_IN_REPOS1 PEG_REV1 NODE_KIND1 REPOS_URL2 PATH_IN_REPOS2 PEG_REV2 NODE_KIND2\n\n"
+ " Mark the working-copy node WC_PATH as being the victim of a tree conflict.\n"
+ "\n"
+ " WC_PATH's parent directory must be a working copy, otherwise a\n"
+ " tree conflict cannot be raised.\n"
+ "\n"
+ "Valid options:\n")));
+ while (options->description)
+ {
+ const char *optstr;
+ svn_opt_format_option(&optstr, options, TRUE, pool);
+ svn_error_clear(svn_cmdline_fprintf(stdout, pool, " %s\n", optstr));
+ ++options;
+ }
+ svn_error_clear(svn_cmdline_fprintf(stdout, pool,
+ _("\n"
+ "Valid enum argument values:\n"
+ " NODE_KIND, NODE_KIND1, NODE_KIND2:\n"
+ " ")));
+ print_enum_map(node_kind_map, pool);
+ svn_error_clear(svn_cmdline_fprintf(stdout, pool,
+ _("\n"
+ " OPERATION:\n"
+ " ")));
+ print_enum_map(operation_map, pool);
+ svn_error_clear(svn_cmdline_fprintf(stdout, pool,
+ _("\n"
+ " ACTION (what svn tried to do):\n"
+ " ")));
+ print_enum_map(action_map, pool);
+ svn_error_clear(svn_cmdline_fprintf(stdout, pool,
+ _("\n"
+ " REASON (what local change made svn fail):\n"
+ " ")));
+ print_enum_map(reason_map, pool);
+ svn_error_clear(svn_cmdline_fprintf(stdout, pool,
+ _("\n"
+ " REPOS_URL1, REPOS_URL2:\n"
+ " The URL of the repository itself, e.g.: file://usr/repos\n"
+ " PATH_IN_REPOS1, PATH_IN_REPOS2:\n"
+ " The complete path of the node in the repository, e.g.: sub/dir/foo\n"
+ " PEG_REV1, PEG_REV2:\n"
+ " The revision number at which the given path is relevant.\n"
+ "\n"
+ "Example:\n"
+ " svnraisetreeconflict ./foo %s %s %s %s file://usr/repos sub/dir/foo 1 %s file://usr/repos sub/dir/foo 3 %s\n\n"),
+ get_enum_str(node_kind_map, svn_node_file),
+ get_enum_str(operation_map, svn_wc_operation_update),
+ get_enum_str(action_map, svn_wc_conflict_action_delete),
+ get_enum_str(reason_map, svn_wc_conflict_reason_deleted),
+ get_enum_str(node_kind_map, svn_node_file),
+ get_enum_str(node_kind_map, svn_node_none)
+ ));
+}
+
+
+/* Version compatibility check */
+static svn_error_t *
+check_lib_versions(void)
+{
+ static const svn_version_checklist_t checklist[] =
+ {
+ { "svn_subr", svn_subr_version },
+ { "svn_wc", svn_wc_version },
+ { NULL, NULL }
+ };
+ SVN_VERSION_DEFINE(my_version);
+
+ return svn_ver_check_list2(&my_version, checklist, svn_ver_equal);
+}
+
+/*
+ * On success, leave *EXIT_CODE untouched and return SVN_NO_ERROR. On error,
+ * either return an error to be displayed, or set *EXIT_CODE to non-zero and
+ * return SVN_NO_ERROR.
+ */
+static svn_error_t *
+sub_main(int *exit_code, int argc, const char *argv[], apr_pool_t *pool)
+{
+ apr_getopt_t *os;
+ const apr_getopt_option_t options[] =
+ {
+ {"help", 'h', 0, N_("display this help")},
+ {"version", OPT_VERSION, 0,
+ N_("show program version information")},
+ {0, 0, 0, 0}
+ };
+ apr_array_header_t *remaining_argv;
+
+ /* Check library versions */
+ SVN_ERR(check_lib_versions());
+
+#if defined(WIN32) || defined(__CYGWIN__)
+ /* Set the working copy administrative directory name. */
+ if (getenv("SVN_ASP_DOT_NET_HACK"))
+ {
+ SVN_ERR(svn_wc_set_adm_dir("_svn", pool));
+ }
+#endif
+
+ SVN_ERR(svn_cmdline__getopt_init(&os, argc, argv, pool));
+
+ os->interleave = 1;
+ while (1)
+ {
+ int opt;
+ const char *arg;
+ apr_status_t status = apr_getopt_long(os, options, &opt, &arg);
+ if (APR_STATUS_IS_EOF(status))
+ break;
+ if (status != APR_SUCCESS)
+ {
+ usage(pool);
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
+ }
+
+ switch (opt)
+ {
+ case 'h':
+ help(options, pool);
+ return SVN_NO_ERROR;
+ case OPT_VERSION:
+ SVN_ERR(version(pool));
+ return SVN_NO_ERROR;
+ default:
+ usage(pool);
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
+ }
+ }
+
+ /* Convert the remaining arguments to UTF-8. */
+ remaining_argv = apr_array_make(pool, 0, sizeof(const char *));
+ while (os->ind < argc)
+ {
+ const char *s;
+
+ SVN_ERR(svn_utf_cstring_to_utf8(&s, os->argv[os->ind++], pool));
+ APR_ARRAY_PUSH(remaining_argv, const char *) = s;
+ }
+
+ if (remaining_argv->nelts < 1)
+ {
+ usage(pool);
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
+ }
+
+ /* Do the main task */
+ SVN_ERR(raise_tree_conflict(remaining_argv->nelts,
+ (const char **)remaining_argv->elts,
+ pool));
+
+ return SVN_NO_ERROR;
+}
+
+int
+main(int argc, const char *argv[])
+{
+ apr_pool_t *pool;
+ int exit_code = EXIT_SUCCESS;
+ svn_error_t *err;
+
+ /* Initialize the app. */
+ if (svn_cmdline_init("svnraisetreeconflict", stderr) != EXIT_SUCCESS)
+ return EXIT_FAILURE;
+
+ /* Create our top-level pool. Use a separate mutexless allocator,
+ * given this application is single threaded.
+ */
+ pool = apr_allocator_owner_get(svn_pool_create_allocator(FALSE));
+
+ err = sub_main(&exit_code, argc, argv, pool);
+
+ /* Flush stdout and report if it fails. It would be flushed on exit anyway
+ but this makes sure that output is not silently lost if it fails. */
+ err = svn_error_compose_create(err, svn_cmdline_fflush(stdout));
+
+ if (err)
+ {
+ exit_code = EXIT_FAILURE;
+ svn_cmdline_handle_exit_error(err, NULL, "svnraisetreeconflict: ");
+ }
+
+ svn_pool_destroy(pool);
+ return exit_code;
+}
diff --git a/tools/dev/trails.py b/tools/dev/trails.py
new file mode 100755
index 0000000..917d234
--- /dev/null
+++ b/tools/dev/trails.py
@@ -0,0 +1,229 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+## See the usage() function for operating instructions. ##
+
+import re
+try:
+ # Python >=2.6
+ from functools import reduce
+except ImportError:
+ # Python <2.6
+ pass
+import sys
+import operator
+
# Matches one trail log line, e.g.:
#   (txn_body_foo, subversion/libsvn_fs/tree.c, 123, 1): (get, nodes), ...
# Raw strings avoid invalid-escape-sequence warnings for \( \- \. on
# modern Python; the matched text is unchanged.
_re_trail = re.compile(r'\((?P<txn_body>[a-z_]*), (?P<filename>[a-z_\-./]*), (?P<lineno>[0-9]*), (?P<txn>0|1)\): (?P<ops>.*)')

# Matches a single "(table, operation)" pair inside the ops list.
_re_table_op = re.compile(r'\(([a-z]*), ([a-z]*)\)')

_separator = '------------------------------------------------------------\n'
+
def parse_trails_log(infile):
    """Parse a -DSVN_FS__TRAIL_DEBUG log from INFILE.

    Return a list of (txn_body, trail) tuples, where txn_body is a
    (function_name, filename, lineno) tuple and trail is a list of
    (table, operation) pairs in execution order.  Trails that did not
    use a txn are skipped.  Exits the process on malformed input.
    """
    trails = []
    # Iterate the file lazily instead of readlines() (no need to load the
    # whole log into memory); enumerate replaces the manual line counter.
    for lineno, line in enumerate(infile, 1):
        m = _re_trail.match(line)
        if not m:
            sys.stderr.write('Invalid input, line %u:\n%s\n' % (lineno, line))
            sys.exit(1)

        txn = int(m.group('txn'))
        if not txn:
            ### We're not interested in trails that don't use txns at this point.
            continue

        txn_body = (m.group('txn_body'), m.group('filename'),
                    int(m.group('lineno')))
        # The log records the most recent operation first; restore
        # execution order.
        trail = _re_table_op.findall(m.group('ops'))
        trail.reverse()

        if not trail:
            sys.stderr.write('Warning! Empty trail at line %u:\n%s' % (lineno, line))

        trails.append((txn_body, trail))

    return trails
+
+
def output_summary(trails, outfile):
    """Write overall statistics (trail count, total/max/median/average ops)
    for TRAILS to OUTFILE."""
    ops = sorted(len(trail) for (txn_body, trail) in trails)

    total_trails = len(ops)
    # sum() replaces reduce(operator.add, ops); same result, and it also
    # copes with an empty list.
    total_ops = sum(ops)
    max_ops = ops[-1]
    # Floor division: '/' yields a float index on Python 3 (TypeError).
    median_ops = ops[total_trails // 2]
    average_ops = float(total_ops) / total_trails

    outfile.write(_separator)
    outfile.write('Summary\n')
    outfile.write(_separator)
    outfile.write('Total number of trails: %10i\n' % total_trails)
    outfile.write('Total number of ops: %10i\n' % total_ops)
    outfile.write('max ops/trail: %10i\n' % max_ops)
    outfile.write('median ops/trail: %10i\n' % median_ops)
    outfile.write('average ops/trail: %10.2f\n' % average_ops)
    outfile.write('\n')
+
+
# custom compare function for (item, frequency) pairs
def _freqtable_cmp(a_b, c_d):
    """Order by descending frequency, breaking ties by ascending item.

    Returns a negative/zero/positive int, like Python 2's cmp().  The
    original version relied on cmp() (gone in Python 3) and had a bug:
    the tie-break compared the item against the already-overwritten
    comparison result instead of against the other item.
    """
    (a, b) = a_b
    (c, d) = c_d
    # (x > y) - (x < y) is the portable replacement for cmp(x, y).
    result = (b < d) - (b > d)      # higher frequency sorts first
    if not result:
        result = (a > c) - (a < c)  # equal frequency: ascending item
    return result
+
def list_frequencies(list):
    """
    Given a list, return a list composed of (item, frequency)
    pairs sorted by descending frequency, ties by ascending item.

    NOTE: the parameter name shadows the builtin list(); it is kept
    only for backward compatibility with existing callers.
    """
    counter = {}
    for item in list:
        counter[item] = counter.get(item, 0) + 1

    # The original called the builtin list() here, but the parameter
    # shadows it, raising "TypeError: 'list' object is not callable".
    # sorted() with a key also replaces the Python-2-only sort(cmp) call.
    return sorted(counter.items(), key=lambda pair: (-pair[1], pair[0]))
+
+
def output_trail_length_frequencies(trails, outfile):
    """Write a frequency table of trail lengths (ops per trail) to OUTFILE."""
    lengths = [len(trail) for (txn_body, trail) in trails]
    total_trails = len(lengths)

    outfile.write(_separator)
    outfile.write('Trail length frequencies\n')
    outfile.write(_separator)
    outfile.write('ops/trail frequency percentage\n')
    for (length, freq) in list_frequencies(lengths):
        pct = float(freq) * 100 / total_trails
        outfile.write('%4i %6i %5.2f\n' % (length, freq, pct))
    outfile.write('\n')
+
+
def output_trail(outfile, trail, column=0):
    """Write TRAIL's ops to OUTFILE as a comma-separated list, wrapping
    at roughly 75 characters and indenting continuation lines by COLUMN."""
    if not trail:
        outfile.write('<empty>\n')
        return

    current = str(trail[0])
    for op in trail[1:]:
        piece = str(op)
        if len(current) + len(piece) > 75 - column:
            # Line is full: emit it and start a new, indented one.
            outfile.write('%s,\n' % current)
            outfile.write(' ' * column)
            current = piece
        else:
            current = '%s, %s' % (current, piece)
    outfile.write('%s\n' % current)

    outfile.write('\n')
+
+
def output_trail_frequencies(trails, outfile):
    """Write each distinct trail with its frequency, most frequent first."""
    total_trails = len(trails)

    # Lists are unhashable; freeze each trail as a tuple so it can be
    # counted by list_frequencies().
    hashable = [(txn_body, tuple(trail)) for (txn_body, trail) in trails]
    frequencies = list_frequencies(hashable)

    outfile.write(_separator)
    outfile.write('Trail frequencies\n')
    outfile.write(_separator)
    outfile.write('frequency percentage ops/trail trail\n')
    for (((txn_body, filename, line), trail), freq) in frequencies:
        pct = float(freq) * 100 / total_trails
        outfile.write('-- %s - %s:%u --\n' % (txn_body, filename, line))
        outfile.write('%6i %5.2f %4i ' % (freq, pct, len(trail)))
        output_trail(outfile, trail, 37)
+
+
def output_txn_body_frequencies(trails, outfile):
    """Write how often each txn_body function appears, most frequent first."""
    bodies = [txn_body for (txn_body, trail) in trails]

    total_trails = len(trails)
    frequencies = list_frequencies(bodies)

    outfile.write(_separator)
    outfile.write('txn_body frequencies\n')
    outfile.write(_separator)
    outfile.write('frequency percentage txn_body\n')
    for ((txn_body, filename, line), freq) in frequencies:
        pct = float(freq) * 100 / total_trails
        outfile.write('%6i %5.2f %s - %s:%u\n'
                      % (freq, pct, txn_body, filename, line))
+
+
def usage(pgm):
    """Write operating instructions for program PGM to stderr."""
    w = sys.stderr.write
    w("%s: a program for analyzing Subversion trail usage statistics.\n" % pgm)
    w("\n")
    w("Usage:\n")
    w("\n")
    # Fixed a doubled word ("cause it / it to print") in the original text.
    w(" Compile Subversion with -DSVN_FS__TRAIL_DEBUG, which will cause it\n")
    w(" to print trail statistics to stderr. Save the stats to a file,\n")
    w(" invoke %s on the file, and ponder the output.\n" % pgm)
    w("\n")
+
+
if __name__ == '__main__':
    # Accept at most one argument: the trail log file (default: stdin).
    if len(sys.argv) > 2:
        sys.stderr.write("Error: too many arguments\n\n")
        usage(sys.argv[0])
        sys.exit(1)

    if len(sys.argv) == 1:
        infile = sys.stdin
    else:
        try:
            infile = open(sys.argv[1])
        except IOError:
            sys.stderr.write("Error: unable to open '%s'\n\n" % sys.argv[1])
            usage(sys.argv[0])
            sys.exit(1)

    try:
        trails = parse_trails_log(infile)
    finally:
        # Close the log file (the original leaked it), but never stdin.
        if infile is not sys.stdin:
            infile.close()

    output_summary(trails, sys.stdout)
    output_trail_length_frequencies(trails, sys.stdout)
    output_trail_frequencies(trails, sys.stdout)
    output_txn_body_frequencies(trails, sys.stdout)
diff --git a/tools/dev/unix-build/Makefile.svn b/tools/dev/unix-build/Makefile.svn
new file mode 100644
index 0000000..2c0561c
--- /dev/null
+++ b/tools/dev/unix-build/Makefile.svn
@@ -0,0 +1,2112 @@
+# vim: noexpandtab tabstop=8 shiftwidth=8 syntax=make
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# WARNING: This may or may not work on your system. This Makefile is
+# an example, rather than a ready-made universal solution.
+
+# ****************************************************************
+# ** IMPORTANT NOTE FOR SVN COMMITTERS: READ THIS. **
+# ** **
+# ****************************************************************
+# | This Makefile is used by the bb-openbsd buildbot |
+# | (http://ci.apache.org/builders/bb-openbsd). Please check |
+# | the bot's health after making changes to this file. |
+# |______________________________________________________________|
+
+PERL ?= yes
+ENABLE_PERL_BINDINGS = $(PERL)
+THREADING ?= yes
+ifeq ($(THREADING),yes)
+JAVA ?= yes
+else
+JAVA ?= no
+endif
+ENABLE_JAVA_BINDINGS = $(JAVA)
+USE_APR_ICONV ?= no # set to yes to use APR iconv instead of GNU iconv
+PARALLEL ?= 1
+CLEANUP ?= 1
+EXCLUSIVE_WC_LOCKS ?= 1
+USE_HTTPV1 ?= no
+USE_AUTHZ_SHORT_CIRCUIT ?= no
+RAMDISK ?= /ramdisk
+
+PWD = $(shell pwd)
+UNAME = $(shell uname)
+RUBY = $(shell which ruby 2>/dev/null)
+ifeq ($(RUBY),)
+RUBY = $(shell which ruby24 2>/dev/null)
+ifeq ($(RUBY),)
+RUBY = $(shell which ruby23 2>/dev/null)
+ifeq ($(RUBY),)
+RUBY = $(shell which ruby22 2>/dev/null)
+ifeq ($(RUBY),)
+RUBY = $(shell which ruby21 2>/dev/null)
+ifeq ($(RUBY),)
+RUBY = $(shell which ruby20 2>/dev/null)
+ifeq ($(RUBY),)
+RUBY = $(shell which ruby19 2>/dev/null)
+ifeq ($(RUBY),)
+RUBY = $(shell which ruby18)
+endif # 1.8
+endif # 1.9
+endif # 2.0
+endif # 2.1
+endif # 2.2
+endif # 2.3
+endif # 2.4
+
+TAG ?= none
+ifeq ($(TAG),none)
+BRANCH ?= trunk
+else
+BRANCH = $(TAG)
+endif
+WC ?= $(BRANCH)
+BRANCH_MAJOR = $(shell echo $(BRANCH) | \
+ sed -e 's/\([0-9]\)\.\([0-9]\)\.[x0-9].*$$/\1.\2/')
+SVN_REL_WC = svn-$(WC)
+SVN_WC = $(PWD)/$(SVN_REL_WC)
+PREFIX = $(PWD)/prefix
+SVN_PREFIX = $(PREFIX)/svn-$(WC)
+DISTDIR = $(PWD)/distfiles
+SRCDIR = $(PWD)/src
+OBJDIR = $(PWD)/objdir
+
+BDB_MAJOR_VER = 4.7
+BDB_VER = $(BDB_MAJOR_VER).25
+APR_VER = 1.5.2
+APR_ICONV_VER = 1.2.1
+GNU_ICONV_VER = 1.15
+APR_UTIL_VER = 1.5.4
+HTTPD_VER = 2.2.32
+NEON_VER = 0.30.2
+SERF_VER = 1.3.9
+SERF_OLD_VER = 0.3.1
+CYRUS_SASL_VER = 2.1.25
+SQLITE_VER = 3160200
+LIBMAGIC_VER = 5.30
+RUBY_VER = 2.4.2
+BZ2_VER = 1.0.6
+PYTHON_VER = 2.7.13
+JUNIT_VER = 4.10
+GETTEXT_VER = 0.19.8.1
+LZ4_VER = 1.7.5
+
+BDB_DIST = db-$(BDB_VER).tar.gz
+APR_ICONV_DIST = apr-iconv-$(APR_ICONV_VER).tar.gz
+GNU_ICONV_DIST = libiconv-$(GNU_ICONV_VER).tar.gz
+NEON_DIST = neon-$(NEON_VER).tar.gz
+SQLITE_DIST = sqlite-autoconf-$(SQLITE_VER).tar.gz
+CYRUS_SASL_DIST = cyrus-sasl-$(CYRUS_SASL_VER).tar.gz
+HTTPD_DIST = httpd-$(HTTPD_VER).tar.gz
+LIBMAGIC_DIST = file-$(LIBMAGIC_VER).tar.gz
+RUBY_DIST = ruby-$(RUBY_VER).tar.gz
+BZ2_DIST = bzip2-$(BZ2_VER).tar.gz
+PYTHON_DIST = Python-$(PYTHON_VER).tgz
+JUNIT_DIST = junit-${JUNIT_VER}.jar
+GETTEXT_DIST = gettext-$(GETTEXT_VER).tar.gz
+LZ4_DIST = lz4-$(LZ4_VER).tar.gz
+
+SHA256_${BDB_DIST} = f14fd96dd38915a1d63dcb94a63fbb8092334ceba6b5060760427096f631263e
+SHA256_${APR_ICONV_DIST} = 19381959d50c4a5f3b9c84d594a5f9ffb3809786919b3058281f4c87e1f4b245
+SHA256_${GNU_ICONV_DIST} = ccf536620a45458d26ba83887a983b96827001e92a13847b45e4925cc8913178
+SHA256_${HTTPD_DIST} = b6e1528779f99c301d6438d89ae892a311619b43a39f16297f9eabd4a8d16cb8
+SHA256_${NEON_DIST} = db0bd8cdec329b48f53a6f00199c92d5ba40b0f015b153718d1b15d3d967fbca
+SHA256_${CYRUS_SASL_DIST} = 418c16e6240a4f9b637cbe3d62937b9675627bad27c622191d47de8686fe24fe
+SHA256_${SQLITE_DIST} = 65cc0c3e9366f50c0679c5ccd31432cea894bc4a3e8947dabab88c8693263615
+SHA256_${LIBMAGIC_DIST} = 694c2432e5240187524c9e7cf1ec6acc77b47a0e19554d34c14773e43dbbf214
+SHA256_${RUBY_DIST} = 93b9e75e00b262bc4def6b26b7ae8717efc252c47154abb7392e54357e6c8c9c
+SHA256_${BZ2_DIST} = a2848f34fcd5d6cf47def00461fcb528a0484d8edef8208d6d2e2909dc61d9cd
+SHA256_${PYTHON_DIST} = a4f05a0720ce0fd92626f0278b6b433eee9a6173ddf2bced7957dfb599a5ece1
+SHA256_${JUNIT_DIST} = 36a747ca1e0b86f6ea88055b8723bb87030d627766da6288bf077afdeeb0f75a
+SHA256_${GETTEXT_DIST} = ff942af0e438ced4a8b0ea4b0b6e0d6d657157c5e2364de57baa279c1c125c43
+SHA256_${LZ4_DIST} = 0190cacd63022ccb86f44fa5041dc6c3804407ad61550ca21c382827319e7e7e
+
+define do_check_sha256
+if [ -x /bin/sha256 ]; then \
+ (cd $(DISTDIR) && \
+ echo "SHA256 (${1}) = ${SHA256_${1}}" | /bin/sha256 -C /dev/stdin "${1}"); \
+elif [ -x /usr/bin/sha256sum ]; then \
+ (cd $(DISTDIR) && \
+ echo "${SHA256_${1}} ${1}" | /usr/bin/sha256sum --quiet --check); \
+else \
+ echo "Error: No tool found to verify checksum"; \
+ false; \
+fi
+endef
+
+DISTFILES = $(DISTDIR)/$(NEON_DIST) \
+ $(DISTDIR)/$(SERF_DIST) \
+ $(DISTDIR)/$(SQLITE_DIST) \
+ $(DISTDIR)/$(HTTPD_DIST) \
+ $(DISTDIR)/$(APR_ICONV_DIST) \
+ $(DISTDIR)/$(GNU_ICONV_DIST) \
+ $(DISTDIR)/$(CYRUS_SASL_DIST) \
+ $(DISTDIR)/$(LIBMAGIC_DIST) \
+ $(DISTDIR)/$(RUBY_DIST) \
+ $(DISTDIR)/$(BZ2_DIST) \
+ $(DISTDIR)/$(PYTHON_DIST) \
+ $(DISTDIR)/$(JUNIT_DIST) \
+ $(DISTDIR)/$(GETTEXT_DIST)
+
+FETCH_CMD = wget -c
+
+SUBVERSION_REPOS_URL = https://svn.apache.org/repos/asf/subversion
+BDB_URL = http://download.oracle.com/berkeley-db/$(BDB_DIST)
+APR_URL = https://svn.apache.org/repos/asf/apr/apr
+APR_ICONV_URL = https://www.apache.org/dist/apr/$(APR_ICONV_DIST)
+GNU_ICONV_URL = https://ftp.gnu.org/pub/gnu/libiconv/$(GNU_ICONV_DIST)
+APR_UTIL_URL = https://svn.apache.org/repos/asf/apr/apr-util
+HTTPD_URL = https://archive.apache.org/dist/httpd/$(HTTPD_DIST)
+NEON_URL = http://webdav.org/neon/$(NEON_DIST)
+SERF_URL = https://svn.apache.org/repos/asf/serf/tags/$(SERF_VER)
+SERF_OLD_URL = https://svn.apache.org/repos/asf/serf/tags/$(SERF_OLD_VER)
+SQLITE_URL = https://www.sqlite.org/2017/$(SQLITE_DIST)
+CYRUS_SASL_URL = ftp://ftp.andrew.cmu.edu/pub/cyrus-mail/$(CYRUS_SASL_DIST)
+LIBMAGIC_URL = ftp://ftp.astron.com/pub/file/$(LIBMAGIC_DIST)
+RUBY_URL = https://cache.ruby-lang.org/pub/ruby/2.4/$(RUBY_DIST)
+BZ2_URL = http://bzip.org/$(BZ2_VER)/$(BZ2_DIST)
+PYTHON_URL = https://python.org/ftp/python/$(PYTHON_VER)/$(PYTHON_DIST)
+JUNIT_URL = https://downloads.sourceforge.net/project/junit/junit/$(JUNIT_VER)/$(JUNIT_DIST)
+GETTEXT_URL = https://ftp.gnu.org/pub/gnu/gettext/$(GETTEXT_DIST)
+LZ4_URL = https://github.com/lz4/lz4/archive/v$(LZ4_VER).tar.gz
+
+
+BDB_SRCDIR = $(SRCDIR)/db-$(BDB_VER)
+APR_SRCDIR = $(SRCDIR)/apr-$(APR_VER)
+APR_ICONV_SRCDIR = $(SRCDIR)/apr-iconv-$(APR_ICONV_VER)
+GNU_ICONV_SRCDIR = $(SRCDIR)/libiconv-$(GNU_ICONV_VER)
+APR_UTIL_SRCDIR = $(SRCDIR)/apr-util-$(APR_UTIL_VER)
+HTTPD_SRCDIR = $(SRCDIR)/httpd-$(HTTPD_VER)
+NEON_SRCDIR = $(SRCDIR)/neon-$(NEON_VER)
+SERF_SRCDIR = $(SRCDIR)/serf-$(SERF_VER)
+SERF_OLD_SRCDIR = $(SRCDIR)/serf-$(SERF_OLD_VER)
+SQLITE_SRCDIR = $(SRCDIR)/sqlite-autoconf-$(SQLITE_VER)
+CYRUS_SASL_SRCDIR = $(SRCDIR)/cyrus-sasl-$(CYRUS_SASL_VER)
+LIBMAGIC_SRCDIR = $(SRCDIR)/file-$(LIBMAGIC_VER)
+RUBY_SRCDIR = $(SRCDIR)/ruby-$(RUBY_VER)
+BZ2_SRCDIR = $(SRCDIR)/bzip2-$(BZ2_VER)
+PYTHON_SRCDIR = $(SRCDIR)/Python-$(PYTHON_VER)
+GETTEXT_SRCDIR = $(SRCDIR)/gettext-$(GETTEXT_VER)
+LZ4_SRCDIR = ${SRCDIR}/lz4-$(LZ4_VER)
+SVN_SRCDIR = $(SVN_WC)
+
+BDB_OBJDIR = $(OBJDIR)/db-$(BDB_VER)
+APR_OBJDIR = $(OBJDIR)/apr-$(APR_VER)
+APR_ICONV_OBJDIR = $(OBJDIR)/apr-iconv-$(APR_ICONV_VER)
+GNU_ICONV_OBJDIR = $(OBJDIR)/libiconv-$(GNU_ICONV_VER)
+APR_UTIL_OBJDIR = $(OBJDIR)/apr-util-$(APR_UTIL_VER)
+HTTPD_OBJDIR = $(OBJDIR)/httpd-$(HTTPD_VER)
+NEON_OBJDIR = $(OBJDIR)/neon-$(NEON_VER)
+SERF_OBJDIR = $(OBJDIR)/serf-$(SERF_VER)
+SERF_OLD_OBJDIR = $(OBJDIR)/serf-$(SERF_OLD_VER)
+SQLITE_OBJDIR = $(OBJDIR)/sqlite-$(SQLITE_VER)
+CYRUS_SASL_OBJDIR = $(OBJDIR)/cyrus-sasl-$(CYRUS_SASL_VER)
+LIBMAGIC_OBJDIR = $(OBJDIR)/file-$(LIBMAGIC_VER)
+RUBY_OBJDIR = $(OBJDIR)/ruby-$(RUBY_VER)
+BZ2_OBJDIR = $(OBJDIR)/bzip2-$(BZ2_VER)
+PYTHON_OBJDIR = $(OBJDIR)/python-$(PYTHON_VER)
+GETTEXT_OBJDIR = $(OBJDIR)/gettext-$(GETTEXT_VER)
+LZ4_OBJDIR = ${OBJDIR}/lz4-$(LZ4_VER)
+SVN_OBJDIR = $(OBJDIR)/$(SVN_REL_WC)
+
+# Tweak this for out-of-tree builds. Note that running individual
+# tests in the test suite won't work conveniently with out-of-tree
+# builds!
+svn_builddir ?=$(SVN_WC)
+
+ifdef PROFILE
+PROFILE_CFLAGS=-pg
+endif
+
+# We need this to make sure some targets below pick up the right libraries
+LD_LIBRARY_PATH=$(PREFIX)/apr/lib:$(PREFIX)/gettext/lib:$(PREFIX)/iconv/lib:$(PREFIX)/bdb/lib:$(PREFIX)/neon/lib:$(PREFIX)/serf/lib:$(PREFIX)/sqlite/lib:$(PREFIX)/cyrus-sasl/lib:$(PREFIX)/iconv/lib:$(PREFIX)/libmagic/lib:$(PREFIX)/ruby/lib:$(PREFIX)/python/lib:$(PREFIX)/svn-$(WC)/lib
+
+#######################################################################
+# Main targets.
+#######################################################################
+
+.PHONY: all reset clean nuke fetch
+
+all: dirs-create bdb-install apr-install iconv-install apr-util-install \
+ httpd-install neon-install serf-install serf-old-install \
+ sqlite-install cyrus-sasl-install libmagic-install \
+ ruby-install bz2-install python-install gettext-install \
+ lz4-install svn-install svn-bindings-install
+
+# Use these to start a build from the beginning.
+reset: dirs-reset bdb-reset apr-reset iconv-reset apr-util-reset \
+ httpd-reset neon-reset serf-reset serf-old-reset sqlite-reset \
+ cyrus-sasl-reset libmagic-reset ruby-reset python-reset \
+ bz2-reset gettext-reset lz4-reset svn-reset
+
+# Use to save disk space.
+clean: bdb-clean apr-clean iconv-clean apr-util-clean httpd-clean \
+ neon-clean serf-clean serf-old-clean sqlite-clean cyrus-sasl-clean \
+ libmagic-clean ruby-clean bz2-clean python-clean gettext-clean \
+ lz4-clean svn-clean
+
+# Nukes everything (including installed binaries!)
+# Use this to start ALL OVER AGAIN! Use with caution!
+nuke:
+ @echo
+ @echo "I will now remove the following directories PERMANENTLY:"
+ @echo
+ @echo " $(SRCDIR)"
+ @echo " $(OBJDIR)"
+ @echo " $(PREFIX)"
+ @echo
+ @echo -n 'Do you want me to continue? ([no]/yes): '
+ @read ANSWER ; \
+ case $$ANSWER in \
+ yes) echo "You said $$ANSWER. I will continue."; \
+ echo rm -rf $(SRCDIR) $(OBJDIR) $(PREFIX); \
+ rm -rf $(SRCDIR) $(OBJDIR) $(PREFIX); \
+ $(MAKE) reset; \
+ ;; \
+ "") echo "You said no."; \
+ ;; \
+ *) echo "You said $$ANSWER."; \
+ ;; \
+ esac
+
+fetch: $(DISTFILES)
+
+#######################################################################
+# directories
+#######################################################################
+
+dirs-create: $(PWD)/.dirs-created
+dirs-reset:
+ rm -f $(PWD)/.dirs-created
+
+$(PWD)/.dirs-created:
+ $(foreach d, $(PREFIX) $(DISTDIR) $(SRCDIR) $(OBJDIR), \
+ [ -d $(d) ] || mkdir -p $(d);)
+ touch $@
+
+#######################################################################
+# bdb
+#######################################################################
+
+bdb-retrieve: $(BDB_OBJDIR)/.retrieved
+bdb-configure: $(BDB_OBJDIR)/.configured
+bdb-compile: $(BDB_OBJDIR)/.compiled
+bdb-install: $(BDB_OBJDIR)/.installed
+bdb-reset:
+ $(foreach f, .retrieved .configured .compiled .installed, \
+ rm -f $(BDB_OBJDIR)/$(f);)
+
+bdb-clean:
+ -(cd $(BDB_SRCDIR)/build_unix/ && env MAKEFLAGS= make clean)
+
+# fetch distfile for bdb
+$(DISTDIR)/$(BDB_DIST):
+ cd $(DISTDIR) && $(FETCH_CMD) $(BDB_URL)
+
+# retrieve bdb
+$(BDB_OBJDIR)/.retrieved: $(DISTDIR)/$(BDB_DIST)
+ $(call do_check_sha256,$(BDB_DIST))
+ [ -d $(BDB_OBJDIR) ] || mkdir -p $(BDB_OBJDIR)
+ tar -C $(SRCDIR) -zxf $(DISTDIR)/$(BDB_DIST)
+ touch $@
+
+# configure bdb
+$(BDB_OBJDIR)/.configured: $(BDB_OBJDIR)/.retrieved
+ cd $(BDB_SRCDIR)/build_unix \
+ && env CFLAGS="-g $(PROFILE_CFLAGS)" GREP="`which grep`" \
+ ../dist/configure \
+ --prefix=$(PREFIX)/bdb \
+ --enable-debug
+ touch $@
+
+# compile bdb
+$(BDB_OBJDIR)/.compiled: $(BDB_OBJDIR)/.configured
+ (cd $(BDB_SRCDIR)/build_unix && env MAKEFLAGS= make)
+ touch $@
+
+# install bdb
+$(BDB_OBJDIR)/.installed: $(BDB_OBJDIR)/.compiled
+ (cd $(BDB_SRCDIR)/build_unix && env MAKEFLAGS= make install)
+ touch $@
+
+#######################################################################
+# apr
+#######################################################################
+
+apr-retrieve: $(APR_OBJDIR)/.retrieved
+apr-configure: $(APR_OBJDIR)/.configured
+apr-compile: $(APR_OBJDIR)/.compiled
+apr-install: $(APR_OBJDIR)/.installed
+apr-reset:
+ $(foreach f, .retrieved .configured .compiled .installed, \
+ rm -f $(APR_OBJDIR)/$(f);)
+
+apr-clean:
+ -(cd $(APR_OBJDIR) && env MAKEFLAGS= make clean)
+
+# retrieve apr if not present yet
+$(APR_OBJDIR)/.retrieved:
+ [ -d $(APR_OBJDIR) ] || mkdir -p $(APR_OBJDIR)
+ if [ ! -d $(APR_SRCDIR) ]; then \
+ svn export $(APR_URL)/tags/$(APR_VER)/ $(APR_SRCDIR); \
+ fi
+ touch $@
+
+ifeq ($(THREADING),yes)
+THREADS_FLAG=--enable-threads
+else
+THREADS_FLAG=--disable-threads
+endif
+
+ifdef POOL_DEBUG
+POOL_DEBUG_FLAG=--enable-pool-debug=all
+else
+# Map apr_palloc()/apr_pool_{clear,destroy}() to malloc()/free().
+# This also puts poison bytes into freed memory to help detect use after free.
+POOL_DEBUG_FLAG=--enable-pool-debug=yes
+endif
+
+# configure apr
+$(APR_OBJDIR)/.configured: $(APR_OBJDIR)/.retrieved $(BDB_OBJDIR)/.installed
+ cd $(APR_SRCDIR) && ./buildconf
+ cd $(APR_OBJDIR) \
+ && env CFLAGS="-O0 -g $(PROFILE_CFLAGS)" GREP="`which grep`" \
+ $(APR_SRCDIR)/configure \
+ --prefix=$(PREFIX)/apr \
+ --enable-maintainer-mode \
+ $(THREADS_FLAG) \
+ $(POOL_DEBUG_FLAG)
+ touch $@
+
+# compile apr
+$(APR_OBJDIR)/.compiled: $(APR_OBJDIR)/.configured
+ (cd $(APR_OBJDIR) && env MAKEFLAGS= make)
+ touch $@
+
+# install apr
+$(APR_OBJDIR)/.installed: $(APR_OBJDIR)/.compiled
+ (cd $(APR_OBJDIR) && env MAKEFLAGS= make install)
+ touch $@
+
+#######################################################################
+# apr-iconv
+#######################################################################
+
+apr-iconv-retrieve: $(APR_ICONV_OBJDIR)/.retrieved
+apr-iconv-configure: $(APR_ICONV_OBJDIR)/.configured
+apr-iconv-compile: $(APR_ICONV_OBJDIR)/.compiled
+apr-iconv-install: $(APR_ICONV_OBJDIR)/.installed
+apr-iconv-reset:
+ $(foreach f, .retrieved .configured .compiled .installed, \
+ rm -f $(APR_ICONV_OBJDIR)/$(f);)
+
+apr-iconv-clean:
+ -(cd $(APR_ICONV_OBJDIR) && env MAKEFLAGS= make clean)
+
+# fetch distfile for apr-iconv
+$(DISTDIR)/$(APR_ICONV_DIST):
+ cd $(DISTDIR) && $(FETCH_CMD) $(APR_ICONV_URL)
+
+# retrieve apr-iconv
+$(APR_ICONV_OBJDIR)/.retrieved: $(DISTDIR)/$(APR_ICONV_DIST)
+ $(call do_check_sha256,$(APR_ICONV_DIST))
+ [ -d $(APR_ICONV_OBJDIR) ] || mkdir -p $(APR_ICONV_OBJDIR)
+ tar -C $(SRCDIR) -zxf $(DISTDIR)/$(APR_ICONV_DIST)
+ touch $@
+
+# configure apr-iconv
+$(APR_ICONV_OBJDIR)/.configured: $(APR_ICONV_OBJDIR)/.retrieved \
+ $(APR_OBJDIR)/.installed
+ cd $(APR_ICONV_OBJDIR) \
+ && env CFLAGS="-g $(PROFILE_CFLAGS) -DAPR_POOL_DEBUG" \
+ GREP="`which grep`" \
+ $(APR_ICONV_SRCDIR)/configure \
+ --prefix=$(PREFIX)/apr \
+ --with-apr=$(PREFIX)/apr
+ touch $@
+
+# compile apr-iconv
+$(APR_ICONV_OBJDIR)/.compiled: $(APR_ICONV_OBJDIR)/.configured
+ (cd $(APR_ICONV_OBJDIR) \
+ && env MAKEFLAGS= make CPPFLAGS="-D_OSD_POSIX" CFLAGS="-g -O0 $(PROFILE_CFLAGS)")
+ touch $@
+
+# install apr-iconv
+$(APR_ICONV_OBJDIR)/.installed: $(APR_ICONV_OBJDIR)/.compiled
+ (cd $(APR_ICONV_OBJDIR) && env MAKEFLAGS= make install)
+ touch $@
+
+#######################################################################
+# gnu-iconv
+#######################################################################
+
+gnu-iconv-retrieve: $(GNU_ICONV_OBJDIR)/.retrieved
+gnu-iconv-configure: $(GNU_ICONV_OBJDIR)/.configured
+gnu-iconv-compile: $(GNU_ICONV_OBJDIR)/.compiled
+gnu-iconv-install: $(GNU_ICONV_OBJDIR)/.installed
+gnu-iconv-reset:
+ $(foreach f, .retrieved .configured .compiled .installed, \
+ rm -f $(GNU_ICONV_OBJDIR)/$(f);)
+
+gnu-iconv-clean:
+ -(cd $(GNU_ICONV_OBJDIR) && env MAKEFLAGS= make clean)
+ rm -f $(GNU_ICONV_OBJDIR)/lib_encodings.def.diff
+ rm -f $(GNU_ICONV_OBJDIR)/lib_aliases.gperf.diff
+
+# fetch distfile for gnu-iconv
+$(DISTDIR)/$(GNU_ICONV_DIST):
+ cd $(DISTDIR) && $(FETCH_CMD) $(GNU_ICONV_URL)
+
+$(GNU_ICONV_OBJDIR)/lib_encodings.def.diff:
+ mkdir -p $(dir $@)
+ echo > $@.tmp '--- lib/encodings.def.orig Wed Oct 24 23:41:41 2007'
+ echo >>$@.tmp '+++ lib/encodings.def Wed Oct 24 23:43:47 2007'
+ echo >>$@.tmp '@@ -37,6 +37,7 @@'
+ echo >>$@.tmp ' '
+ echo >>$@.tmp ' '
+ echo >>$@.tmp ' DEFENCODING(( "US-ASCII", /* IANA */'
+ echo >>$@.tmp '+ "646",'
+ echo >>$@.tmp ' "ASCII", /* IANA, JDK 1.1 */'
+ echo >>$@.tmp ' "ISO646-US", /* IANA */'
+ echo >>$@.tmp ' "ISO_646.IRV:1991", /* IANA */'
+ mv -f $@.tmp $@
+
+$(GNU_ICONV_OBJDIR)/lib_aliases.gperf.diff:
+ mkdir -p $(dir $@)
+ echo > $@.tmp '--- lib/aliases.gperf.orig Wed Oct 24 23:41:32 2007'
+ echo >>$@.tmp '+++ lib/aliases.gperf Wed Oct 24 23:47:38 2007'
+ echo >>$@.tmp '@@ -10,6 +10,7 @@ struct alias { int name; unsigned int encoding_index; '
+ echo >>$@.tmp ' %pic'
+ echo >>$@.tmp ' %%'
+ echo >>$@.tmp ' US-ASCII, ei_ascii'
+ echo >>$@.tmp '+646, ei_ascii'
+ echo >>$@.tmp ' ASCII, ei_ascii'
+ echo >>$@.tmp ' ISO646-US, ei_ascii'
+ echo >>$@.tmp ' ISO_646.IRV:1991, ei_ascii'
+ mv -f $@.tmp $@
+
+# retrieve gnu-iconv
+# Add 646 as an alias for ASCII to fix prop_test 22 on OpenBSD
+$(GNU_ICONV_OBJDIR)/.retrieved: $(DISTDIR)/$(GNU_ICONV_DIST) \
+ $(GNU_ICONV_OBJDIR)/lib_encodings.def.diff \
+ $(GNU_ICONV_OBJDIR)/lib_aliases.gperf.diff
+ $(call do_check_sha256,$(GNU_ICONV_DIST))
+ tar -C $(SRCDIR) -zxf $(DISTDIR)/$(GNU_ICONV_DIST)
+ cd $(SRCDIR)/libiconv-$(GNU_ICONV_VER) && \
+ patch -p0 < $(GNU_ICONV_OBJDIR)/lib_encodings.def.diff && \
+ patch -p0 < $(GNU_ICONV_OBJDIR)/lib_aliases.gperf.diff
+ touch $@
+
+# configure gnu-iconv
+$(GNU_ICONV_OBJDIR)/.configured: $(GNU_ICONV_OBJDIR)/.retrieved
+ cd $(SRCDIR)/libiconv-${GNU_ICONV_VER} && \
+ ${MAKE} -f Makefile.devel lib/aliases.h
+ cd $(GNU_ICONV_OBJDIR) \
+ && env CFLAGS="-g $(PROFILE_CFLAGS)" GREP="`which grep`"\
+ $(GNU_ICONV_SRCDIR)/configure \
+ --prefix=$(PREFIX)/iconv \
+ --enable-extra-encodings
+ touch $@
+
+# compile gnu-iconv
+$(GNU_ICONV_OBJDIR)/.compiled: $(GNU_ICONV_OBJDIR)/.configured
+ (cd $(GNU_ICONV_OBJDIR) && env MAKEFLAGS= make)
+ touch $@
+
+# install gnu-iconv
+$(GNU_ICONV_OBJDIR)/.installed: $(GNU_ICONV_OBJDIR)/.compiled
+ (cd $(GNU_ICONV_OBJDIR) && env MAKEFLAGS= make install)
+ touch $@
+
+#######################################################################
+# iconv
+#######################################################################
+
+.PHONY: iconv-install iconv-reset iconv-clean
+
+ifeq ($(USE_APR_ICONV),yes)
+iconv-install: apr-iconv-install
+iconv-reset: apr-iconv-reset
+iconv-clean: apr-iconv-clean
+else
+iconv-install: gnu-iconv-install
+iconv-reset: gnu-iconv-reset
+iconv-clean: gnu-iconv-clean
+endif
+
+#######################################################################
+# apr-util
+#######################################################################
+
+apr-util-retrieve: $(APR_UTIL_OBJDIR)/.retrieved
+apr-util-configure: $(APR_UTIL_OBJDIR)/.configured
+apr-util-compile: $(APR_UTIL_OBJDIR)/.compiled
+apr-util-install: $(APR_UTIL_OBJDIR)/.installed
+apr-util-reset:
+ $(foreach f, .retrieved .configured .compiled .installed, \
+ rm -f $(APR_UTIL_OBJDIR)/$(f);)
+
+apr-util-clean:
+ -(cd $(APR_UTIL_OBJDIR) && env MAKEFLAGS= make clean)
+
+
+# retrieve apr-util if not present yet
+$(APR_UTIL_OBJDIR)/.retrieved:
+ [ -d $(APR_UTIL_OBJDIR) ] || mkdir -p $(APR_UTIL_OBJDIR)
+ if [ ! -d $(APR_UTIL_SRCDIR) ]; then \
+ svn export $(APR_UTIL_URL)/tags/$(APR_UTIL_VER)/ \
+ $(APR_UTIL_SRCDIR); \
+ fi
+ touch $@
+
+ifeq ($(USE_APR_ICONV),yes)
+ICONV_FLAG=--with-iconv=$(PREFIX)/apr
+ICONV_OBJDIR=$(APR_ICONV_OBJDIR)
+else
+ICONV_FLAG=--with-iconv=$(PREFIX)/iconv
+ICONV_OBJDIR=$(GNU_ICONV_OBJDIR)
+endif
+
+# configure apr-util
+$(APR_UTIL_OBJDIR)/.configured: $(APR_UTIL_OBJDIR)/.retrieved \
+ $(APR_OBJDIR)/.installed $(ICONV_OBJDIR)/.installed
+ cd $(APR_UTIL_SRCDIR) && ./buildconf --with-apr=$(APR_SRCDIR)
+ cd $(APR_UTIL_OBJDIR) \
+ && env LD_LIBRARY_PATH=$(PREFIX)/bdb/lib \
+ CFLAGS="-O0 -g $(PROFILE_CFLAGS) -DAPR_POOL_DEBUG" \
+ GREP="`which grep`" \
+ $(APR_UTIL_SRCDIR)/configure \
+ --prefix=$(PREFIX)/apr \
+ --enable-maintainer-mode \
+ --with-apr=$(PREFIX)/apr \
+ --with-berkeley-db=$(PREFIX)/bdb \
+ $(ICONV_FLAG)
+ touch $@
+
+# compile apr-util
+$(APR_UTIL_OBJDIR)/.compiled: $(APR_UTIL_OBJDIR)/.configured
+ (cd $(APR_UTIL_OBJDIR) && env MAKEFLAGS= make)
+ touch $@
+
+# install apr-util
+$(APR_UTIL_OBJDIR)/.installed: $(APR_UTIL_OBJDIR)/.compiled
+ (cd $(APR_UTIL_OBJDIR) && env MAKEFLAGS= make install)
+ touch $@
+
+#######################################################################
+# httpd
+#######################################################################
+
+HTTPD_CONF= $(PREFIX)/httpd/conf/httpd-$(SVN_REL_WC).conf
+httpd-retrieve: $(HTTPD_OBJDIR)/.retrieved
+httpd-configure: $(HTTPD_OBJDIR)/.configured
+httpd-compile: $(HTTPD_OBJDIR)/.compiled
+httpd-install: $(HTTPD_OBJDIR)/.installed $(HTTPD_CONF)
+httpd-reset:
+ $(foreach f, .retrieved .configured .compiled .installed, \
+ rm -f $(HTTPD_OBJDIR)/$(f);)
+
+httpd-clean:
+ -(cd $(HTTPD_OBJDIR) && env MAKEFLAGS= make clean)
+ -rm ${HTTPD_OBJDIR}/chil-engine.diff
+ -rm ${HTTPD_OBJDIR}/ssl-set-state.diff
+ -rm ${HTTPD_OBJDIR}/acinclude.diff
+
+# fetch distfile for httpd
+$(DISTDIR)/$(HTTPD_DIST):
+ cd $(DISTDIR) && $(FETCH_CMD) $(HTTPD_URL)
+
+$(HTTPD_OBJDIR)/chil-engine.diff:
+ mkdir -p $(dir $@)
+ echo > $@.tmp '--- modules/ssl/ssl_engine_init.c.orig Mon Apr 14 13:20:57 2014'
+ echo >>$@.tmp '+++ modules/ssl/ssl_engine_init.c Mon Apr 14 13:21:22 2014'
+ echo >>$@.tmp '@@ -406,9 +406,11 @@ void ssl_init_Engine(server_rec *s, apr_pool_t *p)'
+ echo >>$@.tmp ' ssl_die();'
+ echo >>$@.tmp ' }'
+ echo >>$@.tmp ' '
+ echo >>$@.tmp '+#ifdef ENGINE_CTRL_CHIL_SET_FORKCHECK'
+ echo >>$@.tmp ' if (strEQ(mc->szCryptoDevice, "chil")) {'
+ echo >>$@.tmp ' ENGINE_ctrl(e, ENGINE_CTRL_CHIL_SET_FORKCHECK, 1, 0, 0);'
+ echo >>$@.tmp ' }'
+ echo >>$@.tmp '+#endif'
+ echo >>$@.tmp ' '
+ echo >>$@.tmp ' if (!ENGINE_set_default(e, ENGINE_METHOD_ALL)) {'
+ echo >>$@.tmp ' ap_log_error(APLOG_MARK, APLOG_ERR, 0, s,'
+ mv -f $@.tmp $@
+
+$(HTTPD_OBJDIR)/ssl-set-state.diff:
+ mkdir -p $(dir $@)
+ echo > $@.tmp '--- modules/ssl/ssl_toolkit_compat.h.orig Fri Feb 3 10:47:33 2017'
+ echo >>$@.tmp '+++ modules/ssl/ssl_toolkit_compat.h Fri Feb 3 10:52:17 2017'
+ echo >>$@.tmp '@@ -84,7 +84,9 @@'
+ echo >>$@.tmp ' #define SSL_get_state(ssl) SSL_state(ssl)'
+ echo >>$@.tmp ' #endif'
+ echo >>$@.tmp ' '
+ echo >>$@.tmp '+#ifndef HAVE_SSL_SET_STATE'
+ echo >>$@.tmp ' #define SSL_set_state(ssl,val) (ssl)->state = val'
+ echo >>$@.tmp '+#endif'
+ echo >>$@.tmp ' '
+ echo >>$@.tmp ' #define MODSSL_BIO_CB_ARG_TYPE const char'
+ echo >>$@.tmp ' #define MODSSL_CRYPTO_CB_ARG_TYPE const char'
+ mv -f $@.tmp $@
+
+$(HTTPD_OBJDIR)/acinclude.diff:
+ mkdir -p $(dir $@)
+ echo >$@.tmp '--- acinclude.m4.orig Fri Feb 3 11:05:08 2017'
+ echo >>$@.tmp '+++ acinclude.m4 Fri Feb 3 11:05:15 2017'
+ echo >>$@.tmp '@@ -455,6 +455,7 @@'
+ echo >>$@.tmp ' AC_CHECK_HEADERS([openssl/engine.h])'
+ echo >>$@.tmp ' AC_CHECK_FUNCS([SSLeay_version SSL_CTX_new], [], [liberrors="yes"])'
+ echo >>$@.tmp ' AC_CHECK_FUNCS([ENGINE_init ENGINE_load_builtin_engines])'
+ echo >>$@.tmp '+ AC_CHECK_FUNCS(SSL_set_state)'
+ echo >>$@.tmp ' else'
+ echo >>$@.tmp ' AC_CHECK_FUNCS([SSLC_library_version SSL_CTX_new], [], [liberrors="yes"])'
+ echo >>$@.tmp ' AC_CHECK_FUNCS(SSL_set_state)'
+ mv -f $@.tmp $@
+
+# retrieve httpd
+$(HTTPD_OBJDIR)/.retrieved: $(DISTDIR)/$(HTTPD_DIST) \
+ $(HTTPD_OBJDIR)/chil-engine.diff $(HTTPD_OBJDIR)/ssl-set-state.diff \
+ $(HTTPD_OBJDIR)/acinclude.diff
+ $(call do_check_sha256,$(HTTPD_DIST))
+ [ -d $(HTTPD_OBJDIR) ] || mkdir -p $(HTTPD_OBJDIR)
+ tar -C $(SRCDIR) -zxf $(DISTDIR)/$(HTTPD_DIST)
+ cd $(HTTPD_SRCDIR) && patch -p0 < $(HTTPD_OBJDIR)/chil-engine.diff
+ cd $(HTTPD_SRCDIR) && patch -p0 < $(HTTPD_OBJDIR)/ssl-set-state.diff
+ cd $(HTTPD_SRCDIR) && patch -p0 < $(HTTPD_OBJDIR)/acinclude.diff
+ cp $(HTTPD_SRCDIR)/modules/ssl/ssl_toolkit_compat.h \
+ $(HTTPD_SRCDIR)/modules/ssl/ssl_toolkit_compat.h.orig
+ sed '/^#define HAVE_SSL_RAND_EGD/d' \
+ < $(HTTPD_SRCDIR)/modules/ssl/ssl_toolkit_compat.h.orig \
+ > $(HTTPD_SRCDIR)/modules/ssl/ssl_toolkit_compat.h
+ cp $(HTTPD_SRCDIR)/modules/ssl/ssl_engine_vars.c \
+ $(HTTPD_SRCDIR)/modules/ssl/ssl_engine_vars.c.orig
+ sed 's/^\(#if (OPENSSL_VERSION_NUMBER >= 0x00908000)\)$$/\1 \&\& !defined(OPENSSL_NO_COMP)/' \
+ < $(HTTPD_SRCDIR)/modules/ssl/ssl_engine_vars.c.orig \
+ > $(HTTPD_SRCDIR)/modules/ssl/ssl_engine_vars.c
+ cp $(HTTPD_SRCDIR)/modules/ssl/ssl_engine_init.c \
+ $(HTTPD_SRCDIR)/modules/ssl/ssl_engine_init.c.orig
+ $(foreach f, ssl_engine_init.c ssl_util_ssl.c ssl_util_ssl.h, \
+ cp $(HTTPD_SRCDIR)/modules/ssl/${f} $(HTTPD_SRCDIR)/modules/ssl/${f}.orig; \
+ sed 's/SSL_CTX_use_certificate_chain/_SSL_CTX_use_certificate_chain/' \
+ < $(HTTPD_SRCDIR)/modules/ssl/${f}.orig \
+ > $(HTTPD_SRCDIR)/modules/ssl/${f};\
+ )
+ touch $@
+
+# configure httpd
+$(HTTPD_OBJDIR)/.configured: $(HTTPD_OBJDIR)/.retrieved \
+ $(APR_UTIL_OBJDIR)/.installed
+ cd $(HTTPD_SRCDIR) && ./buildconf
+ cd $(HTTPD_OBJDIR) \
+ && env CFLAGS="-g $(PROFILE_CFLAGS) -DAPR_POOL_DEBUG" \
+ GREP="`which grep`" \
+ $(HTTPD_SRCDIR)/configure \
+ --prefix=$(PREFIX)/httpd \
+ --enable-maintainer-mode \
+ --enable-ssl \
+ --enable-dav \
+ --enable-proxy \
+ --with-mpm=prefork \
+ --with-apr="$(PREFIX)/apr" \
+ --with-apr-util="$(PREFIX)/apr"
+ touch $@
+
+# compile httpd
+$(HTTPD_OBJDIR)/.compiled: $(HTTPD_OBJDIR)/.configured
+ (cd $(HTTPD_OBJDIR) && env MAKEFLAGS= make)
+ touch $@
+
+# install httpd
+$(HTTPD_OBJDIR)/.installed: $(HTTPD_OBJDIR)/.compiled
+ (cd $(HTTPD_OBJDIR) && env MAKEFLAGS= make install)
+ touch $@
+
+# create a httpd.conf for mod_dav_svn
+$(HTTPD_CONF):
+ mkdir -p $(dir $@)
+ echo > $@.tmp '# httpd config for use with mod_dav_svn'
+ echo >>$@.tmp 'ServerRoot "$(PREFIX)/httpd"'
+ echo >>$@.tmp 'Listen localhost:8080'
+ echo >>$@.tmp 'LoadModule dav_svn_module modules/svn-$(WC)/mod_dav_svn.so'
+ echo >>$@.tmp 'LoadModule authz_svn_module modules/svn-$(WC)/mod_authz_svn.so'
+ echo >>$@.tmp 'DocumentRoot "$(PREFIX)/httpd/htdocs"'
+ echo >>$@.tmp '<Directory />'
+ echo >>$@.tmp ' Options FollowSymLinks'
+ echo >>$@.tmp ' AllowOverride None'
+ echo >>$@.tmp ' Order deny,allow'
+ echo >>$@.tmp ' Deny from all'
+ echo >>$@.tmp '</Directory>'
+ echo >>$@.tmp '<Location /repos>'
+ echo >>$@.tmp ' DAV svn'
+ echo >>$@.tmp ' SVNPath /tmp/svn-sandbox/repos'
+ echo >>$@.tmp ' Allow from localhost'
+ echo >>$@.tmp '</Location>'
+ mv -f $@.tmp $@
+
+#######################################################################
+# neon
+#######################################################################
+
+neon-retrieve: $(NEON_OBJDIR)/.retrieved
+neon-configure: $(NEON_OBJDIR)/.configured
+neon-compile: $(NEON_OBJDIR)/.compiled
+neon-install: $(NEON_OBJDIR)/.installed
+neon-reset:
+ $(foreach f, .retrieved .configured .compiled .installed, \
+ rm -f $(NEON_OBJDIR)/$(f);)
+
+neon-clean:
+ -(cd $(NEON_OBJDIR) && env MAKEFLAGS= make clean)
+
+# fetch distfile for neon
+$(DISTDIR)/$(NEON_DIST):
+ cd $(DISTDIR) && $(FETCH_CMD) $(NEON_URL)
+
+# retrieve neon
+$(NEON_OBJDIR)/.retrieved: $(DISTDIR)/$(NEON_DIST)
+ $(call do_check_sha256,$(NEON_DIST))
+ [ -d $(NEON_OBJDIR) ] || mkdir -p $(NEON_OBJDIR)
+ tar -C $(SRCDIR) -zxf $(DISTDIR)/$(NEON_DIST)
+ touch $@
+
+# OpenBSD does not have krb5-config in PATH, but the neon port has
+# a suitable replacement.
+ifeq ($(UNAME),OpenBSD)
+KRB5_CONFIG_PATH=/usr/ports/net/neon/files
+endif
+
+# configure neon
+$(NEON_OBJDIR)/.configured: $(NEON_OBJDIR)/.retrieved
+ cd $(NEON_SRCDIR) && ./autogen.sh
+ if [ -n "$(KRB5_CONFIG_PATH)" ] && [ -d "$(KRB5_CONFIG_PATH)" ]; then \
+ cp $(KRB5_CONFIG_PATH)/krb5-config $(NEON_OBJDIR); \
+ chmod +x $(NEON_OBJDIR)/krb5-config; \
+ fi
+ cd $(NEON_OBJDIR) \
+ && env CFLAGS="-g $(PROFILE_CFLAGS)" GREP="`which grep`" \
+ $(NEON_SRCDIR)/configure \
+ PATH=$(NEON_OBJDIR):$$PATH \
+ --prefix=$(PREFIX)/neon \
+ --with-ssl \
+ --enable-shared \
+ --without-libproxy
+ touch $@
+
+# compile neon
+$(NEON_OBJDIR)/.compiled: $(NEON_OBJDIR)/.configured
+ (cd $(NEON_OBJDIR) && env MAKEFLAGS= make)
+ touch $@
+
+# install neon
+$(NEON_OBJDIR)/.installed: $(NEON_OBJDIR)/.compiled
+ (cd $(NEON_OBJDIR) && env MAKEFLAGS= make install)
+ touch $@
+
+
+#######################################################################
+# serf
+#######################################################################
+
+serf-retrieve: $(SERF_OBJDIR)/.retrieved
+serf-configure: $(SERF_OBJDIR)/.configured
+serf-compile: $(SERF_OBJDIR)/.compiled
+serf-install: $(SERF_OBJDIR)/.installed
+serf-reset:
+ $(foreach f, .retrieved .configured .compiled .installed, \
+ rm -f $(SERF_OBJDIR)/$(f);)
+
+serf-clean:
+ -(cd $(SERF_SRCDIR) && scons -c)
+
+
+# fetch distfile for serf
+#$(DISTDIR)/$(SERF_DIST):
+# cd $(DISTDIR) && $(FETCH_CMD) $(SERF_URL)
+#
+# retrieve serf
+#$(SERF_OBJDIR)/.retrieved: $(DISTDIR)/$(SERF_DIST)
+# [ -d $(SERF_OBJDIR) ] || mkdir -p $(SERF_OBJDIR)
+# tar -C $(SRCDIR) -zxf $(DISTDIR)/$(SERF_DIST)
+# touch $@
+
+# retrieve serf if not present yet
+$(SERF_OBJDIR)/.retrieved:
+ [ -d $(SERF_OBJDIR) ] || mkdir -p $(SERF_OBJDIR)
+ if [ ! -d $(SERF_SRCDIR) ]; then \
+ svn co $(SERF_URL) $(SERF_SRCDIR); \
+ svn merge ^/serf/branches/1.3.x-sslbuild@1781542 $(SERF_SRCDIR); \
+ fi
+ touch $@
+
+# compile serf (serf won't compile outside its source tree)
+$(SERF_OBJDIR)/.compiled: $(SERF_OBJDIR)/.retrieved \
+ $(APR_UTIL_OBJDIR)/.installed
+ cd $(SERF_SRCDIR) && \
+ scons DEBUG=1 \
+ CFLAGS="-O0 -g $(PROFILE_CFLAGS) -DAPR_POOL_DEBUG" \
+ APR=$(PREFIX)/apr \
+ APU=$(PREFIX)/apr \
+ PREFIX=$(PREFIX)/serf
+ touch $@
+
+# install serf
+$(SERF_OBJDIR)/.installed: $(SERF_OBJDIR)/.compiled
+ rm -rf $(PREFIX)/serf # XXX scons cannot reinstall :(
+ cd $(SERF_SRCDIR) && \
+ scons install
+ # work around unportable scons shared lib support
+ -ln -s libserf-1.so.$(shell echo $(SERF_VER) | sed -e 's/[0-9]$$/0/') \
+ $(PREFIX)/serf/lib/libserf-1.so
+ touch $@
+
+#######################################################################
+# serf-old (compatible with Subversion 1.5)
+#######################################################################
+
+serf-old-retrieve: $(SERF_OLD_OBJDIR)/.retrieved
+serf-old-configure: $(SERF_OLD_OBJDIR)/.configured
+serf-old-compile: $(SERF_OLD_OBJDIR)/.compiled
+serf-old-install: $(SERF_OLD_OBJDIR)/.installed
+serf-old-reset:
+ $(foreach f, .retrieved .configured .compiled .installed, \
+ rm -f $(SERF_OLD_OBJDIR)/$(f);)
+
+serf-old-clean:
+ -(cd $(SERF_OLD_SRCDIR) && ./serfmake clean)
+
+# retrieve serf-old if not present yet
+$(SERF_OLD_OBJDIR)/.retrieved:
+ [ -d $(SERF_OLD_OBJDIR) ] || mkdir -p $(SERF_OLD_OBJDIR)
+ if [ ! -d $(SERF_OLD_SRCDIR) ]; then \
+ svn export $(SERF_OLD_URL) $(SERF_OLD_SRCDIR); \
+ fi
+ touch $@
+
+# compile serf-old (serf won't compile outside its source tree)
+$(SERF_OLD_OBJDIR)/.compiled: $(SERF_OLD_OBJDIR)/.retrieved \
+ $(APR_UTIL_OBJDIR)/.installed
+ cd $(SERF_OLD_SRCDIR) && \
+ env CFLAGS="-O0 -g $(PROFILE_CFLAGS) -DAPR_POOL_DEBUG" \
+ ./serfmake --with-apr=$(PREFIX)/apr \
+ --prefix=$(PREFIX)/serf-old \
+ build
+ touch $@
+
+# install serf-old
+$(SERF_OLD_OBJDIR)/.installed: $(SERF_OLD_OBJDIR)/.compiled
+ cd $(SERF_OLD_SRCDIR) && \
+ ./serfmake --with-apr=$(PREFIX)/apr \
+ --with-apr-util=$(PREFIX)/apr \
+ --prefix=$(PREFIX)/serf-old \
+ install
+ touch $@
+
+
+#######################################################################
+# sqlite
+#######################################################################
+
+sqlite-retrieve: $(SQLITE_OBJDIR)/.retrieved
+sqlite-configure: $(SQLITE_OBJDIR)/.configured
+sqlite-compile: $(SQLITE_OBJDIR)/.compiled
+sqlite-install: $(SQLITE_OBJDIR)/.installed
+sqlite-reset:
+ $(foreach f, .retrieved .configured .compiled .installed, \
+ rm -f $(SQLITE_OBJDIR)/$(f);)
+
+sqlite-clean:
+ -cd $(SQLITE_OBJDIR) && env MAKEFLAGS= make clean
+
+# fetch distfile for sqlite
+$(DISTDIR)/$(SQLITE_DIST):
+ cd $(DISTDIR) && $(FETCH_CMD) $(SQLITE_URL)
+
+# retrieve sqlite
+$(SQLITE_OBJDIR)/.retrieved: $(DISTDIR)/$(SQLITE_DIST)
+ $(call do_check_sha256,$(SQLITE_DIST))
+ [ -d $(SQLITE_OBJDIR) ] || mkdir -p $(SQLITE_OBJDIR)
+ tar -C $(SRCDIR) -zxf $(DISTDIR)/$(SQLITE_DIST)
+ touch $@
+
+ifeq ($(THREADING),yes)
+THREADSAFE_FLAG=--enable-threadsafe
+else
+THREADSAFE_FLAG=--disable-threadsafe
+endif
+
+# configure sqlite
+$(SQLITE_OBJDIR)/.configured: $(SQLITE_OBJDIR)/.retrieved
+ cd $(SQLITE_OBJDIR) \
+ && env CFLAGS="-g $(PROFILE_CFLAGS)" GREP="`which grep`" \
+ $(SQLITE_SRCDIR)/configure \
+ --prefix=$(PREFIX)/sqlite \
+ $(THREADSAFE_FLAG)
+ touch $@
+
+# compile sqlite
+$(SQLITE_OBJDIR)/.compiled: $(SQLITE_OBJDIR)/.configured
+ (cd $(SQLITE_OBJDIR) && env MAKEFLAGS= make)
+ touch $@
+
+# install sqlite
+$(SQLITE_OBJDIR)/.installed: $(SQLITE_OBJDIR)/.compiled
+ (cd $(SQLITE_OBJDIR) && env MAKEFLAGS= make install)
+ touch $@
+
+#######################################################################
+# cyrus-sasl
+#######################################################################
+
+cyrus-sasl-retrieve: $(CYRUS_SASL_OBJDIR)/.retrieved
+cyrus-sasl-configure: $(CYRUS_SASL_OBJDIR)/.configured
+cyrus-sasl-compile: $(CYRUS_SASL_OBJDIR)/.compiled
+cyrus-sasl-install: $(CYRUS_SASL_OBJDIR)/.installed
+cyrus-sasl-reset:
+ $(foreach f, .retrieved .configured .compiled .installed, \
+ rm -f $(CYRUS_SASL_OBJDIR)/$(f);)
+
+cyrus-sasl-clean:
+ -(cd $(CYRUS_SASL_OBJDIR) && env MAKEFLAGS= make distclean)
+
+# fetch distfile for cyrus-sasl
+$(DISTDIR)/$(CYRUS_SASL_DIST):
+ cd $(DISTDIR) && $(FETCH_CMD) $(CYRUS_SASL_URL)
+
+# retrieve cyrus-sasl
+$(CYRUS_SASL_OBJDIR)/.retrieved: $(DISTDIR)/$(CYRUS_SASL_DIST)
+ $(call do_check_sha256,$(CYRUS_SASL_DIST))
+ [ -d $(CYRUS_SASL_OBJDIR) ] || mkdir -p $(CYRUS_SASL_OBJDIR)
+ tar -C $(SRCDIR) -zxf $(DISTDIR)/$(CYRUS_SASL_DIST)
+ # fixes build on Debian:
+ sed 's/#elif WITH_DES/#elif defined(WITH_DES)/' \
+ < $(CYRUS_SASL_SRCDIR)/plugins/digestmd5.c \
+ > $(CYRUS_SASL_SRCDIR)/plugins/digestmd5.c.patched
+ mv $(CYRUS_SASL_SRCDIR)/plugins/digestmd5.c.patched \
+ $(CYRUS_SASL_SRCDIR)/plugins/digestmd5.c
+ifeq ($(UNAME),OpenBSD)
+ # Fixes GSSAPI support on OpenBSD, which hasn't got libroken:
+ for f in `grep -l -R -- -lroken $(CYRUS_SASL_SRCDIR)`; do \
+ sed -e 's/-lroken//g' < $$f > $$f.tmp && \
+ mv $$f.tmp $$f; \
+ done
+ chmod +x $(CYRUS_SASL_SRCDIR)/configure
+endif
+ # Fixes excessive auth log spam from sasl if broken .la files exist
+ sed 's/SASL_LOG_WARN/SASL_LOG_DEBUG/' \
+ < $(CYRUS_SASL_SRCDIR)/lib/dlopen.c \
+ > $(CYRUS_SASL_SRCDIR)/lib/dlopen.c.patched
+ mv $(CYRUS_SASL_SRCDIR)/lib/dlopen.c.patched \
+ $(CYRUS_SASL_SRCDIR)/lib/dlopen.c
+ # Fix a weird autotools error about missing cmulocal dir
+ (cd $(CYRUS_SASL_SRCDIR)/saslauthd/ && ln -sf ../cmulocal)
+ touch $@
+
+# configure cyrus-sasl
+$(CYRUS_SASL_OBJDIR)/.configured: $(CYRUS_SASL_OBJDIR)/.retrieved \
+ $(BDB_OBJDIR)/.installed $(SQLITE_OBJDIR)/.installed
+ cd $(CYRUS_SASL_OBJDIR) \
+ && env CFLAGS="-g $(PROFILE_CFLAGS)" \
+ CPPFLAGS="-I/usr/include/kerberosV" \
+ GREP="`which grep`" \
+ $(CYRUS_SASL_SRCDIR)/configure \
+ --with-dbpath=$(PREFIX)/cyrus-sasl/etc/sasldb2 \
+ --with-plugindir=$(PREFIX)/cyrus-sasl/lib/sasl2 \
+ --with-configdir=$(PREFIX)/cyrus-sasl/lib/sasl2 \
+ --with-bdb-libdir=$(PREFIX)/bdb/lib \
+ --with-bdb-incdir=$(PREFIX)/bdb/include \
+ --with-dblib=berkeley \
+ --with-sqlite=$(PREFIX)/sqlite \
+ --prefix=$(PREFIX)/cyrus-sasl
+ touch $@
+
+# compile cyrus-sasl
+$(CYRUS_SASL_OBJDIR)/.compiled: $(CYRUS_SASL_OBJDIR)/.configured
+ (cd $(CYRUS_SASL_OBJDIR) && env MAKEFLAGS= make)
+ touch $@
+
+# install cyrus-sasl
+$(CYRUS_SASL_OBJDIR)/.installed: $(CYRUS_SASL_OBJDIR)/.compiled
+ (cd $(CYRUS_SASL_OBJDIR) && env MAKEFLAGS= make install)
+ touch $@
+
+#######################################################################
+# libmagic
+#######################################################################
+
+libmagic-retrieve: $(LIBMAGIC_OBJDIR)/.retrieved
+libmagic-configure: $(LIBMAGIC_OBJDIR)/.configured
+libmagic-compile: $(LIBMAGIC_OBJDIR)/.compiled
+libmagic-install: $(LIBMAGIC_OBJDIR)/.installed
+libmagic-reset:
+ $(foreach f, .retrieved .configured .compiled .installed, \
+ rm -f $(LIBMAGIC_OBJDIR)/$(f);)
+
+libmagic-clean:
+ -(cd $(LIBMAGIC_OBJDIR) && env MAKEFLAGS= make distclean)
+
+# fetch distfile for libmagic
+$(DISTDIR)/$(LIBMAGIC_DIST):
+ cd $(DISTDIR) && $(FETCH_CMD) $(LIBMAGIC_URL)
+
+# retrieve libmagic
+$(LIBMAGIC_OBJDIR)/.retrieved: $(DISTDIR)/$(LIBMAGIC_DIST)
+ $(call do_check_sha256,$(LIBMAGIC_DIST))
+ [ -d $(LIBMAGIC_OBJDIR) ] || mkdir -p $(LIBMAGIC_OBJDIR)
+ tar -C $(SRCDIR) -zxf $(DISTDIR)/$(LIBMAGIC_DIST)
+ touch $@
+
+# configure libmagic
+$(LIBMAGIC_OBJDIR)/.configured: $(LIBMAGIC_OBJDIR)/.retrieved
+ cd $(LIBMAGIC_OBJDIR) \
+ && env CFLAGS="-g $(PROFILE_CFLAGS)" GREP="`which grep`"\
+ $(LIBMAGIC_SRCDIR)/configure \
+ --enable-fsect-man5 \
+ --prefix=$(PREFIX)/libmagic
+ touch $@
+
+# compile libmagic
+$(LIBMAGIC_OBJDIR)/.compiled: $(LIBMAGIC_OBJDIR)/.configured
+ (cd $(LIBMAGIC_OBJDIR) && env MAKEFLAGS= make)
+ touch $@
+
+# install libmagic
+$(LIBMAGIC_OBJDIR)/.installed: $(LIBMAGIC_OBJDIR)/.compiled
+ (cd $(LIBMAGIC_OBJDIR) && env MAKEFLAGS= make install)
+ touch $@
+
+#######################################################################
+# ruby
+#######################################################################
+
+ruby-retrieve: $(RUBY_OBJDIR)/.retrieved
+ruby-configure: $(RUBY_OBJDIR)/.configured
+ruby-compile: $(RUBY_OBJDIR)/.compiled
+ruby-install: $(RUBY_OBJDIR)/.installed
+ruby-reset:
+ $(foreach f, .retrieved .configured .compiled .installed, \
+ rm -f $(RUBY_OBJDIR)/$(f);)
+
+ruby-clean:
+ -(cd $(RUBY_OBJDIR) && env MAKEFLAGS= make distclean)
+
+# fetch distfile for ruby
+$(DISTDIR)/$(RUBY_DIST):
+ cd $(DISTDIR) && $(FETCH_CMD) $(RUBY_URL)
+
+# retrieve ruby
+#
+$(RUBY_OBJDIR)/.retrieved: $(DISTDIR)/$(RUBY_DIST)
+ $(call do_check_sha256,$(RUBY_DIST))
+ [ -d $(RUBY_OBJDIR) ] || mkdir -p $(RUBY_OBJDIR)
+ tar -C $(SRCDIR) -zxf $(DISTDIR)/$(RUBY_DIST)
+ -which ghead && sed -i -e "s/head -c/ghead -c/" $(RUBY_SRCDIR)/configure
+ touch $@
+
+# Use a ruby-specific variable name here: THREADSAFE_FLAG is already used by
+# the sqlite section, and make expands recipe variables at execution time,
+# so redefining THREADSAFE_FLAG here would clobber the value seen by the
+# sqlite configure recipe.
+ifeq ($(THREADING),yes)
+RUBY_THREADSAFE_FLAG=--enable-pthread
+else
+RUBY_THREADSAFE_FLAG=--disable-pthread
+endif
+
+# configure ruby
+$(RUBY_OBJDIR)/.configured: $(RUBY_OBJDIR)/.retrieved
+	cd $(RUBY_OBJDIR) \
+		&& env CFLAGS="-g $(PROFILE_CFLAGS)" GREP="`which grep`"\
+		$(RUBY_SRCDIR)/configure \
+		--prefix=$(PREFIX)/ruby \
+		--enable-shared \
+		--with-baseruby="$(RUBY)" \
+		$(RUBY_THREADSAFE_FLAG) \
+		--disable-install-doc \
+		--without-valgrind \
+		--without-gmp
+	touch $@
+
+# compile ruby
+$(RUBY_OBJDIR)/.compiled: $(RUBY_OBJDIR)/.configured
+ (cd $(RUBY_OBJDIR) && env MAKEFLAGS= make)
+ touch $@
+
+# install ruby
+$(RUBY_OBJDIR)/.installed: $(RUBY_OBJDIR)/.compiled
+ (cd $(RUBY_OBJDIR) && env MAKEFLAGS= make install)
+ touch $@
+
+#######################################################################
+# bz2
+#######################################################################
+
+bz2-retrieve: $(BZ2_OBJDIR)/.retrieved
+bz2-compile: $(BZ2_OBJDIR)/.compiled
+bz2-install: $(BZ2_OBJDIR)/.installed
+bz2-reset:
+ $(foreach f, .retrieved .configured .compiled .installed, \
+ rm -f $(BZ2_OBJDIR)/$(f);)
+
+bz2-clean:
+ -(cd $(BZ2_SRCDIR) && env MAKEFLAGS= make distclean)
+
+# fetch distfile for bz2
+$(DISTDIR)/$(BZ2_DIST):
+ cd $(DISTDIR) && $(FETCH_CMD) $(BZ2_URL)
+
+# retrieve bz2
+$(BZ2_OBJDIR)/.retrieved: $(DISTDIR)/$(BZ2_DIST)
+ $(call do_check_sha256,$(BZ2_DIST))
+ [ -d $(BZ2_OBJDIR) ] || mkdir -p $(BZ2_OBJDIR)
+ tar -C $(SRCDIR) -zxf $(DISTDIR)/$(BZ2_DIST)
+ touch $@
+
+# compile bz2
+$(BZ2_OBJDIR)/.compiled: $(BZ2_OBJDIR)/.retrieved
+ (cd $(BZ2_SRCDIR) && env MAKEFLAGS= make CFLAGS="-g $(PROFILE_CFLAGS) -fPIC")
+ touch $@
+
+# install bz2
+$(BZ2_OBJDIR)/.installed: $(BZ2_OBJDIR)/.compiled
+ (cd $(BZ2_SRCDIR) && env MAKEFLAGS= make install PREFIX=$(PREFIX)/bz2)
+ touch $@
+
+
+#######################################################################
+# python
+#######################################################################
+
+python-retrieve: $(PYTHON_OBJDIR)/.retrieved
+python-configure: $(PYTHON_OBJDIR)/.configured
+python-compile: $(PYTHON_OBJDIR)/.compiled
+python-install: $(PYTHON_OBJDIR)/.installed
+python-reset:
+ $(foreach f, .retrieved .configured .compiled .installed, \
+ rm -f $(PYTHON_OBJDIR)/$(f);)
+
+python-clean:
+ -(cd $(PYTHON_OBJDIR) && env MAKEFLAGS= make distclean)
+
+# fetch distfile for python
+$(DISTDIR)/$(PYTHON_DIST):
+ cd $(DISTDIR) && $(FETCH_CMD) $(PYTHON_URL)
+
+# https://bugs.python.org/issue12560
+$(DISTDIR)/python-issue12560.patch:
+ cd $(DISTDIR) && $(FETCH_CMD) -O "$@" \
+ https://hg.python.org/cpython/raw-rev/32cc37a89b58
+
+# retrieve python
+#
+$(PYTHON_OBJDIR)/.retrieved: $(DISTDIR)/$(PYTHON_DIST) $(DISTDIR)/python-issue12560.patch
+ $(call do_check_sha256,$(PYTHON_DIST))
+ [ -d $(PYTHON_OBJDIR) ] || mkdir -p $(PYTHON_OBJDIR)
+ tar -C $(SRCDIR) -zxf $(DISTDIR)/$(PYTHON_DIST)
+ # Make setup.py use our own dependencies instead of system ones
+ sed -e "s#sqlite_inc_paths = \[ '/usr/include',#sqlite_inc_paths = [ '$(PREFIX)/sqlite/include',#" \
+ -e "s#'/usr/include/db4'#'$(PREFIX)/bdb/include'#" \
+ -e "s|\(add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib')\)|pass #\1|" \
+ -e "s|\(add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')\)|pass #\1|" \
+ -e "s#find_library_file(lib_dirs, 'bz2'#find_library_file(['$(PREFIX)/bz2/lib'] + lib_dirs, 'bz2'#" \
+ < $(PYTHON_SRCDIR)/setup.py \
+ > $(PYTHON_SRCDIR)/setup.py.patched
+ mv $(PYTHON_SRCDIR)/setup.py.patched $(PYTHON_SRCDIR)/setup.py
+ chmod +x $(PYTHON_SRCDIR)/setup.py
+ cd $(PYTHON_SRCDIR) && patch -p1 < $(DISTDIR)/python-issue12560.patch
+ touch $@
+
+# configure python
+ifdef PROFILE
+PYTHON_PROFILING=--enable-profiling
+endif
+$(PYTHON_OBJDIR)/.configured: $(PYTHON_OBJDIR)/.retrieved \
+ $(BZ2_OBJDIR)/.installed
+ cd $(PYTHON_OBJDIR) \
+ && env CFLAGS="-g $(PROFILE_CFLAGS)" GREP="`which grep`" \
+ CPPFLAGS="-I$(PREFIX)/bz2/include" \
+ LDFLAGS="-Wl,-rpath=$(PREFIX)/python/lib -L$(PREFIX)/bz2/lib" \
+ LD_LIBRARY_PATH="$(LD_LIBRARY_PATH):$$LD_LIBRARY_PATH" \
+ $(PYTHON_SRCDIR)/configure \
+ --prefix=$(PREFIX)/python \
+ --enable-shared \
+ --with-system-expat \
+ --with-dbmliborder=bdb \
+ $(PYTHON_PROFILING)
+ touch $@
+
+# compile python
+$(PYTHON_OBJDIR)/.compiled: $(PYTHON_OBJDIR)/.configured
+ (cd $(PYTHON_OBJDIR) && \
+ env MAKEFLAGS= \
+ LD_LIBRARY_PATH="$(LD_LIBRARY_PATH):$$LD_LIBRARY_PATH" \
+ make)
+ touch $@
+
+# install python
+$(PYTHON_OBJDIR)/.installed: $(PYTHON_OBJDIR)/.compiled
+ (cd $(PYTHON_OBJDIR) && \
+ env MAKEFLAGS= \
+ LD_LIBRARY_PATH="$(LD_LIBRARY_PATH):$$LD_LIBRARY_PATH" \
+ make install)
+ touch $@
+
+
+#######################################################################
+# junit
+#######################################################################
+
+# fetch distfile for junit
+$(DISTDIR)/$(JUNIT_DIST):
+ cd $(DISTDIR) && $(FETCH_CMD) $(JUNIT_URL)
+ $(call do_check_sha256,$(JUNIT_DIST))
+
+
+#######################################################################
+# gettext
+#######################################################################
+
+gettext-retrieve: $(GETTEXT_OBJDIR)/.retrieved
+gettext-configure: $(GETTEXT_OBJDIR)/.configured
+gettext-compile: $(GETTEXT_OBJDIR)/.compiled
+gettext-install: $(GETTEXT_OBJDIR)/.installed
+gettext-reset:
+ $(foreach f, .retrieved .configured .compiled .installed, \
+ rm -f $(GETTEXT_OBJDIR)/$(f);)
+
+gettext-clean:
+ -(cd $(GETTEXT_SRCDIR) && env MAKEFLAGS= make clean)
+
+# fetch distfile for gettext
+$(DISTDIR)/$(GETTEXT_DIST):
+ cd $(DISTDIR) && $(FETCH_CMD) $(GETTEXT_URL)
+
+# retrieve gettext
+$(GETTEXT_OBJDIR)/.retrieved: $(DISTDIR)/$(GETTEXT_DIST)
+ $(call do_check_sha256,$(GETTEXT_DIST))
+ [ -d $(GETTEXT_OBJDIR) ] || mkdir -p $(GETTEXT_OBJDIR)
+ tar -C $(SRCDIR) -zxf $(DISTDIR)/$(GETTEXT_DIST)
+ touch $@
+
+# (gettext won't compile outside its source tree)
+# configure gettext
+$(GETTEXT_OBJDIR)/.configured: $(GETTEXT_OBJDIR)/.retrieved
+ cd $(GETTEXT_SRCDIR) \
+ && env CFLAGS="-g $(PROFILE_CFLAGS)" GREP="`which grep`"\
+ LDFLAGS="-L$(PREFIX)/iconv/lib" \
+ $(GETTEXT_SRCDIR)/configure \
+ --prefix=$(PREFIX)/gettext \
+ --with-libiconv-prefix=$(PREFIX)/iconv \
+ --disable-c++ \
+ --disable-java \
+ --disable-csharp \
+ $(THREADS_FLAG)
+ -which gsed && \
+ sed -e 's/sed /gsed /g' < $(GETTEXT_SRCDIR)/build-aux/moopp \
+ > $(GETTEXT_SRCDIR)/build-aux/moopp.fixed && \
+ mv $(GETTEXT_SRCDIR)/build-aux/moopp.fixed \
+ $(GETTEXT_SRCDIR)/build-aux/moopp && \
+ chmod +x $(GETTEXT_SRCDIR)/build-aux/moopp
+ touch $@
+
+# compile gettext
+$(GETTEXT_OBJDIR)/.compiled: $(GETTEXT_OBJDIR)/.configured
+ (cd $(GETTEXT_SRCDIR) && env MAKEFLAGS= make)
+ touch $@
+
+# install gettext
+$(GETTEXT_OBJDIR)/.installed: $(GETTEXT_OBJDIR)/.compiled
+ (cd $(GETTEXT_SRCDIR) && env MAKEFLAGS= make install)
+ touch $@
+
+#######################################################################
+# lz4
+#######################################################################
+
+lz4-retrieve: $(LZ4_OBJDIR)/.retrieved
+lz4-configure: $(LZ4_OBJDIR)/.configured
+lz4-compile: $(LZ4_OBJDIR)/.compiled
+lz4-install: $(LZ4_OBJDIR)/.installed
+lz4-reset:
+ $(foreach f, .retrieved .configured .compiled .installed, \
+ rm -f $(LZ4_OBJDIR)/$(f);)
+
+lz4-clean:
+ -(cd $(LZ4_SRCDIR) && env MAKEFLAGS= $(MAKE) clean)
+
+# fetch distfile for lz4
+$(DISTDIR)/$(LZ4_DIST):
+ cd $(DISTDIR) && $(FETCH_CMD) -O $(LZ4_DIST) $(LZ4_URL)
+
+# retrieve lz4
+$(LZ4_OBJDIR)/.retrieved: $(DISTDIR)/$(LZ4_DIST)
+ $(call do_check_sha256,$(LZ4_DIST))
+ [ -d $(LZ4_OBJDIR) ] || mkdir -p $(LZ4_OBJDIR)
+ tar -C $(SRCDIR) -zxf $(DISTDIR)/$(LZ4_DIST)
+ touch $@
+
+# configure lz4
+$(LZ4_OBJDIR)/.configured: $(LZ4_OBJDIR)/.retrieved
+ touch $@
+
+# compile lz4
+$(LZ4_OBJDIR)/.compiled: $(LZ4_OBJDIR)/.configured
+ (cd $(LZ4_SRCDIR)/lib && \
+ env MAKEFLAGS= $(MAKE) PREFIX=$(PREFIX)/lz4)
+ touch $@
+
+# install lz4
+$(LZ4_OBJDIR)/.installed: $(LZ4_OBJDIR)/.compiled
+ mkdir -p $(PREFIX)/lz4/lib
+ (cd $(LZ4_SRCDIR)/lib && \
+ env MAKEFLAGS= $(MAKE) PREFIX=$(PREFIX)/lz4 install)
+ touch $@
+
+#######################################################################
+# svn
+#######################################################################
+
+.PHONY: svn-configure svn-compile svn-install svn-bindings-compile \
+ svn-bindings-install svn-bindings-reset svn-clean
+
+svn-install-all: svn-install svn-bindings-install
+
+svn-retrieve: $(SVN_OBJDIR)/.retrieved
+svn-configure: $(SVN_OBJDIR)/.configured
+svn-compile: $(SVN_OBJDIR)/.compiled
+svn-bindings-compile: $(SVN_OBJDIR)/.bindings-compiled
+svn-install: $(SVN_OBJDIR)/.installed
+svn-bindings-install: $(SVN_OBJDIR)/.bindings-installed
+svn-bindings-reset:
+ $(foreach f, .bindings-compiled .bindings-installed, \
+ rm -f $(SVN_OBJDIR)/$(f);)
+svn-reset: svn-bindings-reset
+ $(foreach f, .retrieved .configured .compiled .installed \
+ .bindings-compiled .bindings-installed, \
+ rm -f $(SVN_OBJDIR)/$(f);)
+
+svn-clean:
+ -(cd $(svn_builddir) && env MAKEFLAGS= make distclean)
+
+# retrieve svn if not present yet
+$(SVN_OBJDIR)/.retrieved:
+ [ -d $(SVN_OBJDIR) ] || mkdir -p $(SVN_OBJDIR)
+ if [ "$(TAG)" != "none" ]; then \
+ branchdir="tags/$(TAG)"; \
+ co="export"; \
+ elif [ $(BRANCH) != trunk ]; then \
+ branchdir="branches/$(BRANCH)"; \
+ co="co"; \
+ else \
+ branchdir="$(BRANCH)"; \
+ co="co"; \
+ fi; \
+ if [ ! -d $(SVN_WC) ] && [ ! -h $(SVN_WC) ]; then \
+ svn $${co} $(SUBVERSION_REPOS_URL)/$${branchdir} \
+ $(SVN_WC); \
+ fi
+ touch $@
+
+ifeq ($(BRANCH_MAJOR),1.7)
+BDB_FLAG=db.h:$(PREFIX)/bdb/include:$(PREFIX)/bdb/lib:db-$(BDB_MAJOR_VER)
+SERF_FLAG=--with-serf="$(PREFIX)/serf"
+SERF_LDFLAG=-Wl,-rpath,$(PREFIX)/serf/lib -Wl,-rpath,$(PREFIX)/bdb/lib
+MOD_DAV_SVN=modules/svn-$(WC)/mod_dav_svn.so
+MOD_AUTHZ_SVN=modules/svn-$(WC)/mod_authz_svn.so
+MOD_DONTDOTHAT=modules/svn-$(WC)/mod_dontdothat.so
+LIBMAGIC_FLAG=--with-libmagic=$(PREFIX)/libmagic
+NEON_FLAG=--with-neon="$(PREFIX)/neon"
+JAVAHL_CHECK_TARGET=check-javahl
+else ifeq ($(BRANCH_MAJOR),1.6)
+BDB_FLAG=db.h:$(PREFIX)/bdb/include:$(PREFIX)/bdb/lib:db-$(BDB_MAJOR_VER)
+SERF_FLAG=--with-serf="$(PREFIX)/serf"
+SERF_LDFLAG=-Wl,-rpath,$(PREFIX)/serf/lib -Wl,-rpath,$(PREFIX)/bdb/lib
+MOD_DAV_SVN=modules/svn-$(WC)/mod_dav_svn.so
+MOD_AUTHZ_SVN=modules/svn-$(WC)/mod_authz_svn.so
+MOD_DONTDOTHAT=modules/svn-$(WC)/mod_dontdothat.so
+W_NO_SYSTEM_HEADERS=-Wno-system-headers
+NEON_FLAG=--with-neon="$(PREFIX)/neon"
+JAVAHL_CHECK_TARGET=check-javahl
+else ifeq ($(BRANCH_MAJOR),1.5)
+BDB_FLAG=$(PREFIX)/bdb
+SERF_FLAG=--with-serf="$(PREFIX)/serf-old"
+MOD_DAV_SVN=modules/mod_dav_svn.so
+MOD_AUTHZ_SVN=modules/mod_authz_svn.so
+MOD_DONTDOTHAT=modules/mod_dontdothat.so
+DISABLE_NEON_VERSION_CHECK=--disable-neon-version-check
+W_NO_SYSTEM_HEADERS=-Wno-system-headers
+NEON_FLAG=--with-neon="$(PREFIX)/neon"
+JAVAHL_CHECK_TARGET=check-javahl
+else ifeq ($(BRANCH_MAJOR), $(filter $(BRANCH_MAJOR), 1.8 1.9))
+BDB_FLAG=db.h:$(PREFIX)/bdb/include:$(PREFIX)/bdb/lib:db-$(BDB_MAJOR_VER)
+SERF_FLAG=--with-serf="$(PREFIX)/serf"
+# serf >= 1.3.0 is built with scons and no longer sets up rpath linker flags,
+# so we have to do that ourselves :(
+SERF_LDFLAG=-Wl,-rpath,$(PREFIX)/serf/lib -Wl,-rpath,$(PREFIX)/bdb/lib
+MOD_DAV_SVN=modules/svn-$(WC)/mod_dav_svn.so
+MOD_AUTHZ_SVN=modules/svn-$(WC)/mod_authz_svn.so
+MOD_DONTDOTHAT=modules/svn-$(WC)/mod_dontdothat.so
+LIBMAGIC_FLAG=--with-libmagic=$(PREFIX)/libmagic
+JAVAHL_CHECK_TARGET=check-all-javahl
+else # 1.10
+BDB_FLAG=db.h:$(PREFIX)/bdb/include:$(PREFIX)/bdb/lib:db-$(BDB_MAJOR_VER)
+SERF_FLAG=--with-serf="$(PREFIX)/serf"
+# serf >= 1.3.0 is built with scons and no longer sets up rpath linker flags,
+# so we have to do that ourselves :(
+SERF_LDFLAG=-Wl,-rpath,$(PREFIX)/serf/lib -Wl,-rpath,$(PREFIX)/bdb/lib
+MOD_DAV_SVN=modules/svn-$(WC)/mod_dav_svn.so
+MOD_AUTHZ_SVN=modules/svn-$(WC)/mod_authz_svn.so
+MOD_DONTDOTHAT=modules/svn-$(WC)/mod_dontdothat.so
+LIBMAGIC_FLAG=--with-libmagic=$(PREFIX)/libmagic
+JAVAHL_CHECK_TARGET=check-all-javahl
+LZ4_FLAG=--with-lz4=$(PREFIX)/lz4
+UTF8PROC_FLAG=--with-utf8proc=internal
+endif
+
+ifeq ($(ENABLE_JAVA_BINDINGS),yes)
+ JAVAHL_FLAG=--enable-javahl=yes --with-jdk --with-jikes=no \
+ --with-junit=$(DISTDIR)/$(JUNIT_DIST)
+else
+ JAVAHL_FLAG=--with-jdk=no
+endif
+
+ifdef PROFILE
+SVN_STATIC_FLAG=--enable-all-static
+else
+SVN_STATIC_FLAG=--disable-static
+SVN_WITH_HTTPD=--with-apxs="$(PREFIX)/httpd/bin/apxs" \
+ --with-apache-libexecdir="$(PREFIX)/httpd/modules/svn-$(WC)"
+SVN_WITH_SASL=--with-sasl="$(PREFIX)/cyrus-sasl"
+endif
+
+$(SVN_OBJDIR)/.configured: $(SVN_OBJDIR)/.retrieved $(DISTDIR)/$(JUNIT_DIST) \
+ $(APR_OBJDIR)/.installed $(APR_UTIL_OBJDIR)/.installed \
+ $(BDB_OBJDIR)/.installed $(SQLITE_OBJDIR)/.installed \
+ $(HTTPD_OBJDIR)/.installed $(CYRUS_SASL_OBJDIR)/.installed \
+ $(LIBMAGIC_OBJDIR)/.installed $(NEON_OBJDIR)/.installed \
+ $(SERF_OBJDIR)/.installed $(SERF_OLD_OBJDIR)/.installed \
+ $(RUBY_OBJDIR)/.installed $(PYTHON_OBJDIR)/.installed
+ cd $(SVN_SRCDIR) && ./autogen.sh
+ cd $(svn_builddir) && \
+ env LDFLAGS="-L$(PREFIX)/neon/lib -L$(PREFIX)/apr/lib $(SERF_LDFLAG) -L$(PREFIX)/gettext/lib -L$(PREFIX)/iconv/lib" \
+ CFLAGS="-I$(PREFIX)/gettext/include -DAPR_POOL_DEBUG" \
+ CXXFLAGS="-I$(PREFIX)/gettext/include -DAPR_POOL_DEBUG" \
+ LD_LIBRARY_PATH="$(LD_LIBRARY_PATH):$$LD_LIBRARY_PATH" \
+ GREP="`which grep`" \
+ PATH=$(PREFIX)/ruby/bin:$(PREFIX)/python/bin:$(PREFIX)/gettext/bin:$$PATH \
+ $(SVN_SRCDIR)/configure \
+ --enable-maintainer-mode \
+ --prefix="$(SVN_PREFIX)" \
+ --with-apr="$(PREFIX)/apr" \
+ --with-apr-util="$(PREFIX)/apr" \
+ $(NEON_FLAG) \
+ $(SVN_WITH_HTTPD) \
+ $(SVN_WITH_SASL) \
+ $(SERF_FLAG) \
+ --with-sqlite="$(PREFIX)/sqlite" \
+ --with-zlib="/usr" \
+ --without-gnome-keyring \
+ --with-berkeley-db="$(BDB_FLAG)" \
+ --with-ruby-sitedir="$(SVN_PREFIX)/lib/ruby/site_ruby" \
+ --disable-mod-activation \
+ $(JAVAHL_FLAG) \
+ $(LIBMAGIC_FLAG) \
+ $(LZ4_FLAG) \
+ $(UTF8PROC_FLAG) \
+ $(SVN_STATIC_FLAG) \
+ $(DISABLE_NEON_VERSION_CHECK)
+ touch $@
+
+# compile svn
+$(SVN_OBJDIR)/.compiled: $(SVN_OBJDIR)/.configured
+ cd $(svn_builddir) \
+ && env MAKEFLAGS= make EXTRA_CFLAGS="$(PROFILE_CFLAGS) $(W_NO_SYSTEM_HEADERS)"
+ touch $@
+
+# install svn
+$(SVN_OBJDIR)/.installed: $(SVN_OBJDIR)/.compiled
+ cd $(svn_builddir) \
+ && env MAKEFLAGS= make install install-tools
+ touch $@
+
+# SWIG 1.x and 2.x are not compatible. If SWIG 2.x is used to generated .swg
+# files and 1.x is used to build the bindings, the Python bindings fail to
+# load with errors such as "undefined symbol 'SWIG_Python_str_AsChar'".
+# So clean any pre-generated .swg files to make sure everything is done
+# by the same version of SWIG.
+$(SVN_OBJDIR)/.pre-generated-swig-cleaned:
+ -cd $(svn_builddir) \
+ && env MAKEFLAGS= make clean-swig
+ touch $@
+
+$(SVN_OBJDIR)/.bindings-compiled: $(SVN_OBJDIR)/.installed $(SVN_OBJDIR)/.pre-generated-swig-cleaned
+ cd $(svn_builddir) \
+ && env LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) \
+ env MAKEFLAGS= make swig-py
+ cd $(svn_builddir) && \
+ env PATH=$(PREFIX)/ruby/bin:$$PATH \
+ LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) env MAKEFLAGS= make swig-rb
+ if [ $(ENABLE_PERL_BINDINGS) = yes ]; then \
+ cd $(svn_builddir) \
+ && env LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) \
+ env MAKEFLAGS= make swig-pl; \
+ fi
+ if [ $(ENABLE_JAVA_BINDINGS) = yes ]; then \
+ cd $(svn_builddir) \
+ && env MAKEFLAGS= make javahl; \
+ fi
+ touch $@
+
+$(SVN_OBJDIR)/.bindings-installed: $(SVN_OBJDIR)/.bindings-compiled
+ cd $(svn_builddir) \
+ && env LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) \
+ env MAKEFLAGS= make install-swig-py
+ cd $(svn_builddir) && \
+ env PATH=$(PREFIX)/ruby/bin:$$PATH \
+ LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) env MAKEFLAGS= make install-swig-rb
+ if [ $(ENABLE_PERL_BINDINGS) = yes ]; then \
+ cd $(svn_builddir) \
+ && env MAKEFLAGS= make install-swig-pl-lib; \
+ cd subversion/bindings/swig/perl/native \
+ && perl Makefile.PL PREFIX="$(SVN_PREFIX)" \
+ && env MAKEFLAGS= make install; \
+ fi
+ if [ $(ENABLE_JAVA_BINDINGS) = yes ]; then \
+ cd $(svn_builddir) \
+ && env MAKEFLAGS= make install-javahl; \
+ fi
+ touch $@
+
+# run svn regression tests
+HTTPD_CHECK_CONF=$(PREFIX)/httpd/conf/httpd-svn-check-$(WC).conf
+HTTPD_PROXY_CONF=$(PREFIX)/httpd/conf/httpd-svn-proxy-$(WC).conf
+HTTPD_CHECK_USERS=$(PREFIX)/httpd/conf/httpd-svn-check-users
+HTTPD_CHECK_GROUPS=$(PREFIX)/httpd/conf/httpd-svn-check-groups
+HTTPD_CHECK_PORT=8081
+HTTPD_PROXY_PORT=8082
+MOD_DONTDOTHAT_CONF=$(PREFIX)/httpd/conf/dontdothat
+
+$(MOD_DONTDOTHAT_CONF):
+ mkdir -p $(dir $@)
+ echo > $@.tmp '[recursive-actions]'
+ echo >>$@.tmp '/ = deny'
+ mv -f $@.tmp $@
+
+$(HTTPD_CHECK_GROUPS):
+ mkdir -p $(dir $@)
+ printf "random: jrandom\nconstant: jconstant\n" > $@
+
+$(HTTPD_CHECK_CONF): $(MOD_DONTDOTHAT_CONF) $(HTTPD_CHECK_GROUPS)
+ mkdir -p $(dir $@)
+ $(PREFIX)/httpd/bin/htpasswd -bc $(HTTPD_CHECK_USERS).tmp jrandom rayjandom
+ $(PREFIX)/httpd/bin/htpasswd -b $(HTTPD_CHECK_USERS).tmp jconstant rayjandom
+ $(PREFIX)/httpd/bin/htpasswd -b $(HTTPD_CHECK_USERS).tmp __dumpster__ __loadster__
+ $(PREFIX)/httpd/bin/htpasswd -b $(HTTPD_CHECK_USERS).tmp JRANDOM rayjandom
+ $(PREFIX)/httpd/bin/htpasswd -b $(HTTPD_CHECK_USERS).tmp JCONSTANT rayjandom
+ mv -f $(HTTPD_CHECK_USERS).tmp $(HTTPD_CHECK_USERS)
+ echo > $@.tmp '# httpd config for make check'
+ echo >>$@.tmp 'ServerRoot "$(PREFIX)/httpd"'
+ echo >>$@.tmp 'Listen localhost:$(HTTPD_CHECK_PORT)'
+ echo >>$@.tmp 'LoadModule dav_svn_module $(MOD_DAV_SVN)'
+ echo >>$@.tmp 'LoadModule authz_svn_module $(MOD_AUTHZ_SVN)'
+ echo >>$@.tmp 'LoadModule dontdothat_module $(MOD_DONTDOTHAT)'
+ echo >>$@.tmp 'DocumentRoot "$(PREFIX)/httpd/htdocs"'
+ echo >>$@.tmp '# These two Locations are used for "make check"'
+ echo >>$@.tmp '<Directory />'
+ echo >>$@.tmp ' Options FollowSymLinks'
+ echo >>$@.tmp ' AllowOverride None'
+ echo >>$@.tmp ' Order deny,allow'
+ echo >>$@.tmp ' Allow from all'
+ echo >>$@.tmp '</Directory>'
+ echo >>$@.tmp '<Location /svn-test-work/repositories>'
+ echo >>$@.tmp ' DAV svn'
+ echo >>$@.tmp ' SVNParentPath $(SVN_WC)/subversion/tests/cmdline/svn-test-work/repositories'
+ echo >>$@.tmp ' AuthzSVNAccessFile $(SVN_WC)/subversion/tests/cmdline/svn-test-work/authz'
+ echo >>$@.tmp ' AuthType Basic'
+ echo >>$@.tmp ' AuthName "Subversion Repository"'
+ echo >>$@.tmp ' AuthUserFile $(HTTPD_CHECK_USERS)'
+ echo >>$@.tmp ' Require valid-user'
+ifeq ($(USE_HTTPV1),yes)
+ echo >>$@.tmp ' SVNAdvertiseV2Protocol off'
+endif
+ifeq ($(USE_AUTHZ_SHORT_CIRCUIT),yes)
+ echo >>$@.tmp ' SVNPathAuthz short_circuit'
+endif
+ echo >>$@.tmp '</Location>'
+ echo >>$@.tmp '<Location /svn-test-work/local_tmp/repos>'
+ echo >>$@.tmp ' DAV svn'
+ echo >>$@.tmp ' SVNPath $(SVN_WC)/subversion/tests/cmdline/svn-test-work/local_tmp/repos'
+ echo >>$@.tmp ' AuthzSVNAccessFile $(SVN_WC)/subversion/tests/cmdline/svn-test-work/authz'
+ echo >>$@.tmp ' AuthType Basic'
+ echo >>$@.tmp ' AuthName "Subversion Repository"'
+ echo >>$@.tmp ' AuthUserFile $(HTTPD_CHECK_USERS)'
+ echo >>$@.tmp ' Require valid-user'
+ifeq ($(USE_HTTPV1),yes)
+ echo >>$@.tmp ' SVNAdvertiseV2Protocol off'
+endif
+ifeq ($(USE_AUTHZ_SHORT_CIRCUIT),yes)
+ echo >>$@.tmp ' SVNPathAuthz short_circuit'
+endif
+ echo >>$@.tmp '</Location>'
+ echo >>$@.tmp '# This Location lets you access repositories dropped in /tmp/'
+ echo >>$@.tmp '<Location /svn>'
+ echo >>$@.tmp ' DAV svn'
+ echo >>$@.tmp ' SVNParentPath /tmp'
+ echo >>$@.tmp ' Allow from all'
+ echo >>$@.tmp ' #AuthType Basic'
+ echo >>$@.tmp ' #AuthName "Subversion Repository"'
+ echo >>$@.tmp ' #AuthUserFile $(HTTPD_CHECK_USERS)'
+ echo >>$@.tmp ' #Require valid-user'
+ifeq ($(USE_HTTPV1),yes)
+ echo >> $@.tmp ' SVNAdvertiseV2Protocol off'
+endif
+ifeq ($(USE_AUTHZ_SHORT_CIRCUIT),yes)
+ echo >> $@.tmp ' SVNPathAuthz short_circuit'
+endif
+ echo >>$@.tmp '</Location>'
+ echo >>$@.tmp '# Location for tests using mod_dontdothat'
+ echo >>$@.tmp '<Location /ddt-test-work/repositories>'
+ echo >> $@.tmp 'DAV svn'
+ echo >> $@.tmp 'SVNParentPath $(SVN_WC)/subversion/tests/cmdline/svn-test-work/repositories'
+ echo >> $@.tmp 'AuthzSVNAccessFile $(SVN_WC)/subversion/tests/cmdline/svn-test-work/authz'
+ echo >> $@.tmp 'AuthType Basic'
+ echo >> $@.tmp 'AuthName "Subversion Repository"'
+ echo >> $@.tmp 'AuthzSVNAccessFile $(SVN_WC)/subversion/tests/cmdline/svn-test-work/authz'
+ echo >> $@.tmp 'AuthUserFile $(HTTPD_CHECK_USERS)'
+ echo >> $@.tmp 'Require valid-user'
+ifeq ($(USE_HTTPV1),yes)
+ echo >> $@.tmp ' SVNAdvertiseV2Protocol off'
+endif
+ echo >> $@.tmp 'DontDoThatConfigFile "$(MOD_DONTDOTHAT_CONF)"'
+ echo >> $@.tmp '</Location>'
+
+ echo >>$@.tmp '# Several locations for mod_authz_svn test follow'
+ echo >>$@.tmp '<Location /authz-test-work/anon>'
+ echo >>$@.tmp ' DAV svn'
+ echo >>$@.tmp ' SVNParentPath $(SVN_WC)/subversion/tests/cmdline/svn-test-work/local_tmp'
+ echo >>$@.tmp ' AuthzSVNAccessFile $(SVN_WC)/subversion/tests/cmdline/svn-test-work/authz'
+ifeq ($(USE_HTTPV1),yes)
+ echo >>$@.tmp ' SVNAdvertiseV2Protocol off'
+endif
+ echo >>$@.tmp ' SVNListParentPath On'
+ echo >>$@.tmp ' <IfModule mod_authz_core.c>'
+ echo >>$@.tmp ' Require all granted'
+ echo >>$@.tmp ' </IfModule>'
+ echo >>$@.tmp ' <IfModule !mod_authz_core.c>'
+ echo >>$@.tmp ' Allow from all'
+ echo >>$@.tmp ' </IfModule>'
+ifeq ($(USE_AUTHZ_SHORT_CIRCUIT),yes)
+ echo >>$@.tmp ' SVNPathAuthz short_circuit'
+endif
+ echo >>$@.tmp '</Location>'
+ echo >>$@.tmp '<Location /authz-test-work/mixed>'
+ echo >>$@.tmp ' DAV svn'
+ echo >>$@.tmp ' SVNParentPath $(SVN_WC)/subversion/tests/cmdline/svn-test-work/local_tmp'
+ echo >>$@.tmp ' AuthzSVNAccessFile $(SVN_WC)/subversion/tests/cmdline/svn-test-work/authz'
+ifeq ($(USE_HTTPV1),yes)
+ echo >>$@.tmp ' SVNAdvertiseV2Protocol off'
+endif
+ echo >>$@.tmp ' SVNListParentPath On'
+ echo >>$@.tmp ' AuthType Basic'
+ echo >>$@.tmp ' AuthName "Subversion Repository"'
+ echo >>$@.tmp ' AuthUserFile $(HTTPD_CHECK_USERS)'
+ echo >>$@.tmp ' Require valid-user'
+ echo >>$@.tmp ' Satisfy Any'
+ifeq ($(USE_AUTHZ_SHORT_CIRCUIT),yes)
+ echo >>$@.tmp ' SVNPathAuthz short_circuit'
+endif
+ echo >>$@.tmp '</Location>'
+ echo >>$@.tmp '<Location /authz-test-work/mixed-noauthwhenanon>'
+ echo >>$@.tmp ' DAV svn'
+ echo >>$@.tmp ' SVNParentPath $(SVN_WC)/subversion/tests/cmdline/svn-test-work/local_tmp'
+ echo >>$@.tmp ' AuthzSVNAccessFile $(SVN_WC)/subversion/tests/cmdline/svn-test-work/authz'
+ifeq ($(USE_HTTPV1),yes)
+ echo >>$@.tmp ' SVNAdvertiseV2Protocol off'
+endif
+ echo >>$@.tmp ' SVNListParentPath On'
+ echo >>$@.tmp ' AuthType Basic'
+ echo >>$@.tmp ' AuthName "Subversion Repository"'
+ echo >>$@.tmp ' AuthUserFile $(HTTPD_CHECK_USERS)'
+ echo >>$@.tmp ' Require valid-user'
+ echo >>$@.tmp ' AuthzSVNNoAuthWhenAnonymousAllowed On'
+ echo >>$@.tmp ' SVNPathAuthz On'
+ifeq ($(USE_AUTHZ_SHORT_CIRCUIT),yes)
+ echo >>$@.tmp ' SVNPathAuthz short_circuit'
+endif
+ echo >>$@.tmp '</Location>'
+ echo >>$@.tmp '<Location /authz-test-work/authn>'
+ echo >>$@.tmp ' DAV svn'
+ echo >>$@.tmp ' SVNParentPath $(SVN_WC)/subversion/tests/cmdline/svn-test-work/local_tmp'
+ echo >>$@.tmp ' AuthzSVNAccessFile $(SVN_WC)/subversion/tests/cmdline/svn-test-work/authz'
+ifeq ($(USE_HTTPV1),yes)
+ echo >>$@.tmp ' SVNAdvertiseV2Protocol off'
+endif
+ echo >>$@.tmp ' SVNListParentPath On'
+ echo >>$@.tmp ' AuthType Basic'
+ echo >>$@.tmp ' AuthName "Subversion Repository"'
+ echo >>$@.tmp ' AuthUserFile $(HTTPD_CHECK_USERS)'
+ echo >>$@.tmp ' Require valid-user'
+ifeq ($(USE_AUTHZ_SHORT_CIRCUIT),yes)
+ echo >>$@.tmp ' SVNPathAuthz short_circuit'
+endif
+ echo >>$@.tmp '</Location>'
+ echo >>$@.tmp '<Location /authz-test-work/authn-anonoff>'
+ echo >>$@.tmp ' DAV svn'
+ echo >>$@.tmp ' SVNParentPath $(SVN_WC)/subversion/tests/cmdline/svn-test-work/local_tmp'
+ echo >>$@.tmp ' AuthzSVNAccessFile $(SVN_WC)/subversion/tests/cmdline/svn-test-work/authz'
+ifeq ($(USE_HTTPV1),yes)
+ echo >>$@.tmp ' SVNAdvertiseV2Protocol off'
+endif
+ echo >>$@.tmp ' SVNListParentPath On'
+ echo >>$@.tmp ' AuthType Basic'
+ echo >>$@.tmp ' AuthName "Subversion Repository"'
+ echo >>$@.tmp ' AuthUserFile $(HTTPD_CHECK_USERS)'
+ echo >>$@.tmp ' Require valid-user'
+ echo >>$@.tmp ' AuthzSVNAnonymous Off'
+ echo >>$@.tmp ' SVNPathAuthz On'
+ echo >>$@.tmp '</Location>'
+ echo >>$@.tmp '<Location /authz-test-work/authn-lcuser>'
+ echo >>$@.tmp ' DAV svn'
+ echo >>$@.tmp ' SVNParentPath $(SVN_WC)/subversion/tests/cmdline/svn-test-work/local_tmp'
+ echo >>$@.tmp ' AuthzSVNAccessFile $(SVN_WC)/subversion/tests/cmdline/svn-test-work/authz'
+ifeq ($(USE_HTTPV1),yes)
+ echo >>$@.tmp ' SVNAdvertiseV2Protocol off'
+endif
+ echo >>$@.tmp ' SVNListParentPath On'
+ echo >>$@.tmp ' AuthType Basic'
+ echo >>$@.tmp ' AuthName "Subversion Repository"'
+ echo >>$@.tmp ' AuthUserFile $(HTTPD_CHECK_USERS)'
+ echo >>$@.tmp ' Require valid-user'
+ echo >>$@.tmp ' AuthzForceUsernameCase Lower'
+ifeq ($(USE_AUTHZ_SHORT_CIRCUIT),yes)
+ echo >>$@.tmp ' SVNPathAuthz short_circuit'
+endif
+ echo >>$@.tmp '</Location>'
+ echo >>$@.tmp '<Location /authz-test-work/authn-group>'
+ echo >>$@.tmp ' DAV svn'
+ echo >>$@.tmp ' SVNParentPath $(SVN_WC)/subversion/tests/cmdline/svn-test-work/local_tmp'
+ echo >>$@.tmp ' AuthzSVNAccessFile $(SVN_WC)/subversion/tests/cmdline/svn-test-work/authz'
+ifeq ($(USE_HTTPV1),yes)
+ echo >>$@.tmp ' SVNAdvertiseV2Protocol off'
+endif
+ echo >>$@.tmp ' SVNListParentPath On'
+ echo >>$@.tmp ' AuthType Basic'
+ echo >>$@.tmp ' AuthName "Subversion Repository"'
+ echo >>$@.tmp ' AuthUserFile $(HTTPD_CHECK_USERS)'
+ echo >>$@.tmp ' AuthGroupFile $(HTTPD_CHECK_GROUPS)'
+ echo >>$@.tmp ' Require group random'
+ echo >>$@.tmp ' AuthzSVNAuthoritative Off'
+ echo >>$@.tmp ' SVNPathAuthz On'
+ echo >>$@.tmp '</Location>'
+ echo >>$@.tmp '<IfModule mod_authz_core.c>'
+ echo >>$@.tmp ' <Location /authz-test-work/sallrany>'
+ echo >>$@.tmp ' DAV svn'
+ echo >>$@.tmp ' SVNParentPath $(SVN_WC)/subversion/tests/cmdline/svn-test-work/local_tmp'
+ echo >>$@.tmp ' AuthzSVNAccessFile $(SVN_WC)/subversion/tests/cmdline/svn-test-work/authz'
+ifeq ($(USE_HTTPV1),yes)
+ echo >>$@.tmp ' SVNAdvertiseV2Protocol off'
+endif
+ echo >>$@.tmp ' SVNListParentPath On'
+ echo >>$@.tmp ' AuthType Basic'
+ echo >>$@.tmp ' AuthName "Subversion Repository"'
+ echo >>$@.tmp ' AuthUserFile $(HTTPD_CHECK_USERS)'
+ echo >>$@.tmp ' AuthzSendForbiddenOnFailure On'
+ echo >>$@.tmp ' Satisfy All'
+ echo >>$@.tmp ' <RequireAny>'
+ echo >>$@.tmp ' Require valid-user'
+ echo >>$@.tmp ' Require expr req("ALLOW") == "1"'
+ echo >>$@.tmp ' </RequireAny>'
+ifeq ($(USE_AUTHZ_SHORT_CIRCUIT),yes)
+ echo >>$@.tmp ' SVNPathAuthz short_circuit'
+endif
+ echo >>$@.tmp ' </Location>'
+ echo >>$@.tmp ' <Location /authz-test-work/sallrall>'
+ echo >>$@.tmp ' DAV svn'
+ echo >>$@.tmp ' SVNParentPath $(SVN_WC)/subversion/tests/cmdline/svn-test-work/local_tmp'
+ echo >>$@.tmp ' AuthzSVNAccessFile $(SVN_WC)/subversion/tests/cmdline/svn-test-work/authz'
+ifeq ($(USE_HTTPV1),yes)
+ echo >>$@.tmp ' SVNAdvertiseV2Protocol off'
+endif
+ echo >>$@.tmp ' SVNListParentPath On'
+ echo >>$@.tmp ' AuthType Basic'
+ echo >>$@.tmp ' AuthName "Subversion Repository"'
+ echo >>$@.tmp ' AuthUserFile $(HTTPD_CHECK_USERS)'
+ echo >>$@.tmp ' AuthzSendForbiddenOnFailure On'
+ echo >>$@.tmp ' Satisfy All'
+ echo >>$@.tmp ' <RequireAll>'
+ echo >>$@.tmp ' Require valid-user'
+ echo >>$@.tmp ' Require expr req("ALLOW") == "1"'
+ echo >>$@.tmp ' </RequireAll>'
+ifeq ($(USE_AUTHZ_SHORT_CIRCUIT),yes)
+ echo >>$@.tmp ' SVNPathAuthz short_circuit'
+endif
+ echo >>$@.tmp ' </Location>'
+ echo >>$@.tmp '</IfModule>'
+ echo >>$@.tmp 'RedirectMatch permanent ^/svn-test-work/repositories/REDIRECT-PERM-(.*)$$ /svn-test-work/repositories/$$1'
+ echo >>$@.tmp 'RedirectMatch ^/svn-test-work/repositories/REDIRECT-TEMP-(.*)$$ /svn-test-work/repositories/$$1'
+ echo >>$@.tmp 'Include "conf/$(SVN_REL_WC)*-custom.conf"'
+ echo >> $@.tmp '#SVNInMemoryCacheSize 0'
+ echo >> $@.tmp '#SVNCacheTextDeltas Off'
+ echo >> $@.tmp '#SVNCacheRevProps Off'
+ mv -f $@.tmp $@
+
+$(HTTPD_PROXY_CONF): $(HTTPD_CHECK_CONF)
+ mkdir -p $(dir $@)
+ echo > $@.tmp '# httpd config for a write-through proxy'
+ echo >>$@.tmp 'ServerRoot "$(PREFIX)/httpd"'
+ echo >>$@.tmp 'Listen localhost:$(HTTPD_PROXY_PORT)'
+ echo >>$@.tmp 'LoadModule dav_svn_module $(MOD_DAV_SVN)'
+ echo >>$@.tmp 'LoadModule authz_svn_module $(MOD_AUTHZ_SVN)'
+ echo >>$@.tmp 'LoadModule dontdothat_module $(MOD_DONTDOTHAT)'
+ echo >>$@.tmp 'DocumentRoot "$(PREFIX)/httpd/htdocs"'
+ echo >>$@.tmp '# This Location lets you access repositories dropped in /tmp/svn-$(BRANCH)-proxy'
+ echo >>$@.tmp '<Location /svn>'
+ echo >>$@.tmp ' DAV svn'
+ echo >>$@.tmp ' SVNParentPath /tmp/svn-$(BRANCH)-proxy'
+ echo >>$@.tmp ' SVNMasterURI http://localhost:$(HTTPD_CHECK_PORT)/svn/'
+ echo >>$@.tmp ' Allow from all'
+ echo >>$@.tmp ' #AuthType Basic'
+ echo >>$@.tmp ' #AuthName "Subversion Repository"'
+ echo >>$@.tmp ' #AuthUserFile $(HTTPD_CHECK_USERS)'
+ echo >>$@.tmp ' #Require valid-user'
+ifeq ($(USE_HTTPV1),yes)
+ echo >> $@.tmp ' SVNAdvertiseV2Protocol off'
+endif
+ifeq ($(USE_AUTHZ_SHORT_CIRCUIT),yes)
+ echo >> $@.tmp ' SVNPathAuthz short_circuit'
+endif
+ echo >>$@.tmp '</Location>'
+ echo >>$@.tmp '# This Location allows repositories to be synced'
+ echo >>$@.tmp '<Location /svn-proxy-sync>'
+ echo >>$@.tmp 'DAV svn'
+ echo >>$@.tmp 'SVNParentPath /tmp/svn-$(BRANCH)-proxy'
+ echo >>$@.tmp 'Allow from all'
+ echo >>$@.tmp '</Location>'
+ mv -f $@.tmp $@
+
+.PHONY: libpath
+libpath:
+ @echo export LD_LIBRARY_PATH="$(LD_LIBRARY_PATH):$$LD_LIBRARY_PATH" \
+ "PYTHONPATH=$(SVN_PREFIX)/lib/svn-python"
+#
+# OpenBSD requires an LD_PRELOAD hack to dlopen() libraries linked to
+# libpthread (e.g. libsvn_auth_gnome_keyring.so) into executables that
+# aren't linked to libpthread.
+ifeq ($(UNAME),OpenBSD)
+LIB_PTHREAD_HACK=LD_PRELOAD=libpthread.so
+endif
+
+.PHONY: start-svnserve stop-svnserve start-httpd stop-httpd
+
+HTTPD_CMD = env LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) $(LIB_PTHREAD_HACK) \
+ $(PREFIX)/httpd/bin/apachectl
+HTTPD_START_CMD = $(HTTPD_CMD) -f $(HTTPD_CHECK_CONF) -k start
+HTTPD_START_CMD_PROXY = $(HTTPD_CMD) -f $(HTTPD_PROXY_CONF)
+HTTPD_START_CMD_DEBUG = $(HTTPD_START_CMD) -X
+HTTPD_STOP_CMD = $(HTTPD_CMD) -f $(HTTPD_CHECK_CONF) -k stop; sleep 3
+
+SVNSERVE_START_CMD = (test -e $(PWD)/svnserve-*.pid && \
+ ls $(PWD)/svnserve-*.pid | while read pidfile; do \
+ kill `cat "$$pidfile"`; sleep 3; \
+ rm -f $$pidfile; \
+ done); \
+ $(SVN_PREFIX)/bin/svnserve \
+ --listen-host 127.0.0.1 \
+ --pid-file $(PWD)/svnserve-$(WC).pid \
+ -d -r $(svn_builddir)/subversion/tests/cmdline
+SVNSERVE_STOP_CMD = kill `cat $(PWD)/svnserve-$(WC).pid`; sleep 3; \
+ rm -f $(PWD)/svnserve-$(WC).pid
+
+start-httpd: $(HTTPD_CHECK_CONF)
+ $(HTTPD_START_CMD)
+ @echo "To run tests over http, run:"
+ @echo " make check BASE_URL=http://localhost:$(HTTPD_CHECK_PORT)"
+ @echo "The URL http://localhost:$(HTTPD_CHECK_PORT)/svn/"
+ @echo "lets you access repositories dropped into /tmp"
+
+start-httpd-debug: $(HTTPD_CHECK_CONF)
+ $(HTTPD_START_CMD_DEBUG) &
+ @echo "To run tests over http, run:"
+ @echo " make check BASE_URL=http://localhost:$(HTTPD_CHECK_PORT)"
+ @echo "The URL http://localhost:$(HTTPD_CHECK_PORT)/svn/"
+ @echo "lets you access repositories dropped into /tmp"
+ @echo "Trying to attach gdb to httpd..."
+ @sleep 1
+ gdb $(PREFIX)/httpd/bin/httpd `cat $(PREFIX)/httpd/logs/httpd.pid`
+
+start-httpd-proxy: $(HTTPD_PROXY_CONF)
+ $(HTTPD_START_CMD_PROXY)
+ @echo "The URL http://localhost:$(HTTPD_PROXY_PORT)/svn/"
+ @echo "lets you access repositories dropped into /tmp/svn-$(BRANCH)-proxy"
+
+stop-httpd: $(HTTPD_CHECK_CONF)
+ $(HTTPD_STOP_CMD)
+
+stop-httpd-proxy: $(HTTPD_PROXY_CONF)
+ pkill -f '$(PREFIX)/httpd/bin/httpd -f $(HTTPD_PROXY_CONF)'
+
+start-svnserve: $(SVN_OBJDIR)/.compiled
+ $(SVNSERVE_START_CMD)
+
+stop-svnserve:
+ $(SVNSERVE_STOP_CMD)
+
+define do_check
+-cd $(svn_builddir) && for fs in fsfs bdb; do \
+ echo "Begin test: $(subst svn-check-,,$@) x $$fs"; \
+ test -d "$(RAMDISK)/tmp" && export TMPDIR="$(RAMDISK)/tmp"; \
+ env LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) $(LIB_PTHREAD_HACK) \
+ env MAKEFLAGS= make check PARALLEL=$(PARALLEL) CLEANUP=$(CLEANUP) \
+ EXCLUSIVE_WC_LOCKS=$(EXCLUSIVE_WC_LOCKS) \
+ MEMCACHED_SERVER=$(MEMCACHED_SERVER) $1 FS_TYPE=$$fs; \
+ for log in tests.log fails.log; do \
+ test -f $$log && mv -f $$log $$log.$@-$$fs; \
+ done; \
+done
+endef
+
+TEST_WORK=$(svn_builddir)/subversion/tests/cmdline/svn-test-work
+svn-check-prepare-ramdisk:
+ -rm -rf "$(TEST_WORK)"; \
+ if [ -d "$(RAMDISK)" ] && \
+ touch "$(RAMDISK)/$(SVN_REL_WC).writetest" && \
+ mkdir -p "$(RAMDISK)/$(SVN_REL_WC)"; then \
+ rm -f "$(RAMDISK)/$(SVN_REL_WC).writetest"; \
+ ln -s "$(RAMDISK)/$(SVN_REL_WC)" "$(TEST_WORK)"; \
+ mkdir -p "$(RAMDISK)/tmp"; \
+ fi
+
+ifndef NEON_FLAG
+svn-check-neon:
+ @echo Neon is not supported by this build of Subversion, skipping tests
+ @true
+else
+svn-check-neon: $(HTTPD_CHECK_CONF) $(SVN_OBJDIR)/.compiled $(SVN_OBJDIR)/.bindings-compiled svn-check-prepare-ramdisk
+ $(HTTPD_START_CMD)
+ $(call do_check,BASE_URL=http://localhost:$(HTTPD_CHECK_PORT) HTTP_LIBRARY=neon)
+ $(HTTPD_STOP_CMD)
+endif
+
+svn-check-serf: $(HTTPD_CHECK_CONF) $(SVN_OBJDIR)/.compiled $(SVN_OBJDIR)/.bindings-compiled svn-check-prepare-ramdisk
+ $(HTTPD_START_CMD)
+ $(call do_check,BASE_URL=http://localhost:$(HTTPD_CHECK_PORT) HTTP_LIBRARY=serf)
+ $(HTTPD_STOP_CMD)
+
+svn-check-local: svn-check-prepare-ramdisk
+ $(call do_check)
+
+svn-check-svn: svn-check-prepare-ramdisk
+ $(SVNSERVE_START_CMD)
+ $(call do_check,BASE_URL=svn://127.0.0.1)
+ $(SVNSERVE_STOP_CMD)
+
+.PHONY: svn-check-swig-pl svn-check-swig-py svn-check-swig-rb svn-check-javahl
+svn-check-bindings: svn-check-swig-pl svn-check-swig-py svn-check-swig-rb \
+ svn-check-javahl
+
+RUBYLIB=$(SVN_PREFIX)/lib/ruby/site_ruby$(shell grep \
+ ^svn_cv_ruby_sitedir_archsuffix $(svn_builddir)/config.log | \
+ cut -d'=' -f2):$(SVN_PREFIX)/lib/ruby/site_ruby$(shell \
+ grep ^svn_cv_ruby_sitedir_libsuffix $(svn_builddir)/config.log | \
+ cut -d'=' -f2)
+svn-check-swig-pl:
+ -if [ $(ENABLE_PERL_BINDINGS) = yes ]; then \
+ (cd $(svn_builddir) && \
+ env LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) \
+ $(LIB_PTHREAD_HACK) \
+ env MAKEFLAGS= make check-swig-pl 2>&1) | \
+ tee $(svn_builddir)/tests.log.bindings.pl; \
+ fi
+
+svn-check-swig-py:
+ -(cd $(svn_builddir) && \
+ env LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) \
+ env MAKEFLAGS= make check-swig-py 2>&1) | \
+ tee $(svn_builddir)/tests.log.bindings.py
+
+# We add the svn prefix to PATH here because the ruby tests
+# attempt to start an svnserve binary found in PATH.
+svn-check-swig-rb:
+ (cd $(svn_builddir) && \
+ env RUBYLIB=$(RUBYLIB) \
+ LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) \
+ PATH=$(SVN_PREFIX)/bin:$$PATH \
+ $(LIB_PTHREAD_HACK) \
+ env MAKEFLAGS= make check-swig-rb 2>&1) | \
+ tee $(svn_builddir)/tests.log.bindings.rb
+
+svn-check-javahl:
+ -if [ $(ENABLE_JAVA_BINDINGS) = yes ]; then \
+ (cd $(svn_builddir) && \
+ env LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) \
+ env MAKEFLAGS= make $(JAVAHL_CHECK_TARGET) 2>&1) | \
+ tee $(svn_builddir)/tests.log.bindings.javahl; \
+ fi
+
+svn-check: svn-check-prepare-ramdisk svn-check-local svn-check-svn \
+ svn-check-neon svn-check-serf svn-check-bindings
+
+.PHONY: sign-email
+ifdef NEON_FLAG
+NEON_STR=ra_neon |
+NEON_VER_LINE=@echo "neon: $(NEON_VER)"
+endif
+sign-email:
+ @echo "Summary: +1 to release"
+ @echo ""
+ @echo "Tested: [bdb | fsfs] x [ra_local | ra_svn | $(NEON_STR)ra_serf]"
+ @echo " swig bindings"
+ifeq ($(ENABLE_JAVA_BINDINGS),yes)
+ @echo " javahl bindings"
+endif
+ @echo ""
+ @echo "Test results: All passed."
+ @echo ""
+ @echo "Platform: `uname -r -s -m`"
+ @echo ""
+ @echo "Dependencies:"
+ @echo "bdb: $(BDB_VER)"
+ifeq ($(USE_APR_ICONV),yes)
+ @echo "apr-iconv: $(APR_ICONV_VER)"
+else
+ @echo "GNU-iconv: $(GNU_ICONV_VER)"
+endif
+ @echo "apr: $(APR_VER)"
+ @echo "apr-util: $(APR_UTIL_VER)"
+ @echo "httpd: $(HTTPD_VER)"
+ $(NEON_VER_LINE)
+ @echo "serf: $(SERF_VER)"
+ @echo "cyrus-sasl: $(CYRUS_SASL_VER)"
+ @echo "sqlite: $(SQLITE_VER)"
+ifdef LZ4_FLAG
+ @echo "lz4: $(LZ4_VER)"
+endif
+ @echo "libssl: `openssl version`"
+ @echo "swig: `swig -version | grep Version | cut -d' ' -f3`"
+ @echo "python: $(PYTHON_VER)"
+ @echo "perl: `eval \`perl -V:version\`; echo $$version`"
+ @echo "ruby: $(RUBY_VER)"
+ifeq ($(ENABLE_JAVA_BINDINGS),yes)
+ @echo "java: `java -version 2>&1 | grep version | cut -d' ' -f3 | sed -e 's/\"//g'`"
+endif
+ @echo ""
+ @echo "Signatures:"
+ @echo
+ @echo "subversion-$(TAG).tar.gz"
+ @echo "`cat subversion-$(TAG).tar.gz.asc`"
+ @echo
+ @echo "subversion-$(TAG).tar.bz2"
+ @echo "`cat subversion-$(TAG).tar.bz2.asc`"
diff --git a/tools/dev/unix-build/README b/tools/dev/unix-build/README
new file mode 100644
index 0000000..13cdc42
--- /dev/null
+++ b/tools/dev/unix-build/README
@@ -0,0 +1,96 @@
+Introduction
+============
+Makefile.svn helps Subversion developers on unix-like systems set up an
+SVN development environment without requiring root privileges. It does
+this by fetching Subversion along with many of its dependencies from
+the internet and building them using sane defaults suitable for
+development (for example, it invokes --enable-maintainer-mode while
+compiling Subversion itself). However, indirect dependencies are not
+covered; you need OpenSSL installed to get SSL support in neon and
+serf for example. Also, it doesn't build all the bindings by default
+(javahl for example).
+
+This README only covers basic usage. Please read Makefile.svn for more
+details.
+
+Requirements
+============
+In addition to the usual GNU buildtools including a sane compiler and
+GNU autotools, some version of Subversion is required to be in
+$PATH. It is used to fetch the desired version of Subversion from the
+repository.
+
+Usage
+=====
+First, choose a directory $(SVN_DEV) to set up the environment.
+For example, $(SVN_DEV) could be the directory "~/svn".
+Note that this directory cannot be changed later because the script
+hardcodes build and link paths relative to the current working directory.
+
+ $ mkdir $(SVN_DEV)
+
+Now change into this directory and make the Makefile available in it:
+
+ $ cd $(SVN_DEV)
+ $ svn checkout https://svn.apache.org/repos/asf/subversion/trunk/tools/dev/unix-build
+ $ ln -s unix-build/Makefile.svn Makefile
+
+To fetch and build trunk, simply don't pass anything, just run 'make':
+
+ $ cd $(SVN_DEV)
+ $ make
+
+Pass the branch you want to build in BRANCH, e.g.
+ $ make BRANCH="1.5.x"
+You can also pass a tag to build:
+ $ make TAG="1.6.6"
+And you can specify a working copy to use, in case you need more
+than one working copy of the same branch:
+ $ make BRANCH="1.6.x" WC="1.6.x-test2"
+
+When the script has finished fetching and building, it uses
+$(SVN_DEV)/prefix to install Subversion libraries and
+binaries. $(SVN_DEV)/prefix/svn-trunk (or whatever you choose to
+build) will contain the latest Subversion binaries. You can add
+$(SVN_DEV)/prefix/svn-trunk/bin to your $PATH to use them:
+
+ $ export PATH="$(SVN_DEV)/prefix/svn-trunk/bin:$PATH"
+
+The Makefile in $(SVN_DEV)/svn-trunk is configured to build with sane
+options: while developing Subversion, simply `svn up` to pull the
+latest changes, `make` and `make install` to install the binaries in
+$(SVN_DEV)/prefix/svn-trunk. This usually works fine. If not, you may
+need to use the 'svn-reset' target and recompile everything.
+
+If at any point, you want to recompile any of the packages with the
+default configuration in Makefile.svn, use the *-clean and *-reset
+target in Makefile.svn before trying to rebuild again. For example:
+
+ $ make svn-clean
+ $ make svn-reset
+ $ make
+
+Or, if you want to recompile svn and all dependencies:
+
+ $ make clean
+ $ make reset
+ $ make
+
+If you want to remove everything including the installed binaries effectively
+returning to the starting point, use the "nuke" target (BE CAREFUL, this will
+remove the 'svn' binary compiled from trunk which you might need to manage
+existing working copies):
+
+ $ make nuke
+
+Extended usage
+==============
+The script can also run Subversion's regression test suite via all
+repository backends and RA methods. It generates the necessary
+configuration files and starts svnserve and httpd daemons
+automatically on non-privileged ports. The default test target to test
+everything is "svn-check".
+
+Notes
+=====
+The script currently doesn't build Ctypes Python bindings.
diff --git a/tools/dev/verify-history.py b/tools/dev/verify-history.py
new file mode 100755
index 0000000..a408cc7
--- /dev/null
+++ b/tools/dev/verify-history.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+# This program is used to verify the FS history code.
+#
+# The basic gist is this: given a repository, a path in that
+# repository, and a revision at which to begin plowing through history
+# (towards revision 1), verify that each history object returned by
+# the svn_fs_history_prev() interface -- indirectly via
+# svn_repos_history() -- represents a revision in which the node being
+# tracked actually changed, or where a parent directory of the node
+# was copied, according to the list of paths changed as reported by
+# svn_fs_paths_changed().
+#
+# A fun way to run this:
+#
+# #!/bin/sh
+#
+# export VERIFY=/path/to/verify-history.py
+# export MYREPOS=/path/to/repos
+#
+# # List the paths in HEAD of the repos (filtering out the directories)
+# for VCFILE in `svn ls -R file://${MYREPOS} | grep -v '/$'`; do
+# echo "Checking ${VCFILE}"
+# ${VERIFY} ${MYREPOS} ${VCFILE}
+# done
+
+import sys
+import string
+from svn import core, repos, fs
+
+class HistoryChecker:
+ def __init__(self, fs_ptr):
+ self.fs_ptr = fs_ptr
+
+ def _check_history(self, path, revision):
+ root = fs.revision_root(self.fs_ptr, revision)
+ changes = fs.paths_changed(root)
+ while True:
+ if path in changes:
+ return 1
+ if path == '/':
+ return 0
+ idx = path.rfind('/')
+ if idx != -1:
+ path = path[:idx]
+ else:
+ return 0
+
+ def add_history(self, path, revision, pool=None):
+ if not self._check_history(path, revision):
+ print("**WRONG** %8d %s" % (revision, path))
+ else:
+ print(" %8d %s" % (revision, path))
+
+
+def check_history(fs_ptr, path, revision):
+ history = HistoryChecker(fs_ptr)
+ repos.history(fs_ptr, path, history.add_history, 1, revision, 1)
+
+
+def main():
+ argc = len(sys.argv)
+ if argc < 3 or argc > 4:
+ print("Usage: %s PATH-TO-REPOS PATH-IN-REPOS [REVISION]" % sys.argv[0])
+ sys.exit(1)
+
+ fs_ptr = repos.fs(repos.open(sys.argv[1]))
+ if argc == 3:
+ revision = fs.youngest_rev(fs_ptr)
+ else:
+ revision = int(sys.argv[3])
+ check_history(fs_ptr, sys.argv[2], revision)
+ sys.exit(0)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/dev/warn-ignored-err.sh b/tools/dev/warn-ignored-err.sh
new file mode 100755
index 0000000..2e4a106
--- /dev/null
+++ b/tools/dev/warn-ignored-err.sh
@@ -0,0 +1,83 @@
+#!/bin/sh
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+HELP="\
+Usage: $0 [--remove] [FILE...]
+
+Insert or remove the GCC attribute \"warn_unused_result\" on each function
+that returns a Subversion error, in the specified files or, by default,
+*.h and *.c in the ./subversion and ./tools trees.
+"
+
+export LC_ALL=C
+
+# Parse options
+REMOVE=
+case "$1" in
+--remove) REMOVE=1; shift;;
+--help) echo "$HELP"; exit 0;;
+--*) echo "$0: unknown option \"$1\"; try \"--help\""; exit 1;;
+esac
+
+# Set the positional parameters to the default files if none specified
+if [ $# = 0 ]; then
+ set -- `find subversion/ tools/ -name '*.[ch]'`
+fi
+
+# A line that declares a function return type of "svn_error_t *" looks like:
+# - Possibly leading whitespace, though not often.
+# - Possibly "static" or "typedef".
+# - The return type "svn_error_t *".
+# - Possibly a function or pointer-to-function declarator:
+# - "identifier"
+# - "(identifier)" (used in some typedefs)
+# - "(*identifier)"
+# with either nothing more, or a "(" next (especially not "," or ";" or "="
+# which all indicate a variable rather than a function).
+
+# Regular expressions for "sed"
+# Note: take care in matching back-reference numbers to parentheses
+PREFIX="^\( *\| *static *\| *typedef *\)"
+RET_TYPE="\(svn_error_t *\* *\)"
+IDENT="[a-zA-Z_][a-zA-Z0-9_]*"
+DECLR="\($IDENT\|( *\(\*\|\) *$IDENT *)\)"
+SUFFIX="\($DECLR *\((.*\|\)\|\)$"
+
+# The attribute string to be inserted or removed
+ATTRIB_RE="__attribute__((warn_unused_result))" # regex version of it
+ATTRIB_STR="__attribute__((warn_unused_result))" # plain text version of it
+
+if [ $REMOVE ]; then
+ SUBST="s/$PREFIX$ATTRIB_RE $RET_TYPE$SUFFIX/\1\2\3/"
+else
+ SUBST="s/$PREFIX$RET_TYPE$SUFFIX/\1$ATTRIB_STR \2\3/"
+fi
+
+for F do
+ # Edit the file, leaving a backup suffixed with a tilde
+ { sed -e "$SUBST" "$F" > "$F~1" &&
+ { ! cmp -s "$F" "$F~1"; } &&
+ mv "$F" "$F~" && # F is briefly absent now; a copy could avoid this
+ mv "$F~1" "$F"
+ } ||
+ # If anything went wrong or no change was made, remove the temporary file
+ rm "$F~1"
+done
diff --git a/tools/dev/wc-format.py b/tools/dev/wc-format.py
new file mode 100755
index 0000000..3ecfad0
--- /dev/null
+++ b/tools/dev/wc-format.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import os
+import sqlite3
+import sys
+
+MIN_SINGLE_DB_FORMAT = 19
+
+def get_format(wc_path):
+ entries = os.path.join(wc_path, '.svn', 'entries')
+ wc_db = os.path.join(wc_path, '.svn', 'wc.db')
+
+ formatno = 'not under version control'
+
+ if os.path.exists(wc_db):
+ conn = sqlite3.connect(wc_db)
+ curs = conn.cursor()
+ curs.execute('pragma user_version;')
+ formatno = curs.fetchone()[0]
+ elif os.path.exists(entries):
+ formatno = int(open(entries).readline())
+ elif os.path.exists(wc_path):
+ parent_path = os.path.dirname(os.path.abspath(wc_path))
+ if wc_path != parent_path:
+ formatno = get_format(parent_path)
+ if formatno >= MIN_SINGLE_DB_FORMAT:
+ return formatno
+
+ return formatno
+
+def print_format(wc_path):
+ # see subversion/libsvn_wc/wc.h for format values and information
+ # 1.0.x -> 1.3.x: format 4
+ # 1.4.x: format 8
+ # 1.5.x: format 9
+ # 1.6.x: format 10
+ # 1.7.x: format 29
+ formatno = get_format(wc_path)
+ print('%s: %s' % (wc_path, formatno))
+
+
+if __name__ == '__main__':
+ paths = sys.argv[1:]
+ if not paths:
+ paths = ['.']
+ for wc_path in paths:
+ print_format(wc_path)
diff --git a/tools/dev/wc-ng/bump-to-19.py b/tools/dev/wc-ng/bump-to-19.py
new file mode 100755
index 0000000..6f17a1b
--- /dev/null
+++ b/tools/dev/wc-ng/bump-to-19.py
@@ -0,0 +1,357 @@
+#!/usr/bin/env python
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""This program converts a Subversion WC from 1.7-dev format 18 to
+ 1.7-dev format 19 by migrating data from multiple DBs to a single DB.
+
+ Usage: bump-to-19.py WC_ROOT_DIR
+ where WC_ROOT_DIR is the path to the WC root directory.
+
+ Skips non-WC dirs and WC dirs that are not at format 18."""
+
+# TODO: Detect '_svn' as an alternative to '.svn'.
+
+# TODO: Probably should remove any directory that is in state to-be-deleted
+# and doesn't have its 'keep_local' flag set. Otherwise it will
+# become unversioned after commit, whereas format-18 and earlier would
+# have deleted it after commit. Before deleting we should check there
+# are no unversioned things inside, and maybe even check for "local
+# mods" even though that's logically impossible. On the other hand
+# it's not a big deal for the user to clean these up manually.
+
+
+import sys, os, shutil, sqlite3
+
+dot_svn = '.svn'
+
+def dotsvn_path(wc_path):
+ return os.path.join(wc_path, dot_svn)
+
+def db_path(wc_path):
+ return os.path.join(wc_path, dot_svn, 'wc.db')
+
+def pristine_path(wc_path):
+ return os.path.join(wc_path, dot_svn, 'pristine')
+
+def tmp_path(wc_path):
+ return os.path.join(wc_path, dot_svn, 'tmp')
+
+class NotASubversionWC(Exception):
+ def __init__(self, wc_path):
+ self.wc_path = wc_path
+ def __str__(self):
+ return "not a Subversion WC: '" + self.wc_path + "'"
+
+class WrongFormatException(Exception):
+ def __init__(self, wc_dir, format):
+ self.wc_dir = wc_dir
+ self.format = format
+ def __str__(self):
+ return "format is " + str(self.format) + " not 18: '" + self.wc_dir + "'"
+
+
+
+STMT_COPY_BASE_NODE_TABLE_TO_WCROOT_DB1 = \
+ "INSERT OR REPLACE INTO root.BASE_NODE ( " \
+ " wc_id, local_relpath, repos_id, repos_relpath, parent_relpath, " \
+ " presence, kind, revnum, checksum, translated_size, changed_rev, " \
+ " changed_date, changed_author, depth, symlink_target, last_mod_time, " \
+ " properties, dav_cache, incomplete_children, file_external ) " \
+ "SELECT wc_id, ?1, repos_id, repos_relpath, ?2 AS parent_relpath, " \
+ " presence, kind, revnum, checksum, translated_size, changed_rev, " \
+ " changed_date, changed_author, depth, symlink_target, last_mod_time, " \
+ " properties, dav_cache, incomplete_children, file_external " \
+ "FROM BASE_NODE WHERE local_relpath = ''; "
+
+STMT_COPY_BASE_NODE_TABLE_TO_WCROOT_DB2 = \
+ "INSERT INTO root.BASE_NODE ( " \
+ " wc_id, local_relpath, repos_id, repos_relpath, parent_relpath, " \
+ " presence, kind, revnum, checksum, translated_size, changed_rev, " \
+ " changed_date, changed_author, depth, symlink_target, last_mod_time, " \
+ " properties, dav_cache, incomplete_children, file_external ) " \
+ "SELECT wc_id, ?1 || '/' || local_relpath, repos_id, repos_relpath, " \
+ " ?1 AS parent_relpath, " \
+ " presence, kind, revnum, checksum, translated_size, changed_rev, " \
+ " changed_date, changed_author, depth, symlink_target, last_mod_time, " \
+ " properties, dav_cache, incomplete_children, file_external " \
+ "FROM BASE_NODE WHERE local_relpath != ''; "
+
+STMT_COPY_WORKING_NODE_TABLE_TO_WCROOT_DB1 = \
+ "INSERT OR REPLACE INTO root.WORKING_NODE ( " \
+ " wc_id, local_relpath, parent_relpath, presence, kind, checksum, " \
+ " translated_size, changed_rev, changed_date, changed_author, depth, " \
+ " symlink_target, copyfrom_repos_id, copyfrom_repos_path, copyfrom_revnum, " \
+ " moved_here, moved_to, last_mod_time, properties, keep_local ) " \
+ "SELECT wc_id, ?1, ?2 AS parent_relpath, " \
+ " presence, kind, checksum, " \
+ " translated_size, changed_rev, changed_date, changed_author, depth, " \
+ " symlink_target, copyfrom_repos_id, copyfrom_repos_path, copyfrom_revnum, " \
+ " moved_here, moved_to, last_mod_time, properties, keep_local " \
+ "FROM WORKING_NODE WHERE local_relpath = ''; "
+
+STMT_COPY_WORKING_NODE_TABLE_TO_WCROOT_DB2 = \
+ "INSERT INTO root.WORKING_NODE ( " \
+ " wc_id, local_relpath, parent_relpath, presence, kind, checksum, " \
+ " translated_size, changed_rev, changed_date, changed_author, depth, " \
+ " symlink_target, copyfrom_repos_id, copyfrom_repos_path, copyfrom_revnum, " \
+ " moved_here, moved_to, last_mod_time, properties, keep_local ) " \
+ "SELECT wc_id, ?1 || '/' || local_relpath, ?1 AS parent_relpath, " \
+ " presence, kind, checksum, " \
+ " translated_size, changed_rev, changed_date, changed_author, depth, " \
+ " symlink_target, copyfrom_repos_id, copyfrom_repos_path, copyfrom_revnum, " \
+ " moved_here, moved_to, last_mod_time, properties, keep_local " \
+ "FROM WORKING_NODE WHERE local_relpath != ''; "
+
+STMT_COPY_ACTUAL_NODE_TABLE_TO_WCROOT_DB1 = \
+ "INSERT OR REPLACE INTO root.ACTUAL_NODE ( " \
+ " wc_id, local_relpath, parent_relpath, properties, " \
+ " conflict_old, conflict_new, conflict_working, " \
+ " prop_reject, changelist, text_mod, tree_conflict_data, " \
+ " conflict_data, older_checksum, left_checksum, right_checksum ) " \
+ "SELECT wc_id, ?1, ?2 AS parent_relpath, properties, " \
+ " conflict_old, conflict_new, conflict_working, " \
+ " prop_reject, changelist, text_mod, tree_conflict_data, " \
+ " conflict_data, older_checksum, left_checksum, right_checksum " \
+ "FROM ACTUAL_NODE WHERE local_relpath = ''; "
+
+STMT_COPY_ACTUAL_NODE_TABLE_TO_WCROOT_DB2 = \
+ "INSERT INTO root.ACTUAL_NODE ( " \
+ " wc_id, local_relpath, parent_relpath, properties, " \
+ " conflict_old, conflict_new, conflict_working, " \
+ " prop_reject, changelist, text_mod, tree_conflict_data, " \
+ " conflict_data, older_checksum, left_checksum, right_checksum ) " \
+ "SELECT wc_id, ?1 || '/' || local_relpath, ?1 AS parent_relpath, properties, " \
+ " conflict_old, conflict_new, conflict_working, " \
+ " prop_reject, changelist, text_mod, tree_conflict_data, " \
+ " conflict_data, older_checksum, left_checksum, right_checksum " \
+ "FROM ACTUAL_NODE WHERE local_relpath != ''; "
+
+STMT_COPY_LOCK_TABLE_TO_WCROOT_DB = \
+ "INSERT INTO root.LOCK " \
+ "SELECT * FROM LOCK; "
+
+STMT_COPY_PRISTINE_TABLE_TO_WCROOT_DB = \
+ "INSERT OR REPLACE INTO root.PRISTINE " \
+ "SELECT * FROM PRISTINE; "
+
+STMT_SELECT_SUBDIR = \
+ "SELECT 1 FROM BASE_NODE WHERE local_relpath=?1 AND kind='subdir'" \
+ "UNION " \
+ "SELECT 0 FROM WORKING_NODE WHERE local_relpath=?1 AND kind='subdir';"
+
+def copy_db_rows_to_wcroot(wc_subdir_relpath):
+ """Copy all relevant table rows from the $PWD/WC_SUBDIR_RELPATH/.svn/wc.db
+ into $PWD/.svn/wc.db."""
+
+ wc_root_path = ''
+ wc_subdir_path = wc_subdir_relpath
+ wc_subdir_parent_relpath = os.path.dirname(wc_subdir_relpath)
+
+ try:
+ db = sqlite3.connect(db_path(wc_subdir_path))
+  except sqlite3.OperationalError:
+ raise NotASubversionWC(wc_subdir_path)
+ c = db.cursor()
+
+ c.execute("ATTACH '" + db_path(wc_root_path) + "' AS 'root'")
+
+ ### TODO: the REPOSITORY table. At present we assume there is only one
+ # repository in use and its repos_id is consistent throughout the WC.
+ # That's not always true - e.g. "svn switch --relocate" creates repos_id
+ # 2, and then "svn mkdir" uses repos_id 1 in the subdirectory. */
+
+ c.execute(STMT_COPY_BASE_NODE_TABLE_TO_WCROOT_DB1,
+ (wc_subdir_relpath, wc_subdir_parent_relpath))
+ c.execute(STMT_COPY_BASE_NODE_TABLE_TO_WCROOT_DB2,
+ (wc_subdir_relpath, ))
+ c.execute(STMT_COPY_WORKING_NODE_TABLE_TO_WCROOT_DB1,
+ (wc_subdir_relpath, wc_subdir_parent_relpath))
+ c.execute(STMT_COPY_WORKING_NODE_TABLE_TO_WCROOT_DB2,
+ (wc_subdir_relpath, ))
+ c.execute(STMT_COPY_ACTUAL_NODE_TABLE_TO_WCROOT_DB1,
+ (wc_subdir_relpath, wc_subdir_parent_relpath))
+ c.execute(STMT_COPY_ACTUAL_NODE_TABLE_TO_WCROOT_DB2,
+ (wc_subdir_relpath, ))
+ c.execute(STMT_COPY_LOCK_TABLE_TO_WCROOT_DB)
+ c.execute(STMT_COPY_PRISTINE_TABLE_TO_WCROOT_DB)
+
+ db.commit()
+ db.close()
+
+
+def move_and_shard_pristine_files(old_wc_path, new_wc_path):
+ """Move all pristine text files from 'OLD_WC_PATH/.svn/pristine/'
+ into 'NEW_WC_PATH/.svn/pristine/??/', creating shard dirs where
+ necessary."""
+
+ old_pristine_dir = pristine_path(old_wc_path)
+ new_pristine_dir = pristine_path(new_wc_path)
+
+ if not os.path.exists(old_pristine_dir):
+ # That's fine, assuming there are no pristine texts.
+ return
+
+ for basename in os.listdir(old_pristine_dir):
+ shard = basename[:2]
+ if shard == basename: # already converted
+ continue
+ old = os.path.join(old_pristine_dir, basename)
+ new = os.path.join(new_pristine_dir, shard, basename)
+ os.renames(old, new)
+
+def select_subdir(wc_subdir_path):
+ """ Return True if wc_subdir_path is a known to be a versioned subdir,
+ False otherwise."""
+
+ try:
+ db = sqlite3.connect(db_path(''))
+  except sqlite3.OperationalError:
+ raise NotASubversionWC(wc_subdir_path)
+ c = db.cursor()
+ c.execute(STMT_SELECT_SUBDIR, (wc_subdir_path,))
+ if c.fetchone() is None:
+ return False
+ else:
+ return True
+
+
+def migrate_wc_subdirs(wc_root_path):
+ """Move Subversion metadata from the admin dir of each subdirectory
+ below WC_ROOT_PATH into WC_ROOT_PATH's own admin dir."""
+
+ old_cwd = os.getcwd()
+ os.chdir(wc_root_path)
+
+ # Keep track of which dirs we've migrated so we can delete their .svn's
+ # afterwards. Done this way because the tree walking is top-down and if
+ # we deleted the .svn before walking into the subdir, it would look like
+ # an unversioned subdir.
+ migrated_subdirs = []
+
+ # For each directory in the WC, try to migrate each of its subdirs (DIRS).
+ # Done this way because (a) os.walk() gives us lists of subdirs, and (b)
+ # it's easy to skip the WC root dir.
+ for dir_path, dirs, files in os.walk('.'):
+
+ # don't walk into the '.svn' subdirectory
+ try:
+ dirs.remove(dot_svn)
+ except ValueError:
+ # a non-WC dir: don't walk into any subdirectories
+ print("skipped: ", NotASubversionWC(dir_path))
+ del dirs[:]
+ continue
+
+ # Try to migrate each other subdirectory
+ for dir in dirs[:]: # copy so we can remove some
+ wc_subdir_path = os.path.join(dir_path, dir)
+ if wc_subdir_path.startswith('./'):
+ wc_subdir_path = wc_subdir_path[2:]
+
+ if not select_subdir(wc_subdir_path):
+ print("skipped:", wc_subdir_path)
+ dirs.remove(dir)
+ continue
+
+ try:
+ check_wc_format_number(wc_subdir_path)
+ print("migrating '" + wc_subdir_path + "'")
+ copy_db_rows_to_wcroot(wc_subdir_path)
+ move_and_shard_pristine_files(wc_subdir_path, '.')
+ migrated_subdirs += [wc_subdir_path]
+ except (WrongFormatException, NotASubversionWC) as e:
+ print("skipped:", e)
+ # don't walk into it
+ dirs.remove(dir)
+ continue
+
+ # Delete the remaining parts of the migrated .svn dirs
+ # Make a note of any problems in deleting.
+ failed_delete_subdirs = []
+ for wc_subdir_path in migrated_subdirs:
+ print("deleting " + dotsvn_path(wc_subdir_path))
+ try:
+ os.remove(db_path(wc_subdir_path))
+ if os.path.exists(pristine_path(wc_subdir_path)):
+ os.rmdir(pristine_path(wc_subdir_path))
+ shutil.rmtree(tmp_path(wc_subdir_path))
+ os.rmdir(dotsvn_path(wc_subdir_path))
+ except Exception as e:
+ print(e)
+ failed_delete_subdirs += [wc_subdir_path]
+
+ # Notify any problems in deleting
+ if failed_delete_subdirs:
+ print("Failed to delete the following directories. Please delete them manually.")
+ for wc_subdir_path in failed_delete_subdirs:
+ print(" " + dotsvn_path(wc_subdir_path))
+
+ os.chdir(old_cwd)
+
+
+def check_wc_format_number(wc_path):
+ """Check that the WC format of the WC dir WC_PATH is 18.
+ Raise a WrongFormatException if not."""
+
+ try:
+ db = sqlite3.connect(db_path(wc_path))
+ except sqlite3.OperationalError:
+ raise NotASubversionWC(wc_path)
+ c = db.cursor()
+ c.execute("PRAGMA user_version;")
+ format = c.fetchone()[0]
+ db.commit()
+ db.close()
+
+ if format != 18:
+ raise WrongFormatException(wc_path, format)
+
+
+def bump_wc_format_number(wc_path):
+ """Bump the WC format number of the WC dir WC_PATH to 19."""
+
+ try:
+ db = sqlite3.connect(db_path(wc_path))
+ except sqlite3.OperationalError:
+ raise NotASubversionWC(wc_path)
+ c = db.cursor()
+ c.execute("PRAGMA user_version = 19;")
+ db.commit()
+ db.close()
+
+
+if __name__ == '__main__':
+
+ if len(sys.argv) != 2:
+ print(__doc__)
+ sys.exit(1)
+
+ wc_root_path = sys.argv[1]
+
+ try:
+ check_wc_format_number(wc_root_path)
+ except (WrongFormatException, NotASubversionWC) as e:
+ print("error:", e)
+ sys.exit(1)
+
+ print("merging subdir DBs into single DB '" + wc_root_path + "'")
+ move_and_shard_pristine_files(wc_root_path, wc_root_path)
+ migrate_wc_subdirs(wc_root_path)
+ bump_wc_format_number(wc_root_path)
+
diff --git a/tools/dev/wc-ng/count-progress.py b/tools/dev/wc-ng/count-progress.py
new file mode 100755
index 0000000..bf06512
--- /dev/null
+++ b/tools/dev/wc-ng/count-progress.py
@@ -0,0 +1,117 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+import os, sys
+
+SKIP = ['deprecated.c',
+ 'entries.c',
+ 'entries.h',
+ 'old-and-busted.c']
+
+TERMS = ['svn_wc_adm_access_t',
+ 'svn_wc_entry_t',
+ 'svn_wc__node_',
+ 'svn_wc__db_temp_',
+ 'svn_wc__db_node_hidden',
+ 'svn_wc__loggy',
+ 'svn_wc__db_wq_add',
+ ]
+
+
+def get_files_in(path):
+ names = os.listdir(path)
+ for skip in SKIP:
+ try:
+ names.remove(skip)
+ except ValueError:
+ pass
+ return [os.path.join(path, fname) for fname in names
+ if fname.endswith('.c') or fname.endswith('.h')]
+
+
+def count_terms_in(path):
+ files = get_files_in(path)
+ counts = {}
+ for term in TERMS:
+ counts[term] = 0
+  for filepath in files:
+ contents = open(filepath).read()
+ for term in TERMS:
+ counts[term] += contents.count(term)
+ return counts
+
+
+def print_report(wcroot):
+ client = count_terms_in(os.path.join(wcroot, 'subversion', 'libsvn_client'))
+ wc = count_terms_in(os.path.join(wcroot, 'subversion', 'libsvn_wc'))
+
+ client_total = 0
+ wc_total = 0
+
+ FMT = '%22s |%14s |%10s |%6s'
+ SEP = '%s+%s+%s+%s' % (23*'-', 15*'-', 11*'-', 7*'-')
+
+ print(FMT % ('', 'libsvn_client', 'libsvn_wc', 'Total'))
+ print(SEP)
+ for term in TERMS:
+ print(FMT % (term, client[term], wc[term], client[term] + wc[term]))
+ client_total += client[term]
+ wc_total += wc[term]
+ print(SEP)
+ print(FMT % ('Total', client_total, wc_total, client_total + wc_total))
+
+
+def usage():
+ print("""\
+Usage: %s [WCROOT]
+ %s --help
+
+Show statistics related to outstanding WC-NG code conversion work
+items in working copy branch root WCROOT. If WCROOT is omitted, this
+program will attempt to guess it using the assumption that it is being
+run from within the working copy of interest."""
+% (sys.argv[0], sys.argv[0]))
+
+ sys.exit(0)
+
+
+if __name__ == '__main__':
+ if len(sys.argv) > 1:
+ if '--help' in sys.argv[1:]:
+ usage()
+
+ print_report(sys.argv[1])
+ else:
+ cwd = os.path.abspath(os.getcwd())
+ idx = cwd.rfind(os.sep + 'subversion')
+ if idx > 0:
+ wcroot = cwd[:idx]
+ else:
+ idx = cwd.rfind(os.sep + 'tools')
+ if idx > 0:
+ wcroot = cwd[:idx]
+ elif os.path.exists(os.path.join(cwd, 'subversion')):
+ wcroot = cwd
+ else:
+ print("ERROR: the root of 'trunk' cannot be located -- please provide")
+ sys.exit(1)
+ print_report(wcroot)
diff --git a/tools/dev/wc-ng/gather-data.sh b/tools/dev/wc-ng/gather-data.sh
new file mode 100755
index 0000000..fe481b9
--- /dev/null
+++ b/tools/dev/wc-ng/gather-data.sh
@@ -0,0 +1,78 @@
+#!/usr/bin/env sh
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+# Trap Ctrl-C
+trap 'exit 1' 2
+
+# Some useful variables
+REPOS=file:///home/hwright/dev/test/svn-mirror
+WC=blech
+REV_LIST=revs_list
+SCRIPT=count-progress.py
+DATA=data.csv
+
+# Sync up the local repo
+svnsync sync $REPOS
+
+# Grab the list of revisions of interest on trunk
+svn log -q -r0:HEAD $REPOS/trunk \
+ | grep -v '^----' \
+ | cut -f1 -d '|' \
+ | cut -b2- > $REV_LIST
+
+# Export the counting script
+if [ -e $SCRIPT ]; then
+ rm $SCRIPT
+fi
+svn export $REPOS/trunk/tools/dev/wc-ng/$SCRIPT $SCRIPT
+
+# Checkout a working copy
+if [ ! -d "$WC" ]; then
+ svn co $REPOS/trunk $WC -r1
+fi
+
+# Get all the symbols of interest from the counting script and write
+# them out at the headers in our csv file
+LINE=""
+for l in `./$SCRIPT $WC | tail -n +3 | grep -v '^----' | cut -f 1 -d '|'`; do
+ LINE="$LINE,$l"
+done
+echo "Revision$LINE" > $DATA
+
+# Iterate over all the revisions of interest
+export SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_SLEEP_FOR_TIMESTAMPS='yes'
+for r in `cat $REV_LIST`; do
+ svn up -r$r $WC -q
+
+ # Do the count for that rev, and put the data in our data file
+ LINE=""
+ for l in `./$SCRIPT $WC | tail -n +3 | grep -v '^----' | cut -f 4 -d '|'`; do
+ LINE="$LINE,$l"
+ done
+ echo "$r$LINE" >> $DATA
+
+ echo "Done with revision $r"
+done
+unset SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_SLEEP_FOR_TIMESTAMPS
+
+# Cleanup
+rm -rf $WC
+rm $REV_LIST
diff --git a/tools/dev/wc-ng/graph-data.py b/tools/dev/wc-ng/graph-data.py
new file mode 100755
index 0000000..a8f0c5e
--- /dev/null
+++ b/tools/dev/wc-ng/graph-data.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+import matplotlib.mlab as mlab
+import matplotlib.pyplot as plt
+from matplotlib import pylab
+import numpy as np
+
+import csv
+import sys
+
+min_rev = 35000
+
+data_reader = csv.reader(open('data.csv'))
+
+data = []
+for row in data_reader:
+ row = row[:-1]
+ if row[0] == 'Revision':
+ data.append(row)
+ continue
+
+ if int(row[0]) < min_rev:
+ continue
+
+ for i, x in enumerate(row):
+ if i <= 1:
+ row[i] = int(row[i])
+ else:
+ row[i] = int(row[i-1]) + int(row[i])
+ data.append(row)
+
+x = [d[0] for d in data[1:]]
+data = [d[1:] for d in data]
+y = list(zip(*data))
+
+l = []
+for i, foo in enumerate(y):
+ ln = plt.plot(x, foo[1:], linewidth=1)
+ l.append(ln)
+
+plt.figlegend(l, data[0], 'lower left')
+plt.fill_between(x, 0, y[0][1:], facecolor=l[0][0].get_color())
+#for i in range(0, len(y)-1):
+# plt.fill_between(x, y[i][1:], y[i+1][1:])
+plt.xlabel('Revision')
+plt.ylabel('Symbol Count')
+plt.show()
+
+png = open('chart2.png', 'wb')
+plt.savefig(png)
diff --git a/tools/dev/wc-ng/populate-pristine.py b/tools/dev/wc-ng/populate-pristine.py
new file mode 100755
index 0000000..4778cfc
--- /dev/null
+++ b/tools/dev/wc-ng/populate-pristine.py
@@ -0,0 +1,108 @@
+#!/usr/bin/env python
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""
+A script that takes a .svn/pristine/ hierarchy, with its existing
+.svn/wc.db database, and populates the database's PRISTINE table
+accordingly. (Use 'svn cleanup' to remove unreferenced pristines.)
+
+Usage:
+
+ %s /path/to/wc [...]
+"""
+
+# TODO: resolve the NotImplemented() in __main__
+
+# TODO: increment refcount upon collision
+# TODO: add <given file>, not just argv[1]/.svn/pristine/??/*
+
+import hashlib
+import os
+import re
+import sqlite3
+import sys
+
+# ### This could require any other format that has the same PRISTINE schema
+# ### and semantics.
+FORMAT = 22
+BUFFER_SIZE = 4 * 1024
+
+class UnknownFormat(Exception):
+ def __init__(self, formatno):
+ self.formatno = formatno
+
+def open_db(wc_path):
+ wc_db = os.path.join(wc_path, '.svn', 'wc.db')
+ conn = sqlite3.connect(wc_db)
+ curs = conn.cursor()
+ curs.execute('pragma user_version;')
+ formatno = int(curs.fetchone()[0])
+ if formatno > FORMAT:
+ raise UnknownFormat(formatno)
+ return conn
+
+_sha1_re = re.compile(r'^[0-9a-f]{40}$')
+
+def md5_of(path):
+ fd = os.open(path, os.O_RDONLY)
+ ctx = hashlib.md5()
+ while True:
+ s = os.read(fd, BUFFER_SIZE)
+ if len(s):
+ ctx.update(s)
+ else:
+ os.close(fd)
+ return ctx.hexdigest()
+
+INSERT_QUERY = """
+ INSERT OR REPLACE
+ INTO pristine(checksum,compression,size,refcount,md5_checksum)
+ VALUES (?,?,?,?,?)
+"""
+
+def populate(wc_path):
+ conn = open_db(wc_path)
+ sys.stdout.write("Updating '%s': " % wc_path)
+ for dirname, dirs, files in os.walk(os.path.join(wc_path, '.svn/pristine/')):
+ # skip everything but .svn/pristine/xx/
+ if os.path.basename(os.path.dirname(dirname)) == 'pristine':
+ sys.stdout.write("'%s', " % os.path.basename(dirname))
+ for f in filter(lambda x: _sha1_re.match(x), files):
+ fullpath = os.path.join(dirname, f)
+ conn.execute(INSERT_QUERY,
+ ('$sha1$'+f, None, os.stat(fullpath).st_size, 1,
+ '$md5 $'+md5_of(fullpath)))
+ # periodic transaction commits, for efficiency
+ conn.commit()
+ else:
+ sys.stdout.write(".\n")
+
+if __name__ == '__main__':
+  raise NotImplementedError("""Subversion does not know yet to avoid fetching
+ a file when a file with matching sha1 appears in the PRISTINE table.""")
+
+ paths = sys.argv[1:]
+ if not paths:
+ paths = ['.']
+ for wc_path in paths:
+ try:
+ populate(wc_path)
+ except UnknownFormat as e:
+ sys.stderr.write("Don't know how to handle '%s' (format %d)'\n"
+ % (wc_path, e.formatno))
diff --git a/tools/dev/wc-ng/svn-wc-db-tester.c b/tools/dev/wc-ng/svn-wc-db-tester.c
new file mode 100644
index 0000000..ccdd102
--- /dev/null
+++ b/tools/dev/wc-ng/svn-wc-db-tester.c
@@ -0,0 +1,269 @@
+/* svn-wc-db-tester.c
+ *
+ * This is a crude command line tool that makes it possible to
+ * run the wc-db validation checks directly.
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include "svn_cmdline.h"
+#include "svn_pools.h"
+#include "svn_wc.h"
+#include "svn_utf.h"
+#include "svn_path.h"
+#include "svn_opt.h"
+#include "svn_version.h"
+
+#include "private/svn_wc_private.h"
+#include "private/svn_cmdline_private.h"
+
+#include "../../../subversion/libsvn_wc/wc.h"
+#include "../../../subversion/libsvn_wc/wc_db.h"
+
+#include "svn_private_config.h"
+
+#define OPT_VERSION SVN_OPT_FIRST_LONGOPT_ID
+
+static svn_error_t *
+version(apr_pool_t *pool)
+{
+ return svn_opt_print_help4(NULL, "svn-wc-db-tester", TRUE, FALSE, FALSE,
+ NULL, NULL, NULL, NULL, NULL, NULL, pool);
+}
+
+static void
+usage(apr_pool_t *pool)
+{
+ svn_error_clear(svn_cmdline_fprintf
+ (stderr, pool,
+ _("Type 'svn-wc-db-tester --help' for usage.\n")));
+}
+
+struct verify_baton
+{
+ svn_boolean_t found_err;
+};
+
+static svn_error_t *
+verify_cb(void *baton,
+ const char *wc_abspath,
+ const char *local_relpath,
+ int op_depth,
+ int id,
+ const char *msg,
+ apr_pool_t *scratch_pool)
+{
+ struct verify_baton *vb = baton;
+
+ if (op_depth >= 0)
+ {
+ SVN_ERR(svn_cmdline_printf(scratch_pool, "%s (depth=%d) DBV%04d: %s\n",
+ local_relpath, op_depth, id, msg));
+ }
+ else
+ {
+ SVN_ERR(svn_cmdline_printf(scratch_pool, "%s DBV%04d: %s\n",
+ local_relpath, id, msg));
+ }
+
+ vb->found_err = TRUE;
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+verify_db(int argc, const char *path, apr_pool_t *pool)
+{
+ const char *local_abspath;
+ svn_wc_context_t *wc_ctx;
+ struct verify_baton vb = { FALSE };
+
+ /* Read the parameters */
+ path = svn_dirent_internal_style(path, pool);
+
+ SVN_ERR(svn_dirent_get_absolute(&local_abspath, path, pool));
+
+ SVN_ERR(svn_wc_context_create(&wc_ctx, NULL, pool, pool));
+
+ SVN_ERR(svn_wc__db_verify_db_full(wc_ctx->db, local_abspath,
+ verify_cb, &vb, pool));
+
+ if (vb.found_err)
+ return svn_error_create(SVN_ERR_WC_PATH_UNEXPECTED_STATUS, NULL,
+ _("Found one or more potential wc.db inconsistencies"));
+
+ return SVN_NO_ERROR;
+}
+
+
+static void
+help(const apr_getopt_option_t *options, apr_pool_t *pool)
+{
+ svn_error_clear
+ (svn_cmdline_fprintf
+ (stdout, pool,
+ _("usage: svn-wc-db-tester [OPTIONS] WC_PATH\n\n"
+ " Run verifications on the working copy\n"
+ "\n"
+ " WC_PATH's parent directory must be a working copy, otherwise a\n"
+ " tree conflict cannot be raised.\n"
+ "\n"
+ "Valid options:\n")));
+ while (options->description)
+ {
+ const char *optstr;
+ svn_opt_format_option(&optstr, options, TRUE, pool);
+ svn_error_clear(svn_cmdline_fprintf(stdout, pool, " %s\n", optstr));
+ ++options;
+ }
+}
+
+
+/* Version compatibility check */
+static svn_error_t *
+check_lib_versions(void)
+{
+ static const svn_version_checklist_t checklist[] =
+ {
+ { "svn_subr", svn_subr_version },
+ { "svn_wc", svn_wc_version },
+ { NULL, NULL }
+ };
+ SVN_VERSION_DEFINE(my_version);
+
+ return svn_ver_check_list2(&my_version, checklist, svn_ver_equal);
+}
+
+/*
+ * On success, leave *EXIT_CODE untouched and return SVN_NO_ERROR. On error,
+ * either return an error to be displayed, or set *EXIT_CODE to non-zero and
+ * return SVN_NO_ERROR.
+ */
+static svn_error_t *
+sub_main(int *exit_code, int argc, const char *argv[], apr_pool_t *pool)
+{
+ apr_getopt_t *os;
+ const apr_getopt_option_t options[] =
+ {
+ {"help", 'h', 0, N_("display this help")},
+ {"version", OPT_VERSION, 0,
+ N_("show program version information")},
+ {0, 0, 0, 0}
+ };
+ apr_array_header_t *remaining_argv;
+
+ /* Check library versions */
+ SVN_ERR(check_lib_versions());
+
+#if defined(WIN32) || defined(__CYGWIN__)
+ /* Set the working copy administrative directory name. */
+ if (getenv("SVN_ASP_DOT_NET_HACK"))
+ {
+ SVN_ERR(svn_wc_set_adm_dir("_svn", pool));
+ }
+#endif
+
+ SVN_ERR(svn_cmdline__getopt_init(&os, argc, argv, pool));
+
+ os->interleave = 1;
+ while (1)
+ {
+ int opt;
+ const char *arg;
+ apr_status_t status = apr_getopt_long(os, options, &opt, &arg);
+ if (APR_STATUS_IS_EOF(status))
+ break;
+ if (status != APR_SUCCESS)
+ {
+ usage(pool);
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
+ }
+
+ switch (opt)
+ {
+ case 'h':
+ help(options, pool);
+ return SVN_NO_ERROR;
+ case OPT_VERSION:
+ SVN_ERR(version(pool));
+ return SVN_NO_ERROR;
+ default:
+ usage(pool);
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
+ }
+ }
+
+ /* Convert the remaining arguments to UTF-8. */
+ remaining_argv = apr_array_make(pool, 0, sizeof(const char *));
+ while (os->ind < argc)
+ {
+ const char *s;
+
+ SVN_ERR(svn_utf_cstring_to_utf8(&s, os->argv[os->ind++], pool));
+ APR_ARRAY_PUSH(remaining_argv, const char *) = s;
+ }
+
+ if (remaining_argv->nelts != 1)
+ {
+ usage(pool);
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
+ }
+
+ /* Do the main task */
+ SVN_ERR(verify_db(remaining_argv->nelts,
+ APR_ARRAY_IDX(remaining_argv, 0, const char *),
+ pool));
+
+ return SVN_NO_ERROR;
+}
+
+int
+main(int argc, const char *argv[])
+{
+ apr_pool_t *pool;
+ int exit_code = EXIT_SUCCESS;
+ svn_error_t *err;
+
+ /* Initialize the app. */
+ if (svn_cmdline_init("svn-wc-db-tester", stderr) != EXIT_SUCCESS)
+ return EXIT_FAILURE;
+
+ /* Create our top-level pool. Use a separate mutexless allocator,
+ * given this application is single threaded.
+ */
+ pool = apr_allocator_owner_get(svn_pool_create_allocator(FALSE));
+
+ err = sub_main(&exit_code, argc, argv, pool);
+
+ /* Flush stdout and report if it fails. It would be flushed on exit anyway
+ but this makes sure that output is not silently lost if it fails. */
+ err = svn_error_compose_create(err, svn_cmdline_fflush(stdout));
+
+ if (err)
+ {
+ exit_code = EXIT_FAILURE;
+ svn_cmdline_handle_exit_error(err, NULL, "svn-wc-db-tester: ");
+ }
+
+ svn_pool_destroy(pool);
+ return exit_code;
+}
diff --git a/tools/dev/which-error.py b/tools/dev/which-error.py
new file mode 100755
index 0000000..6c683cb
--- /dev/null
+++ b/tools/dev/which-error.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# which-error.py: Print semantic Subversion error code names mapped from
+# their numeric error code values
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+#
+# $HeadURL: https://svn.apache.org/repos/asf/subversion/branches/1.10.x/tools/dev/which-error.py $
+# $LastChangedDate: 2016-04-30 08:16:53 +0000 (Sat, 30 Apr 2016) $
+# $LastChangedBy: stefan2 $
+# $LastChangedRevision: 1741723 $
+#
+
+import errno
+import sys
+import os.path
+import re
+
+try:
+ from svn import core
+except ImportError as e:
+ sys.stderr.write("ERROR: Unable to import Subversion's Python bindings: '%s'\n" \
+ "Hint: Set your PYTHONPATH environment variable, or adjust your " \
+ "PYTHONSTARTUP\nfile to point to your Subversion install " \
+ "location's svn-python directory.\n" % e)
+ sys.stderr.flush()
+ sys.exit(1)
+
+
+def usage_and_exit():
+ progname = os.path.basename(sys.argv[0])
+ sys.stderr.write("""Usage: 1. %s ERRNUM [...]
+ 2. %s parse
+ 3. %s list
+
+Print numeric and semantic error code information for Subversion error
+codes. This can be done in variety of ways:
+
+ 1. For each ERRNUM, list the error code information.
+
+ 2. Parse standard input as if it was error stream from a debug-mode
+ Subversion command-line client, echoing that input to stdout,
+ followed by the error code information for codes found in use in
+ that error stream.
+
+ 3. Simply list the error code information for all known such
+ mappings.
+
+""" % (progname, progname, progname))
+ sys.exit(1)
+
+def get_errors():
+ errs = {}
+ ## errno values.
+ errs.update(errno.errorcode)
+ ## APR-defined errors, from apr_errno.h.
+ dirname = os.path.dirname(os.path.realpath(__file__))
+ for line in open(os.path.join(dirname, 'aprerr.txt')):
+ # aprerr.txt parsing duplicated in gen_base.py:write_errno_table()
+ if line.startswith('#'):
+ continue
+ key, _, val = line.split()
+ errs[int(val)] = key
+ ## Subversion errors, from svn_error_codes.h.
+ for key in vars(core):
+ if key.find('SVN_ERR_') == 0:
+ try:
+ val = int(vars(core)[key])
+ errs[val] = key
+ except:
+ pass
+ return errs
+
+def print_error(code):
+ try:
+ print('%08d %s' % (code, __svn_error_codes[code]))
+ except KeyError:
+ if code == -41:
+ print("Sit by a lake.")
+ elif code >= 120100 and code < 121000:
+ print('%08d <error code from libserf; see serf.h>' % (code))
+ else:
+ print('%08d *** UNKNOWN ERROR CODE ***' % (code))
+
+if __name__ == "__main__":
+ global __svn_error_codes
+ __svn_error_codes = get_errors()
+ codes = []
+ if len(sys.argv) < 2:
+ usage_and_exit()
+
+ # Get a list of known codes
+ if sys.argv[1] == 'list':
+ if len(sys.argv) > 2:
+ usage_and_exit()
+ codes = sorted(__svn_error_codes.keys())
+
+ # Get a list of code by parsing stdin for apr_err=CODE instances
+ elif sys.argv[1] == 'parse':
+ if len(sys.argv) > 2:
+ usage_and_exit()
+ while True:
+ line = sys.stdin.readline()
+ if not line:
+ break
+ sys.stdout.write(line)
+ match = re.match(r'^.*apr_err=([0-9]+)[^0-9].*$', line)
+ if match:
+ codes.append(int(match.group(1)))
+
+ # Get the list of requested codes
+ else:
+ for code in sys.argv[1:]:
+ try:
+ code = code.lstrip('EW')
+ codes.append(int(code))
+ except ValueError:
+ usage_and_exit()
+
+ # Print the harvest codes
+ for code in codes:
+ print_error(code)
+
+
diff --git a/tools/dev/windows-build/Makefile b/tools/dev/windows-build/Makefile
new file mode 100644
index 0000000..c0e1b15
--- /dev/null
+++ b/tools/dev/windows-build/Makefile
@@ -0,0 +1,155 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+CONFIG=debug
+#CONFIG=release
+# will be appended to 'svn --version --quiet' output; set to zero to suppress
+BUILDDESCR=0
+
+MSBUILD=msbuild subversion_vcnet.sln /nologo /v:q /p:Configuration=$(CONFIG)
+CP=cp
+TEEPATH=C:\Path\To\Parent\Of\tee\dot\exe\and\other\utils\as\needed\see\below.80
+
+SOURCESDIR=C:\Path\To\Dependency\Sources
+SVNDIR=C:\Path\To\Subversion\SourceTree
+TARGETDIR=$(SVNDIR)\dist
+INSTALLDIR=E:\svn
+
+EXPATVER=2.0.0
+HTTPDVER=2.2.13
+SERFVER=1.1.0
+OPENSSLVER=0.9.8k
+SQLITEVER=3.6.3
+ZLIBVER=1.2.3
+#ENABLE_ML=--enable-ml
+
+PATCHESDIR=$(HOME)\mydiffs\svn
+OPENSSLDIR=$(SOURCESDIR)\openssl-$(OPENSSLVER)
+EXPATDIR=$(SOURCESDIR)\expat-$(EXPATVER)
+HTTPDDIR=$(SOURCESDIR)\httpd-$(HTTPDVER)
+#APRDIR=$(SOURCESDIR)\apr
+#APRUTILDIR=$(SOURCESDIR)\apr-util
+#APRICONVDIR=$(SOURCESDIR)\apr-iconv
+APRDIR=$(HTTPDDIR)\srclib\apr
+APRUTILDIR=$(HTTPDDIR)\srclib\apr-util
+APRICONVDIR=$(HTTPDDIR)\srclib\apr-iconv
+SQLITEDIR=$(SOURCESDIR)\sqlite-amalgamation
+ZLIBDIR=$(SOURCESDIR)\zlib-$(ZLIBVER)
+SERFDIR=$(SOURCESDIR)\serf-$(SERFVER)
+
+all:
+ @echo Available targets: newfiles versionstamp
+ @echo Available targets: config
+ @echo Available targets: progname testname
+ @echo Available targets: all1 all2
+ @echo Available targets: buildlog package
+ @echo Available targets: check checklog
+TARGETDIRset: SVNDIRset
+ if X$(TARGETDIR) == X exit 1
+SVNDIRset:
+ if X$(SVNDIR) == X exit 1
+
+removealllocalmods:
+ svn revert -R .
+ svn status --no-ignore | xargs rm -rf --
+ svn status --no-ignore
+ # last, in case of wc format bump
+ rmdir /s /q dist local Release Debug
+
+newfiles: SVNDIRset
+ xcopy /s /y $(PATCHESDIR)\newfiles $(SVNDIR)
+versionstamp:
+ perl tools\dev\windows-build\document-version.pl subversion\include\svn_version.h $(TARGETDIR) $(SVNDIR) $(BUILDDESCR)
+ svn diff subversion\include\svn_version.h
+
+cleanup1: TARGETDIR
+ del log.all-tests log.gen-make.py log.devenv log.win-tests
+ rmdir /s /q $(TARGETDIR)\bin
+
+clean:
+ @echo "Sorry, '$@' target not yet implemented" >&2
+# TODO also copy sqlite3.dll if it's used
+install: TARGETDIRset
+ test ! -d $(INSTALLDIR)
+ mkdir $(INSTALLDIR)\bin
+ pushd $(TARGETDIR)\bin &&\
+ $(CP) *.exe $(INSTALLDIR)/bin &&\
+ $(CP) libapr*.dll $(INSTALLDIR)/bin &&\
+ $(CP) libeay32.dll $(INSTALLDIR)/bin &&\
+ $(CP) ssleay32.dll $(INSTALLDIR)/bin &&\
+ $(CP) libsvn*.dll $(INSTALLDIR)/bin &&\
+ $(CP) ..\*.diff $(INSTALLDIR) &&\
+ popd
+
+targetdir: TARGETDIRset
+ test -d $(TARGETDIR)\bin || mkdir $(TARGETDIR)\bin
+
+# TODO: pass --with-apr-* if you don't have httpd; make --with-* args optional
+config: targetdir
+ python gen-make.py --$(CONFIG) --with-httpd=$(HTTPDDIR) --with-serf=$(SERFDIR) --with-openssl=$(OPENSSLDIR) --with-sqlite=$(SQLITEDIR) --with-zlib=$(ZLIBDIR) $(ENABLE_ML) --vsnet-version=2008 -t vcproj 2>&1 | tee log.gen-make
+
+# Visual Studio 2008
+libsvn_auth_gnome_keyring libsvn_auth_kwallet libsvn_client libsvn_delta libsvn_diff libsvn_fs libsvn_fs_base libsvn_fs_fs libsvn_fs_util libsvn_ra libsvn_ra_local libsvn_ra_serf libsvn_ra_svn libsvn_repos libsvn_subr libsvn_wc: targetdir
+ $(MSBUILD) /t:Libraries\$@
+ $(MAKE) package
+svn svnadmin svndumpfilter svnlook svnmucc svnserve svnsync svnversion svnrdump entries-dump: targetdir
+ $(MSBUILD) /t:Programs\$@
+ $(MAKE) package
+auth-test cache-test changes-test checksum-test client-test compat-test config-test db-test diff-diff3-test dir-delta-editor dirent_uri-test error-test fs-base-test fs-pack-test fs-test hashdump-test key-test locks-test mergeinfo-test opt-test path-test ra-local-test random-test repos-test revision-test skel-test stream-test string-test strings-reps-test svn_test_fs svn_test_main svndiff-test target-test time-test translate-test tree-conflict-data-test utf-test vdelta-test window-test: targetdir
+ $(MSBUILD) /t:Tests\$@
+ $(MAKE) package
+
+__ALL__ __ALL_TESTS__: targetdir
+ $(MSBUILD) /t:$@
+ $(MAKE) package
+all1: targetdir
+ $(MSBUILD) /t:__ALL__
+ $(MAKE) package
+ @echo TODO entries-test
+all2: targetdir
+ $(MSBUILD) /t:__ALL_TESTS__
+ $(MAKE) package
+
+package:
+ test -d $(SVNDIR)\$(CONFIG)\Subversion\tests\cmdline || mkdir $(SVNDIR)\$(CONFIG)\Subversion\tests\cmdline
+ test -d $(TARGETDIR)\bin || mkdir $(TARGETDIR)\bin
+ for %%i in (svn svnadmin svndumpfilter svnlook svnserve svnsync svnversion svnrdump svnmucc) do @$(CP) $(CONFIG)\subversion\%%i\%%i.exe $(TARGETDIR)\bin
+ for %%i in (diff diff3 diff4) do @if exist $(CONFIG)\tools\diff\%%i.exe $(CP) $(CONFIG)\tools\diff\%%i.exe $(TARGETDIR)\bin
+ $(CP) $(APRDIR)\$(CONFIG)/*.dll $(TARGETDIR)\bin
+ $(CP) $(APRUTILDIR)\$(CONFIG)/*.dll $(TARGETDIR)\bin
+ $(CP) $(APRICONVDIR)\$(CONFIG)/*.dll $(TARGETDIR)\bin
+ $(CP) $(OPENSSLDIR)\out32dll/*.dll $(TARGETDIR)\bin
+ for %%i in (client delta diff fs ra repos subr wc) do @$(CP) $(CONFIG)\subversion\libsvn_%%i\*.dll $(TARGETDIR)\bin
+
+buildlog:
+ gvim -c "set autoread nowrap" -c "/\(\<0 \)\@<!error" log.devenv
+# 'make check'
+# TODO: also support svncheck, etc
+check:
+ echo %date% %time% :: Starting fsfs file >> log.all-tests
+ python win-tests.py --verbose --cleanup --bin=$(TARGETDIR)\bin --$(CONFIG) -f fsfs 2>&1 | %TEEPATH%\tee log.win-tests
+ echo %date% %time% :: Finished fsfs file >> log.all-tests
+
+
+# check errors
+checklog:
+ gvim -c "set autoread" -p log.win-tests *\*.log "+silent! /X\@<!FAIL\|XPASS"
+
+tags: .
+ REM vim +Ctags +quit
+ ctags -R .
+ $(CP) tags ..\svntags
diff --git a/tools/dev/windows-build/README b/tools/dev/windows-build/README
new file mode 100644
index 0000000..cd05cd8
--- /dev/null
+++ b/tools/dev/windows-build/README
@@ -0,0 +1,22 @@
+Makefiles for automating the Windows build.
+
+Should work with either nmake or GNU make.
+
+Doesn't require Cygwin.
+
+* TODO:
+ - document: how to use
+ - known bugs/shortcomings
+ - separate the configurable parts to a Makefile.local.tmpl file
+ - allow serf,httpd,neon,etc to be optional
+ - auto-generate the list of individual targets from build.conf
+ (that list is not used by the default make targets)
+ - add 'make tools' to the default windows build
+
+See: http://svn.haxx.se/users/archive-2009-07/0764.shtml
+(Message-Id: <alpine.561.2.00.0907241718550.6824@daniel2.local>)
+
+
+'cp' and friends can be obtained from gnuwin32.sf.net, unxutils.sf.net,
+cygwin, etc. Or tweak the makefile to use cp.pl or the built-in 'copy'
+command instead.
diff --git a/tools/dev/windows-build/document-version.pl b/tools/dev/windows-build/document-version.pl
new file mode 100644
index 0000000..398762b
--- /dev/null
+++ b/tools/dev/windows-build/document-version.pl
@@ -0,0 +1,48 @@
+#!/usr/local/bin/perl -w
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+use strict;
+
+use Tie::File;
+#use Cwd 'getcwd';
+
+die "Bad args '@ARGV'" unless (@ARGV >= 3 && @ARGV <= 4);
+
+my ($filename, $TARGETDIR, $SVNDIR, $BUILDDESCR) = (@ARGV, "");
+
+my (@file, $version, $lines);
+
+tie (@file, 'Tie::File', $filename)
+ or die $!;
+
+$version = `svnversion -n` or die;
+$version =~ tr/M//d;
+$version .= '-' . $BUILDDESCR if $BUILDDESCR;
+
+/^#define SVN_VER_TAG/ and s/(?<=dev build).*(?=\)"$)/-r$version/
+ for @file;
+/^#define SVN_VER_NUMTAG/ and s/(?<=-dev).*(?="$)/-r$version/
+ for @file;
+
+mkdir $TARGETDIR unless -d $TARGETDIR;
+
+chdir $SVNDIR;
+system "svn diff -x-p > $TARGETDIR\\$version.diff"
+ and die $!;
+
diff --git a/tools/dev/x509-parser.c b/tools/dev/x509-parser.c
new file mode 100644
index 0000000..5ed2ab4
--- /dev/null
+++ b/tools/dev/x509-parser.c
@@ -0,0 +1,179 @@
+/* x509-parser.c -- print human readable info from an X.509 certificate
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include "svn_pools.h"
+#include "svn_cmdline.h"
+#include "svn_string.h"
+#include "svn_dirent_uri.h"
+#include "svn_io.h"
+#include "svn_base64.h"
+#include "svn_x509.h"
+#include "svn_time.h"
+
+#include "svn_private_config.h"
+
+#define PEM_BEGIN_CERT "-----BEGIN CERTIFICATE-----"
+#define PEM_END_CERT "-----END CERTIFICATE-----"
+
+static svn_error_t *
+show_cert(const svn_string_t *der_cert, apr_pool_t *scratch_pool)
+{
+ svn_x509_certinfo_t *certinfo;
+ const apr_array_header_t *hostnames;
+
+ SVN_ERR(svn_x509_parse_cert(&certinfo, der_cert->data, der_cert->len,
+ scratch_pool, scratch_pool));
+
+ SVN_ERR(svn_cmdline_printf(scratch_pool, _("Subject: %s\n"),
+ svn_x509_certinfo_get_subject(certinfo, scratch_pool)));
+ SVN_ERR(svn_cmdline_printf(scratch_pool, _("Valid from: %s\n"),
+ svn_time_to_human_cstring(
+ svn_x509_certinfo_get_valid_from(certinfo),
+ scratch_pool)));
+ SVN_ERR(svn_cmdline_printf(scratch_pool, _("Valid until: %s\n"),
+ svn_time_to_human_cstring(
+ svn_x509_certinfo_get_valid_to(certinfo),
+ scratch_pool)));
+ SVN_ERR(svn_cmdline_printf(scratch_pool, _("Issuer: %s\n"),
+ svn_x509_certinfo_get_issuer(certinfo, scratch_pool)));
+ SVN_ERR(svn_cmdline_printf(scratch_pool, _("Fingerprint: %s\n"),
+ svn_checksum_to_cstring_display(
+ svn_x509_certinfo_get_digest(certinfo),
+ scratch_pool)));
+
+ hostnames = svn_x509_certinfo_get_hostnames(certinfo);
+ if (hostnames && !apr_is_empty_array(hostnames))
+ {
+ int i;
+ svn_stringbuf_t *buf = svn_stringbuf_create_empty(scratch_pool);
+ for (i = 0; i < hostnames->nelts; ++i)
+ {
+ const char *hostname = APR_ARRAY_IDX(hostnames, i, const char*);
+ if (i > 0)
+ svn_stringbuf_appendbytes(buf, ", ", 2);
+ svn_stringbuf_appendbytes(buf, hostname, strlen(hostname));
+ }
+ SVN_ERR(svn_cmdline_printf(scratch_pool, _("Hostnames: %s\n"),
+ buf->data));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_boolean_t
+is_der_cert(const svn_string_t *raw)
+{
+ /* really simplistic fingerprinting of a DER. By definition it must
+ * start with an ASN.1 tag of a constructed (0x20) sequence (0x10).
+ * It's somewhat unfortunate that 0x30 happens to also come out to the
+ * ASCII for '0' which may mean this will create false positives. */
+ return raw->data[0] == 0x30 ? TRUE : FALSE;
+}
+
+static svn_error_t *
+get_der_cert_from_stream(const svn_string_t **der_cert, svn_stream_t *in,
+ apr_pool_t *pool)
+{
+ svn_string_t *raw;
+ SVN_ERR(svn_string_from_stream2(&raw, in, SVN__STREAM_CHUNK_SIZE,
+ pool));
+
+ *der_cert = NULL;
+
+ /* look for a DER cert */
+ if (is_der_cert(raw))
+ {
+ *der_cert = raw;
+ return SVN_NO_ERROR;
+ }
+ else
+ {
+ const svn_string_t *base64_decoded;
+ const char *start, *end;
+
+ /* Try decoding as base64 without headers */
+ base64_decoded = svn_base64_decode_string(raw, pool);
+ if (base64_decoded && is_der_cert(base64_decoded))
+ {
+ *der_cert = base64_decoded;
+ return SVN_NO_ERROR;
+ }
+
+      /* Try decoding as a PEM with beginning and ending headers. */
+ start = strstr(raw->data, PEM_BEGIN_CERT);
+ end = strstr(raw->data, PEM_END_CERT);
+ if (start && end && end > start)
+ {
+ svn_string_t *encoded;
+
+ start += sizeof(PEM_BEGIN_CERT) - 1;
+ end -= 1;
+ encoded = svn_string_ncreate(start, end - start, pool);
+ base64_decoded = svn_base64_decode_string(encoded, pool);
+ if (is_der_cert(base64_decoded))
+ {
+ *der_cert = base64_decoded;
+ return SVN_NO_ERROR;
+ }
+ }
+ }
+
+ return svn_error_create(SVN_ERR_X509_CERT_INVALID_PEM, NULL,
+ _("Couldn't find certificate in input data"));
+}
+
+int main (int argc, const char *argv[])
+{
+ apr_pool_t *pool = NULL;
+ svn_error_t *err;
+ svn_stream_t *in;
+
+ apr_initialize();
+ atexit(apr_terminate);
+
+ pool = svn_pool_create(NULL);
+
+ if (argc == 2)
+ {
+ const char *target = svn_dirent_canonicalize(argv[1], pool);
+ err = svn_stream_open_readonly(&in, target, pool, pool);
+ }
+ else if (argc == 1)
+ {
+ err = svn_stream_for_stdin2(&in, TRUE, pool);
+ }
+ else
+ err = svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL, _("Too many arguments"));
+
+ if (!err)
+ {
+ const svn_string_t *der_cert;
+ err = get_der_cert_from_stream(&der_cert, in, pool);
+ if (!err)
+ err = show_cert(der_cert, pool);
+ }
+
+ if (err)
+ return svn_cmdline_handle_exit_error(err, pool, "x509-parser: ");
+
+ return 0;
+}
diff --git a/tools/diff/diff.c b/tools/diff/diff.c
new file mode 100644
index 0000000..c69a6d9
--- /dev/null
+++ b/tools/diff/diff.c
@@ -0,0 +1,164 @@
+/* diff.c -- test driver for text diffs
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+
+#include <apr.h>
+#include <apr_general.h>
+#include <apr_file_io.h>
+
+#include "svn_pools.h"
+#include "svn_diff.h"
+#include "svn_io.h"
+#include "svn_utf.h"
+
+static svn_error_t *
+do_diff(svn_stream_t *ostream,
+ const char *original,
+ const char *modified,
+ svn_boolean_t *has_changes,
+ svn_diff_file_options_t *options,
+ svn_boolean_t show_c_function,
+ apr_pool_t *pool)
+{
+ svn_diff_t *diff;
+
+ SVN_ERR(svn_diff_file_diff_2(&diff, original, modified, options, pool));
+ *has_changes = svn_diff_contains_diffs(diff);
+ return svn_diff_file_output_unified4(ostream, diff, original, modified,
+ NULL, NULL, SVN_APR_LOCALE_CHARSET,
+ NULL, show_c_function,
+ options->context_size,
+ NULL, NULL, pool);
+}
+
+static void
+print_usage(svn_stream_t *ostream, const char *progname,
+ apr_pool_t *pool)
+{
+ svn_error_clear(svn_stream_printf(ostream, pool,
+ "Usage: %s [OPTIONS] <file1> <file2>\n"
+ "\n"
+ "Display the differences between <file1> and <file2> in unified diff\n"
+ "format. OPTIONS are diff extensions as described by 'svn help diff'.\n"
+ "Use '--' alone to indicate that no more options follow.\n",
+ progname));
+}
+
+int main(int argc, const char *argv[])
+{
+ apr_pool_t *pool;
+ svn_stream_t *ostream;
+ svn_error_t *svn_err;
+ svn_boolean_t has_changes;
+ svn_diff_file_options_t *diff_options;
+ apr_array_header_t *options_array;
+ int i;
+ const char *from = NULL;
+ const char *to = NULL;
+ svn_boolean_t show_c_function = FALSE;
+ svn_boolean_t no_more_options = FALSE;
+
+ apr_initialize();
+ atexit(apr_terminate);
+
+ pool = svn_pool_create(NULL);
+
+ svn_err = svn_stream_for_stdout(&ostream, pool);
+ if (svn_err)
+ {
+ svn_handle_error2(svn_err, stdout, FALSE, "diff: ");
+ return 2;
+ }
+
+ options_array = apr_array_make(pool, 0, sizeof(const char *));
+
+ diff_options = svn_diff_file_options_create(pool);
+
+ for (i = 1 ; i < argc ; i++)
+ {
+ if (!no_more_options && (argv[i][0] == '-'))
+ {
+ /* Special case: '--' means "no more options follow" */
+ if (argv[i][1] == '-' && !argv[i][2])
+ {
+ no_more_options = TRUE;
+ continue;
+ }
+ /* Special case: we need to detect '-p' and handle it specially */
+ if (argv[i][1] == 'p' && !argv[i][2])
+ {
+ show_c_function = TRUE;
+ continue;
+ }
+ if (argv[i][1] == 'w' && !argv[i][2])
+ {
+ diff_options->ignore_space = svn_diff_file_ignore_space_all;
+ continue;
+ }
+
+ APR_ARRAY_PUSH(options_array, const char *) = argv[i];
+
+ /* Special case: '-U' takes an argument, so capture the
+ * next argument in the array. */
+ if (argv[i][1] == 'U' && !argv[i][2])
+ {
+ i++;
+ APR_ARRAY_PUSH(options_array, const char *) = argv[i];
+ }
+ }
+ else
+ {
+ if (from == NULL)
+ from = argv[i];
+ else if (to == NULL)
+ to = argv[i];
+ else
+ {
+ print_usage(ostream, argv[0], pool);
+ return 2;
+ }
+ }
+ }
+
+ if (!from || !to)
+ {
+ print_usage(ostream, argv[0], pool);
+ return 2;
+ }
+
+ svn_err = svn_diff_file_options_parse(diff_options, options_array, pool);
+ if (svn_err)
+ {
+ svn_handle_error2(svn_err, stdout, FALSE, "diff: ");
+ return 2;
+ }
+
+ svn_err = do_diff(ostream, from, to, &has_changes,
+ diff_options, show_c_function, pool);
+ if (svn_err)
+ {
+ svn_handle_error2(svn_err, stdout, FALSE, "diff: ");
+ return 2;
+ }
+
+ return has_changes ? 1 : 0;
+}
diff --git a/tools/diff/diff3.c b/tools/diff/diff3.c
new file mode 100644
index 0000000..5502254
--- /dev/null
+++ b/tools/diff/diff3.c
@@ -0,0 +1,228 @@
+/* diff3.c -- test driver for 3-way text merges
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+
+#include <apr.h>
+#include <apr_general.h>
+#include <apr_file_io.h>
+
+#include "svn_pools.h"
+#include "svn_diff.h"
+#include "svn_io.h"
+#include "svn_opt.h"
+#include "private/svn_token.h"
+
+
+static svn_error_t *
+do_diff3(svn_stream_t *ostream,
+ const char *original,
+ const char *modified,
+ const char *latest,
+ const char *conflict_original,
+ const char *conflict_modified,
+ const char *conflict_latest,
+ svn_diff_conflict_display_style_t conflict_style,
+ svn_boolean_t *has_changes,
+ apr_pool_t *pool)
+{
+ svn_diff_t *diff;
+
+ SVN_ERR(svn_diff_file_diff3_2(&diff, original, modified, latest,
+ svn_diff_file_options_create(pool), pool));
+
+ *has_changes = svn_diff_contains_diffs(diff);
+
+ SVN_ERR(svn_diff_file_output_merge3(ostream, diff,
+ original, modified, latest,
+ conflict_original,
+ conflict_modified,
+ conflict_latest,
+ "=======",
+ conflict_style,
+ NULL, NULL, /* cancel */
+ pool));
+
+ return NULL;
+}
+
+int main(int argc, const char *argv[])
+{
+ apr_pool_t *pool;
+ svn_stream_t *ostream;
+ int rc;
+ svn_error_t *svn_err = SVN_NO_ERROR;
+ apr_getopt_t *opts;
+ svn_boolean_t help = FALSE;
+
+ enum {
+ conflict_style_opt = SVN_OPT_FIRST_LONGOPT_ID
+ };
+ static const apr_getopt_option_t options[] = {
+ {"conflict-style", conflict_style_opt, 1, ""},
+ {"label", 'L', 1, ""},
+ {"show-overlap", 'E', 0, ""},
+ {"merge", 'm', 0, ""},
+ {"help", 'h', 0, ""},
+ {NULL, '?', 0, ""},
+ {NULL, 0, 0, NULL}
+ };
+ svn_diff_conflict_display_style_t conflict_style
+ = svn_diff_conflict_display_modified_latest;
+ const svn_token_map_t style_map[] = {
+ { "modified-latest",
+ svn_diff_conflict_display_modified_latest },
+ { "resolved-modified-latest",
+ svn_diff_conflict_display_resolved_modified_latest },
+ { "modified-original-latest",
+ svn_diff_conflict_display_modified_original_latest },
+ { "modified",
+ svn_diff_conflict_display_modified },
+ { "latest",
+ svn_diff_conflict_display_latest },
+ { "only-conflicts",
+ svn_diff_conflict_display_only_conflicts },
+ {NULL, 0}
+ };
+ const char *conflict_original = NULL;
+ const char *conflict_modified = NULL;
+ const char *conflict_latest = NULL;
+
+ apr_initialize();
+
+ pool = svn_pool_create(NULL);
+
+ apr_getopt_init(&opts, pool, argc, argv);
+ opts->interleave = 1;
+ while (!svn_err)
+ {
+ int opt;
+ const char *arg;
+ apr_status_t status = apr_getopt_long(opts, options, &opt, &arg);
+
+ if (APR_STATUS_IS_EOF(status))
+ break;
+ if (status != APR_SUCCESS)
+ {
+ svn_err = svn_error_wrap_apr(status, "getopt failure");
+ break;
+ }
+ switch (opt)
+ {
+ case conflict_style_opt:
+ {
+ int val;
+ svn_err = svn_token__from_word_err(&val, style_map, arg);
+ conflict_style = val;
+ break;
+ }
+ case 'L':
+ if (!conflict_modified)
+ conflict_modified = apr_pstrcat(pool, "<<<<<<< ", arg, SVN_VA_NULL);
+ else if (!conflict_original)
+ conflict_original = apr_pstrcat(pool, "||||||| ", arg, SVN_VA_NULL);
+ else if (!conflict_latest)
+ conflict_latest = apr_pstrcat(pool, ">>>>>>> ", arg, SVN_VA_NULL);
+ else
+ svn_err = svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
+ "too many labels");
+ break;
+ case 'E':
+ case 'm':
+ /* These are allowed and ignored so that all the options
+ passed when invoking --diff3-cmd are accepted as that
+ makes it easier to use this as an external diff3
+ program. */
+ break;
+ case 'h':
+ case '?':
+ help = TRUE;
+ break;
+ }
+ }
+
+ if (!svn_err)
+ svn_err = svn_stream_for_stdout(&ostream, pool);
+ if (svn_err)
+ {
+ svn_handle_error2(svn_err, stdout, FALSE, "diff3: ");
+ svn_error_clear(svn_err);
+ rc = 2;
+ }
+ else if (argc - opts->ind == 3 && !help)
+ {
+ svn_boolean_t has_changes;
+
+ svn_err = do_diff3(ostream, argv[argc-2], argv[argc-3], argv[argc-1],
+ conflict_original, conflict_modified, conflict_latest,
+ conflict_style, &has_changes, pool);
+ if (svn_err == NULL)
+ {
+ rc = has_changes ? 1 : 0;
+ }
+ else
+ {
+ svn_handle_error2(svn_err, stdout, FALSE, "diff3: ");
+ rc = 2;
+ }
+ }
+ else
+ {
+ svn_error_clear(svn_stream_printf(ostream, pool,
+ "Usage: %s [options] <mine> <older> <yours>\n"
+ "Options:\n"
+ " --conflict-style STYLE\n"
+ " where STYLE can be:\n"
+ " %s\n"
+ " %s\n"
+ " %s\n"
+ " %s\n"
+ " %s\n"
+ " %s\n"
+ "\n"
+ " --label [-L] LABEL\n"
+ " can be repeated up to three times\n"
+ "\n"
+ " --merge [-m]\n"
+ " ignored (present for compatibility)\n"
+ "\n"
+ " --show-overlap [-E]\n"
+ " ignored (present for compatibility)\n",
+ argv[0],
+ svn_token__to_word(style_map,
+ svn_diff_conflict_display_modified_latest),
+ svn_token__to_word(style_map,
+ svn_diff_conflict_display_resolved_modified_latest),
+ svn_token__to_word(style_map,
+ svn_diff_conflict_display_modified_original_latest),
+ svn_token__to_word(style_map,
+ svn_diff_conflict_display_modified),
+ svn_token__to_word(style_map,
+ svn_diff_conflict_display_latest),
+ svn_token__to_word(style_map,
+ svn_diff_conflict_display_only_conflicts)));
+ rc = 2;
+ }
+
+ apr_terminate();
+
+ return rc;
+}
diff --git a/tools/diff/diff4.c b/tools/diff/diff4.c
new file mode 100644
index 0000000..58216a8
--- /dev/null
+++ b/tools/diff/diff4.c
@@ -0,0 +1,94 @@
+/* diff4.c -- test driver for 4-way text merges
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+
+#include <apr.h>
+#include <apr_general.h>
+#include <apr_file_io.h>
+
+#include "svn_pools.h"
+#include "svn_diff.h"
+#include "svn_io.h"
+
+
+static svn_error_t *
+do_diff4(svn_stream_t *ostream,
+ const char *original,
+ const char *modified,
+ const char *latest,
+ const char *ancestor,
+ apr_pool_t *pool)
+{
+ svn_diff_t *diff;
+
+ SVN_ERR(svn_diff_file_diff4_2(&diff, original, modified, latest, ancestor,
+ svn_diff_file_options_create(pool), pool));
+ SVN_ERR(svn_diff_file_output_merge3(ostream, diff,
+ original, modified, latest,
+ NULL, NULL, NULL, NULL,
+ svn_diff_conflict_display_modified_latest,
+ NULL, NULL, /* cancel */
+ pool));
+
+ return NULL;
+}
+
+int main(int argc, char *argv[])
+{
+ apr_pool_t *pool;
+ svn_stream_t *ostream;
+ int rc = 0;
+ svn_error_t *svn_err;
+
+ apr_initialize();
+
+ pool = svn_pool_create(NULL);
+
+ svn_err = svn_stream_for_stdout(&ostream, pool);
+ if (svn_err)
+ {
+ svn_handle_error2(svn_err, stdout, FALSE, "diff4: ");
+ rc = 2;
+ }
+ else if (argc == 5)
+ {
+ svn_err = do_diff4(ostream,
+ argv[2], argv[1], argv[3], argv[4],
+ pool);
+ if (svn_err != NULL)
+ {
+ svn_handle_error2(svn_err, stdout, FALSE, "diff4: ");
+ rc = 2;
+ }
+ }
+ else
+ {
+ svn_error_clear(svn_stream_printf
+ (ostream, pool, "Usage: %s <mine> <older> <yours> <ancestor>\n",
+ argv[0]));
+ rc = 2;
+ }
+
+ apr_terminate();
+
+ return rc;
+}
diff --git a/tools/dist/README.advisory b/tools/dist/README.advisory
new file mode 100644
index 0000000..2945d1a
--- /dev/null
+++ b/tools/dist/README.advisory
@@ -0,0 +1,78 @@
+A guide to sending security advisory e-mails
+============================================
+
+--------------------------------------------------------
+Step 1: Prepare the advisory texts, patches and metadata
+--------------------------------------------------------
+
+[details are covered elsewhere]
+
+----------------------------------
+Step 2: Prepare the website update
+----------------------------------
+
+ $ cd ${PMC_AREA_WC}/security
+ $ ${TRUNK_WC}/tools/dist/advisory.py generate \
+ --destination=${SITE_WC}/publish/security \
+ CVE-2015-5259 CVE-2015-5343 ...
+
+This will generate a plain-text version of the advisories, including
+patches etc., suitable for publishing on our web site. Once these
+are generated, make sure you add the links to the new files to:
+
+ ${SITE_WC}/publish/security/index.html
+
+
+-----------------------------------------------
+Step 3: Check the advisories and their metadata
+-----------------------------------------------
+
+ $ cd ${PMC_AREA_WC}/security
+ $ ${TRUNK_WC}/tools/dist/advisory.py test \
+ --username=someone \
+ --revision=22091347 \
+ --release-versions=1.8.15,1.9.3 \
+ --release-date=2015-12-15 \
+ CVE-2015-5259 CVE-2015-5343 ...
+
+Assuming all the required bits are in place, this will generate the
+complete text of a GPG-signed e-mail message, signed by and sent from
+someone@apache.org, for all the listed CVE numbers.
+
+Note the arguments:
+
+ --revision is the revision on
+ https://dist.apache.org/repos/dist/dev/subversion
+ in which the tarballs are/will be available
+ (see: notice-template.txt in ${PMC_AREA_WC}/security).
+
+ --release-versions is a comma-separated list of version numbers
+ in which fixes for the CVE numbers will be
+ available.
+
+ --release-date is the expected date of the release(s).
+
+
+----------------------
+Step 4: Send the mails
+----------------------
+
+ $ cd ${PMC_AREA_WC}/security
+ $ ${TRUNK_WC}/tools/dist/advisory.py send \
+ (the rest of the arguments are as in step 3).
+
+The mails will be sent one at a time to each recipient separately.
+
+
+--------------------------------------------------
+Step 5: Wait for the release. Release.
+       Commit the site update prepared in step 2.
+--------------------------------------------------
+
+
+
+TODO: security/mailer.py does not calculate the micalg= PGP/MIME
+ parameter based on the properties of the actual PGP key
+ used. It's currently hard-coded as "pgp-sha512" which *should*
+ be correct for anyone signing these mails with their ASF release
+ signing key.
diff --git a/tools/dist/README.backport b/tools/dist/README.backport
new file mode 100644
index 0000000..0b9c66e
--- /dev/null
+++ b/tools/dist/README.backport
@@ -0,0 +1,65 @@
+A guide to the various backport scripts:
+========================================
+
+There are two primary functions:
+
+F1. Auto-merge bot; the nightly svn-role commits.
+
+F2. Conflicts detector bot; the svn-backport-conflicts-1.9.x buildbot task.
+
+And two interactive functions, described later.
+
+
+
+The scripts are:
+
+backport.pl:
+ oldest script, implements both [F1] and [F2], plus two interactive
+ functions¹. As of March 2015, used in production by svn-role and
+ by svn-backport-conflicts-1.9.x.
+
+nominate.pl:
+ Symlink to backport.pl. Implements one of the two interactive features.
+ Not used by bots.
+
+backport_tests_pl.py:
+ Regression tests for backport.pl.
+
+
+backport/*.py:
+ New Python-based library implementation of STATUS parsing (status.py) and
+ of merging nominated revisions (merger.py). Colloquially referred to as
+ 'backport.py', even though there is no script by that name. Written in
+ Python 3.
+
+ The modules include their unit tests, see 'python3 -munittest
+ backport.status' and 'python3 -munittest backport.merger'. However,
+ changes to these files should be tested both by the unit tests and by the
+ backport_tests_py.py blackbox/regression tests.
+
+detect-conflicting-backports.py:
+ Implementation of [F2] using backport.py.
+
+merge-approved-backports.py:
+ Implementation of [F1] using backport.py.
+
+backport_tests_py.py:
+ Regression tests for detect-conflicting-backports.py and merge-approved-backports.py
+
+
+backport_tests.py:
+ Common part of backport_tests_pl.py and backport_tests_py.py. Uses the
+ svntest framework (../../subversion/tests/cmdline/svntest/), which is
+ written in Python 2.
+
+backport*.dump:
+ Expected output files for backport_tests.py; see the BackportTest
+ decorator.
+
+
+All scripts can be run with '--help' to display their usage messages.
+
+
+
+¹ For backport.pl's interactive features, see:
+<http://mail-archives.apache.org/mod_mbox/subversion-dev/201503.mbox/%3c20150304225114.GD2036@tarsus.local2%3e>
diff --git a/tools/dist/advisory.py b/tools/dist/advisory.py
new file mode 100755
index 0000000..ea3c6c0
--- /dev/null
+++ b/tools/dist/advisory.py
@@ -0,0 +1,182 @@
+#!/usr/bin/env python
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+"""
+Send GPG-signed security advisory e-mails from an @apache.org address
+to a known list of recipients, or write the advisory text in a form
+suitable for publishing on http://subversion.apache.org/.
+
+Usage: cd to the root directory of the advisory descriptions, then:
+
+ $ ${TRUNK_WC}/tools/dist/advisory.py send \
+ --username=<ASF-username> \
+ --revision=<dist-dev-revision-number>
+ --release-versions=<target-releases> \
+ --release-date=<expected-release-date> <CVE-number>...
+
+or
+
+ $ ${TRUNK_WC}/tools/dist/advisory.py test \
+ (... --username, etc. as above)
+
+or
+
+ $ ${TRUNK_WC}/tools/dist/advisory.py generate \
+ --destination=${SITE_WC}/publish/security \
+ <CVE-number>...
+"""
+
+from __future__ import absolute_import
+
+import os
+import sys
+import argparse
+import datetime
+import getpass
+import re
+
+import security.parser
+import security.adviser
+import security.mailer
+import security.mailinglist
+
+ROOTDIR = os.path.abspath(os.getcwd())
+NOTICE_TEMPLATE = 'notice-template.txt'
+MAILING_LIST = 'pre-notifications.txt'
+
+
def parse_args(argv):
    """Parse *argv* (normally sys.argv[1:]) and return the namespace.

    argparse handles --help and rejects unknown commands/options itself;
    the per-command option requirements are enforced later by
    check_sendmail()/check_generate().
    """
    argparser = argparse.ArgumentParser(
        prog=os.path.basename(__file__), add_help=True,
        description="""\
Send GPG-signed security advisory e-mails from an @apache.org address
to a known list of recipients, or write the advisory text in a form
suitable for publishing on http://subversion.apache.org/.
""")
    argparser.add_argument(
        'command', action='store',
        choices=['send', 'test', 'generate'],
        help=('send: send mail; '
              'test: write the mail to standard output; '
              'generate: write an advisory for the website'))
    # All options below are formally optional; which ones are actually
    # required depends on the chosen command.
    option_table = (
        ('--username',
         {'action': 'store', 'required': False,
          'help': 'the @apache.org username of the sender'}),
        ('--revision',
         {'action': 'store', 'required': False, 'type': int,
          'help': ('revision on dist.a.o./repos/dist/dev/subversion '
                   'in which the patched tarballs are available')}),
        ('--release-versions',
         {'action': 'store', 'required': False,
          'help': ('comma-separated list of future released versions '
                   'that will contain the fix(es)')}),
        ('--release-date',
         {'action': 'store', 'required': False,
          'help': ('expected release date for the above mentioned'
                   ' versions (in ISO format, YYYY-MM-DD)')}),
        ('--destination',
         {'action': 'store', 'required': False,
          'help': ('the directory where the website advisory should be '
                   'written; usually ${SITE_WC}/publish/security')}),
    )
    for flag, options in option_table:
        argparser.add_argument(flag, **options)
    argparser.add_argument('cve', nargs='+')

    return argparser.parse_args(argv)
+
+
def check_root():
    """Verify that the CWD looks like the advisory root directory.

    Both the notice template and the pre-notification mailing list must
    exist under ROOTDIR; exit with status 1 (after a message on stderr)
    as soon as one is missing.
    """
    for required_file in (NOTICE_TEMPLATE, MAILING_LIST):
        if not os.path.isfile(os.path.join(ROOTDIR, required_file)):
            sys.stderr.write('Missing file: ' + required_file + '\n')
            sys.exit(1)
+
+
def check_sendmail(args):
    """Validate and normalize the arguments of the send/test commands.

    --username, --revision, --release-versions and --release-date must
    all be present (plus at least one CVE number) and --destination must
    not be.  On success, args.release_versions is converted to a list
    and args.release_date to a datetime; on failure the process exits
    with status 1.
    """
    required = (args.username, args.revision,
                args.release_versions,
                args.release_date, args.cve)
    if args.destination or not all(required):
        sys.stderr.write(
            'The "' + args.command + '" command requires the '
            'following options:\n'
            ' --username, --revision, --release-versions, --release-date\n'
            ' and a list of CVE numbers.\n')
        sys.exit(1)
    args.release_versions = re.split(r'\s*,\s*', args.release_versions)
    args.release_date = datetime.datetime.strptime(args.release_date,
                                                   '%Y-%m-%d')
+
+
def sendmail(really_send, args):
    """Compose the GPG-signed advisory mail and send or preview it.

    really_send -- when False (the 'test' command), write the generated
                   message to stdout instead of sending it.
    args        -- the namespace produced by parse_args()/check_sendmail().
    """
    notice_template = os.path.join(ROOTDIR, NOTICE_TEMPLATE)
    mailing_list = os.path.join(ROOTDIR, MAILING_LIST)
    sender = args.username + '@apache.org'
    notification = security.parser.Notification(ROOTDIR, *args.cve)
    # Use the 'sender' computed above; the original assigned it but then
    # rebuilt the same address inline, leaving the variable unused.
    mailer = security.mailer.Mailer(notification,
                                    sender,
                                    notice_template,
                                    args.release_date,
                                    args.revision,
                                    *args.release_versions)
    message = mailer.generate_message()
    recipients = security.mailinglist.MailingList(mailing_list)
    if not really_send:
        sys.stdout.write(message.as_string())
        return

    # Send the message via the ASF relay, one recipient at a time.
    password = getpass.getpass('Password for ' + args.username
                               + ' at mail-relay.apache.org: ')
    mailer.send_mail(message, args.username, password,
                     recipients=recipients)
+
+
def check_generate(args):
    """Validate the arguments of the 'generate' command.

    Requires --destination (which must be an existing directory) and at
    least one CVE number, and rejects the send/test-only options.  Exits
    with status 1 on any violation; returns None on success.
    """
    if (not (args.destination and args.cve)
        or args.username or args.revision
        or args.release_versions
        or args.release_date):
        sys.stderr.write(
            'The "generate" command requires the '
            '--destination option '
            'and a list of CVE numbers.\n')
        sys.exit(1)
    if not os.path.isdir(args.destination):
        # Terminate with a newline, consistent with every other error
        # message this script writes (the original omitted it).
        sys.stderr.write(args.destination + ' is not a directory\n')
        sys.exit(1)
+
def generate(args):
    """Write website-ready advisory texts for args.cve into args.destination."""
    notification = security.parser.Notification(ROOTDIR, *args.cve)
    security.adviser.generate(notification, args.destination)
+
+
def main():
    """Entry point: sanity-check the CWD, then dispatch to the subcommand."""
    check_root()
    args = parse_args(sys.argv[1:])
    if args.command == 'generate':
        check_generate(args)
        generate(args)
    elif args.command in ('send', 'test'):
        # 'test' runs the full pipeline but prints instead of sending.
        check_sendmail(args)
        sendmail(args.command == 'send', args)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/dist/backport.pl b/tools/dist/backport.pl
new file mode 100755
index 0000000..67f8313
--- /dev/null
+++ b/tools/dist/backport.pl
@@ -0,0 +1,1325 @@
+#!/usr/bin/perl
+use warnings;
+use strict;
+use feature qw/switch say/;
+
+use v5.10.0; # needed for $^V
+
+# The given/when smartmatch facility, introduced in Perl v5.10, was made
+# experimental and "subject to change" in v5.18 (see perl5180delta). Every
+# use of it now triggers a warning.
+#
+# As of Perl v5.24.1, the semantics of given/when provided by Perl are
+# compatible with those expected by the script, so disable the warning for
+# those Perls.  But don't try to disable the warning category on Perls
+# that don't know that category, since that breaks compilation.
+no if (v5.17.0 le $^V and $^V le v5.24.1),
+ warnings => 'experimental::smartmatch';
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+use Carp qw/croak confess carp cluck/;
+use Digest ();
+use Term::ReadKey qw/ReadMode ReadKey/;
+use File::Basename qw/basename dirname/;
+use File::Copy qw/copy move/;
+use File::Temp qw/tempfile/;
+use IO::Select ();
+use IPC::Open3 qw/open3/;
+use POSIX qw/ctermid strftime/;
+use Text::Wrap qw/wrap/;
+use Tie::File ();
+
+############### Start of reading values from environment ###############
+
+# Programs we use.
+#
+# TODO: document which are interpreted by sh and which should point to binary.
+my $SVN = $ENV{SVN} || 'svn'; # passed unquoted to sh
+$SVN .= " --config-option=config:miscellany:log-encoding=UTF-8";
+my $SHELL = $ENV{SHELL} // '/bin/sh';
+my $VIM = 'vim';
+my $EDITOR = $ENV{SVN_EDITOR} // $ENV{VISUAL} // $ENV{EDITOR} // 'ed';
+my $PAGER = $ENV{PAGER} // 'less' // 'cat';
+
+# Mode flags.
# The three run modes of this script; $MODE is chosen below from the
# $YES / $MAY_COMMIT environment knobs.
package Mode {
  use constant {
    AutoCommitApproveds => 1, # used by nightly commits (svn-role)
    Conflicts => 2, # used by the hourly conflicts-detection buildbot
    Interactive => 3,
  };
};
+my $YES = ($ENV{YES} // "0") =~ /^(1|yes|true)$/i; # batch mode: eliminate prompts, add sleeps
+my $MAY_COMMIT = ($ENV{MAY_COMMIT} // "false") =~ /^(1|yes|true)$/i;
+my $MODE = ($YES ? ($MAY_COMMIT ? Mode::AutoCommitApproveds : Mode::Conflicts )
+ : Mode::Interactive );
+
+# Other knobs.
+my $VERBOSE = 0;
+my $DEBUG = (exists $ENV{DEBUG}); # 'set -x', etc
+
+# Force all these knobs to be usable via @sh.
+my @sh = qw/false true/;
+die if grep { ($sh[$_] eq 'true') != !!$_ } $DEBUG, $MAY_COMMIT, $VERBOSE, $YES;
+
+# Username for entering votes.
+my $SVN_A_O_REALM = '<https://svn.apache.org:443> ASF Committers';
+my ($AVAILID) = $ENV{AVAILID} // do {
+ local $_ = `$SVN auth svn.apache.org:443 2>/dev/null`; # TODO: pass $SVN_A_O_REALM
+ ($? == 0 && /Auth.*realm: \Q$SVN_A_O_REALM\E\nUsername: (.*)/) ? $1 : undef
+} // do {
+ local $/; # slurp mode
+ my $fh;
+ my $dir = "$ENV{HOME}/.subversion/auth/svn.simple/";
+ my $filename = Digest->new("MD5")->add($SVN_A_O_REALM)->hexdigest;
+ open $fh, '<', "$dir/$filename"
+ and <$fh> =~ /K 8\nusername\nV \d+\n(.*)/
+ ? $1
+ : undef
+};
+
+unless (defined $AVAILID) {
+ unless ($MODE == Mode::Conflicts) {
+ warn "Username for commits (of votes/merges) not found; "
+ ."it will be possible to review nominations but not to commit votes "
+ ."or merges.\n";
+ warn "Press the 'any' key to continue...\n";
+ die if $MODE == Mode::AutoCommitApproveds; # unattended mode; can't prompt.
+ ReadMode 'cbreak';
+ ReadKey 0;
+ ReadMode 'restore';
+ }
+}
+
+############## End of reading values from the environment ##############
+
+# Constants.
+my $STATUS = './STATUS';
+my $STATEFILE = './.backports1';
+my $BRANCHES = '^/subversion/branches';
+my $TRUNK = '^/subversion/trunk';
+$ENV{LC_ALL} = "C"; # since we parse 'svn info' output
+
+# Globals.
+my %ERRORS = ();
+# TODO: can $MERGED_SOMETHING be removed and references to it replaced by scalar(@MERGES_TODAY) ?
+# alternately, does @MERGES_TODAY need to be purged whenever $MERGED_SOMETHING is reset?
+# The scalar is only used in interactive runs, but the array is used in
+# svn-role batch mode too.
+my @MERGES_TODAY;
+my $MERGED_SOMETHING = 0;
+my $SVNq;
+
+# Derived values.
+my $SVNvsn = do {
+ my ($major, $minor, $patch) = `$SVN --version -q` =~ /^(\d+)\.(\d+)\.(\d+)/;
+ 1e6*$major + 1e3*$minor + $patch;
+};
+$SVN .= " --non-interactive" if $YES or not defined ctermid;
+$SVNq = "$SVN -q ";
+$SVNq =~ s/-q// if $DEBUG;
+
+
+my $BACKPORT_OPTIONS_HELP = <<EOF;
+y: Run a merge. It will not be committed.
+ WARNING: This will run 'update' and 'revert -R ./'.
+l: Show logs for the entries being nominated.
+v: Show the full entry (the prompt only shows an abridged version).
+q: Quit the "for each entry" loop. If you have entered any votes or
+ approvals, you will be prompted to commit them.
+±1: Enter a +1 or -1 vote
+ You will be prompted to commit your vote at the end.
+±0: Enter a +0 or -0 vote
+ You will be prompted to commit your vote at the end.
+a: Move the entry to the "Approved changes" section.
+ When both approving and voting on an entry, approve first: for example,
+ to enter a third +1 vote, type "a" "+" "1".
+e: Edit the entry in \$EDITOR, which is '$EDITOR'.
+ You will be prompted to commit your edits at the end.
+N: Move to the next entry. Do not prompt for the current entry again, even
+ in future runs, unless the STATUS nomination has been modified (e.g.,
+ revisions added, justification changed) in the repository.
+ (This is a local action that will not affect other people or bots.)
+ : Move to the next entry. Prompt for the current entry again in the next
+ run of backport.pl.
+ (That's a space character, ASCII 0x20.)
+?: Display this list.
+EOF
+
+my $BACKPORT_OPTIONS_MERGE_OPTIONS_HELP = <<EOF;
+y: Open a shell.
+d: View a diff.
+N: Move to the next entry.
+?: Display this list.
+EOF
+
# Print the usage/help message for backport.pl to stdout.  The two
# $BACKPORT_OPTIONS*_HELP constants are interpolated into the heredoc.
sub backport_usage {
  my $basename = basename $0;
  print <<EOF;
backport.pl: a tool for reviewing, merging, and voting on STATUS entries.

Normally, invoke this with CWD being the root of the stable branch (e.g.,
1.8.x):

    Usage: test -e \$d/STATUS && cd \$d && \\
           backport.pl [PATTERN]
    (where \$d is a working copy of branches/1.8.x)

Alternatively, invoke this via a symlink named "b" placed at the same directory
as the STATUS file, in which case the CWD doesn't matter (the script will cd):

    Usage: ln -s /path/to/backport.pl \$d/b && \\
           \$d/b [PATTERN]
    (where \$d is a working copy of branches/1.8.x)

In either case, the ./STATUS file should be at HEAD.  If it has local mods,
they will be preserved through 'revert' operations but included in 'commit'
operations.

If PATTERN is provided, only entries which match PATTERN are considered.  The
sense of "match" is either substring (fgrep) or Perl regexp (with /msi).

In interactive mode (the default), you will be prompted once per STATUS entry.
At a prompt, you have the following options:

$BACKPORT_OPTIONS_HELP

After running a merge, you have the following options:

$BACKPORT_OPTIONS_MERGE_OPTIONS_HELP

To commit a merge, you have two options: either answer 'y' to the second prompt
to open a shell, and manually run 'svn commit' therein; or set \$MAY_COMMIT=1
in the environment before running the script, in which case answering 'y'
to the first prompt will not only run the merge but also commit it.

There are two batch modes.  The first mode is used by the nightly svn-role
mergebot.  It is enabled by setting \$YES and \$MAY_COMMIT to '1' in the
environment.  In this mode, the script will iterate the "Approved changes:"
section and merge and commit each entry therein.  To prevent an entry from
being auto-merged, veto it or move it to a new section named "Approved, but
merge manually:".

The second batch mode is used by the hourly conflicts detector bot.  It is
triggered by having \$YES defined in the environment to '1' and \$MAY_COMMIT
undefined.  In this mode, the script will locally merge every nomination
(including unapproved and vetoed ones), and complain to stderr if the merge
failed due to a conflict.  This mode never commits anything.

The hourly conflicts detector bot turns red if any entry produced a merge
conflict.  When entry A depends on entry B for a clean merge, put a "Depends:"
header on entry A to instruct the bot not to turn red due to A.  (The header
is not parsed; only its presence or absence matters.)

Both batch modes also perform a basic sanity-check on entries that declare
backport branches (via the "Branch:" header): if a backport branch is used, but
at least one of the revisions enumerated in the entry title had neither been
merged from $TRUNK to the branch root, nor been committed
directly to the backport branch, the hourly bot will turn red and
nightly bot will skip the entry and email its admins.  (The nightly bot does
not email the list on failure, since it doesn't use buildbot.)

The 'svn' binary defined by the environment variable \$SVN, or otherwise the
'svn' found in \$PATH, will be used to manage the working copy.
EOF
}
+
# Print the usage/help message for the nominate.pl entry point (this
# script invoked via the "nominate.pl"/"n" symlink) to stdout.
sub nominate_usage {
  my $availid = $AVAILID // "(your username)";
  my $basename = basename $0;
  print <<EOF;
nominate.pl: a tool for adding entries to STATUS.

Usage: $0 "r42, r43, r45" "\$Some_justification"

Will add:
 * r42, r43, r45
   (log message of r42)
   Justification:
     \$Some_justification
   Votes:
     +1: $availid
to STATUS.  Backport branches are detected automatically.

The revisions argument may contain arbitrary text (besides the revision
numbers); it will be ignored.  For example,
    $0 "Committed revision 42." "\$Some_justification"
will nominate r42.

The justification can be an arbitrarily-long string; if it is wider than the
available width, this script will wrap it for you (and allow you to review
the result before committing).

The STATUS file in the current directory is used (unless argv[0] is "n", in
which case the STATUS file in the directory of argv[0] is used; the intent
is to create a symlink named "n" in the branch wc root).

EOF
# TODO: Optionally add a "Notes" section.
# TODO: Look for backport branches named after issues.
# TODO: Do a dry-run merge on added entries.
# TODO: Do a dry-run merge on interactively-edited entries in backport.pl
}
+
# Warn (and return true) when $AVAILID is undefined, i.e., when we do not
# know the user's committer username; return false when it is known.
#
# The single argument is a caller-supplied string interpolated into the
# warning message.
sub warned_cannot_commit {
  my ($caller_error_string) = @_;
  unless (defined $AVAILID) {
    warn "$0: $caller_error_string: unable to determine your username via \$AVAILID or svnauth(1) or ~/.subversion/auth/";
    return 1;
  }
  return 0;
}
+
# Return the MD5 hex digest of the concatenation of all arguments.
sub digest_string {
  my $hasher = Digest->new("MD5");
  $hasher->add(@_);
  return $hasher->hexdigest;
}
+
# Digest a STATUS entry after canonicalizing its trailing EOLs to exactly
# two.  This matters when there's an empty line after the last entry in
# Approved, for example.
sub digest_entry($) {
  my ($entry_text) = @_;
  # s/\n*\z// always matches (zero-width at end of string), so the
  # two-newline suffix is appended unconditionally, as in the original.
  $entry_text =~ s/\n*\z//;
  $entry_text .= "\n\n";
  digest_string($entry_text)
}
+
# Prompt the user with $_[0] and read a single keystroke.
#
# Remaining arguments are key => value options:
#   extra     => qr/.../  read a second keystroke if the first matches
#   dontprint => 1        suppress the trailing newline after the answer
#   verbose   => 1        return the raw answer instead of a 1/0
#                         interpretation of "starts with y"
#
# Dies if called in batch mode ($YES).
sub prompt {
  print $_[0]; shift;
  my %args = @_;
  my $getchar = sub {
    my $answer;
    do {
      ReadMode 'cbreak';
      $answer = (ReadKey 0);
      ReadMode 'normal';
      die if $@ or not defined $answer;
      # Swallow terminal escape codes (e.g., arrow keys): drain any
      # buffered bytes with non-blocking reads until none remain.
      unless ($answer =~ m/^(?:[[:print:]]+|\s+)$/) {
        $answer = (ReadKey -1) while defined $answer;
        # TODO: provide an indication that the keystroke was sensed and ignored.
      }
    } until defined $answer and ($answer =~ m/^(?:[[:print:]]+|\s+)$/);
    print $answer;
    return $answer;
  };

  die "$0: called prompt() in non-interactive mode!" if $YES;
  my $answer = $getchar->();
  $answer .= $getchar->() if exists $args{extra} and $answer =~ $args{extra};
  say "" unless $args{dontprint};
  return $args{verbose}
         ? $answer
         : ($answer =~ /^y/i) ? 1 : 0;
}
+
# Bourne-escape each argument for safe interpolation into a shell command.
# Example:
#   >>> shell_escape(q[foo'bar]) eq q['foo'\''bar']
#   True
# In list context returns all escaped strings; in scalar context, the first.
sub shell_escape {
  my @escaped;
  for my $arg (@_) {
    my $copy = $arg;   # @_ aliases caller data; never mutate it in place
    $copy =~ s/\x27/'\\\x27'/g;
    push @escaped, "'$copy'";
  }
  wantarray ? @escaped : $escaped[0]
}
+
# Predicate: true when the given path/URL consists only of benign
# characters and doesn't start with '-' or '+' (so it can't be mistaken
# for an option when passed unquoted to a shell command).
sub shell_safe_path_or_url($) {
  my ($candidate) = @_;
  return ($candidate =~ m{^[A-Za-z0-9._:+/-]+$}
          and $candidate !~ /^-|^[+]/);
}
+
# Shell-safety-validating wrapper for File::Temp::tempfile: croaks rather
# than return a tempfile whose name could not be interpolated safely.
sub my_tempfile {
  my ($handle, $name) = tempfile();
  shell_safe_path_or_url $name
    or croak "Tempfile name '$name' not shell-safe; aborting";
  return ($handle, $name);
}
+
# The first argument is a shell script.  Run it and return the shell's
# exit code, and stdout and stderr as references to arrays of lines.
# The child's output is also tee'd to our own stdout/stderr as it arrives.
sub run_in_shell($) {
  my $script = shift;
  my $pid = open3 \*SHELL_IN, \*SHELL_OUT, \*SHELL_ERR, qw#/bin/sh#;
  # open3 raises exception when it fails; no need to error check

  print SHELL_IN $script;
  close SHELL_IN;

  # Read loop: tee stdout,stderr to arrays.  select() multiplexes the two
  # pipes so neither can deadlock on a full buffer.
  my $select = IO::Select->new(\*SHELL_OUT, \*SHELL_ERR);
  my (@readable, $outlines, $errlines);
  while (@readable = $select->can_read) {
    for my $fh (@readable) {
      my $line = <$fh>;
      # Stop watching a handle once it hits EOF.
      $select->remove($fh) if eof $fh or not defined $line;
      next unless defined $line;

      # Identify which pipe fired by comparing glob references.
      if ($fh == \*SHELL_OUT) {
        push @$outlines, $line;
        print STDOUT $line;
      }
      if ($fh == \*SHELL_ERR) {
        push @$errlines, $line;
        print STDERR $line;
      }
    }
  }
  waitpid $pid, 0; # sets $?
  return $?, $outlines, $errlines;
}
+
+
# Merge one STATUS entry into the stable-branch working copy (the CWD),
# by generating and running a small shell script.  In $MAY_COMMIT mode the
# script also removes the entry from STATUS (via $VIM batch edits) and
# commits; otherwise it only shows what would have been committed.
#
# $_[0] is a reference to an %entry hash as returned by parse_entry().
#
# EXPECTED_ERROR_P is subref called with EXIT_CODE, OUTLINES, ERRLINES,
# expected to return TRUE if the error should be considered fatal (cause
# backport.pl to exit non-zero) or not.  It may be undef for default behaviour.
sub merge {
  my %entry = %{ +shift };
  my $expected_error_p = shift // sub { 0 }; # by default, errors are unexpected
  # Paragraph number of this entry within STATUS, adjusted downwards for
  # entries already removed by merges committed earlier today.
  my $parno = $entry{parno} - scalar grep { $_->{parno} < $entry{parno} } @MERGES_TODAY;

  my ($logmsg_fh, $logmsg_filename) = my_tempfile();
  my (@mergeargs);

  my $shell_escaped_branch = shell_escape($entry{branch})
    if defined($entry{branch});

  # Build the 'svn merge' argument list and write the log message:
  # branch entries reintegrate/merge the backport branch; revision
  # entries cherry-pick the listed revisions from trunk.
  if ($entry{branch}) {
    if ($SVNvsn >= 1_008_000) {
      @mergeargs = shell_escape "$BRANCHES/$entry{branch}";
      say $logmsg_fh "Merge $entry{header}:";
    } else {
      @mergeargs = shell_escape qw/--reintegrate/, "$BRANCHES/$entry{branch}";
      say $logmsg_fh "Reintegrate $entry{header}:";
    }
    say $logmsg_fh "";
  } elsif (@{$entry{revisions}}) {
    @mergeargs = shell_escape(
      ($entry{accept} ? "--accept=$entry{accept}" : ()),
      (map { "-c$_" } @{$entry{revisions}}),
      '--',
      '^/subversion/trunk',
    );
    say $logmsg_fh
      "Merge $entry{header} from trunk",
      $entry{accept} ? ", with --accept=$entry{accept}" : "",
      ":";
    say $logmsg_fh "";
  } else {
    die "Don't know how to call $entry{header}";
  }
  say $logmsg_fh $_ for @{$entry{entry}};
  close $logmsg_fh or die "Can't close $logmsg_filename: $!";

  my $reintegrated_word = ($SVNvsn >= 1_008_000) ? "merged" : "reintegrated";
  my $script = <<"EOF";
#!/bin/sh
set -e
if $sh[$DEBUG]; then
  set -x
fi
$SVNq up
$SVNq merge @mergeargs
if [ "`$SVN status -q | wc -l`" -eq 1 ]; then
  if [ -z "`$SVN diff | perl -lne 'print if s/^(Added|Deleted|Modified): //' | grep -vx svn:mergeinfo`" ]; then
    # This check detects STATUS entries that name non-^/subversion/ revnums.
    # ### Q: What if we actually commit a mergeinfo fix to trunk and then want
    # ### to backport it?
    # ### A: We don't merge it using the script.
    echo "Bogus merge: includes only svn:mergeinfo changes!" >&2
    exit 2
  fi
fi
if $sh[$MAY_COMMIT]; then
  # Remove the approved entry.  The sentinel is important when the entry being
  # removed is the very last one in STATUS, and in that case it has two effects:
  # (1) keeps STATUS from ending in a run of multiple empty lines;
  # (2) makes the \x{7d}k motion behave the same as in all other cases.
  #
  # Use a tempfile because otherwise backport_main() would see the "sentinel paragraph".
  # Since backport_main() has an open descriptor, it will continue to see
  # the STATUS inode that existed when control flow entered backport_main();
  # since we replace the file on disk, when this block of code runs in the
  # next iteration, it will see the new contents.
  cp $STATUS $STATUS.t
  (echo; echo; echo "sentinel paragraph") >> $STATUS.t
  $VIM -e -s -n -N -i NONE -u NONE -c ':0normal! $parno\x{7d}kdap' -c wq $STATUS.t
  $VIM -e -s -n -N -i NONE -u NONE -c '\$normal! dap' -c wq $STATUS.t
  mv $STATUS.t $STATUS
  $SVNq commit -F $logmsg_filename
elif ! $sh[$YES]; then
  echo "Would have committed:"
  echo '[[['
  $SVN status -q
  echo 'M       STATUS (not shown in the diff)'
  cat $logmsg_filename
  echo ']]]'
fi
EOF

  if ($MAY_COMMIT) {
    # STATUS has been edited and the change has been committed
    push @MERGES_TODAY, \%entry;
  }

  # For branch entries, also remove the backport branch after reintegration
  # (or, outside commit mode, say that we would).
  $script .= <<"EOF" if $entry{branch};
reinteg_rev=\`$SVN info $STATUS | sed -ne 's/Last Changed Rev: //p'\`
if $sh[$MAY_COMMIT]; then
  # Sleep to avoid out-of-order commit notifications
  if $sh[$YES]; then sleep 15; fi
  $SVNq rm $BRANCHES/$shell_escaped_branch -m "Remove the '"$shell_escaped_branch"' branch, $reintegrated_word in r\$reinteg_rev."
  if $sh[$YES]; then sleep 1; fi
elif ! $sh[$YES]; then
  echo "Would remove $reintegrated_word '"$shell_escaped_branch"' branch"
fi
EOF

  # Include the time so it's easier to find the interesting backups.
  my $backupfile = strftime "backport_pl.%Y%m%d-%H%M%S.$$.tmp", localtime;
  die if -s $backupfile;
  system("$SVN diff > $backupfile") == 0
    or die "Saving a backup diff ($backupfile) failed ($?): $!";
  if (-z $backupfile) {
    unlink $backupfile;
  } else {
    warn "Local mods saved to '$backupfile'\n";
  }

  # If $MAY_COMMIT, then $script will edit STATUS anyway.
  revert(verbose => 0, discard_STATUS => $MAY_COMMIT);

  $MERGED_SOMETHING++;
  my ($exit_code, $outlines, $errlines) = run_in_shell $script;
  unless ($! == 0) {
    die "system() failed to spawn subshell ($!); aborting";
  }
  unless ($exit_code == 0) {
    warn "$0: subshell exited with code $exit_code (in '$entry{header}') "
         ."(maybe due to 'set -e'?)";

    # If we're committing, don't attempt to guess the problem and gracefully
    # continue; just abort.
    if ($MAY_COMMIT) {
      die "Lost track of paragraph numbers; aborting";
    }

    # Record the error, unless the caller wants not to.
    $ERRORS{$entry{id}} = [\%entry, "subshell exited with code $exit_code"]
      unless $expected_error_p->($exit_code, $outlines, $errlines);
  }

  # Keep the log message around for post-mortem when the merge failed.
  unlink $logmsg_filename unless $exit_code;
}
+
+# Input formats:
+# "1.8.x-r42",
+# "branches/1.8.x-r42",
+# "branches/1.8.x-r42/",
+# "subversion/branches/1.8.x-r42",
+# "subversion/branches/1.8.x-r42/",
+# "^/subversion/branches/1.8.x-r42",
+# "^/subversion/branches/1.8.x-r42/",
+# Return value:
+# "1.8.x-r42"
+# Works for any branch name that doesn't include slashes.
sub sanitize_branch {
  my ($branch) = @_;
  # Order matters: trim whitespace first, then trailing slashes, then
  # everything up to (and including) the last remaining slash.
  for ($branch) {
    s/^\s*//;    # leading whitespace
    s/\s*$//;    # trailing whitespace
    s#/*$##;     # trailing slashes
    s#.*/##;     # any leading path components
  }
  return $branch;
}
+
# One-line summary of an entry: the first logsummary line, with an
# ellipsis marker appended when further lines exist.
sub logsummarysummary {
  my ($entry) = @_;
  my @summary = @{ $entry->{logsummary} };
  my $ellipsis = (@summary > 1) ? '[...]' : '';
  return $summary[0] . $ellipsis;
}
+
+# TODO: may need to parse other headers too?
# Parse one STATUS entry.
#
# Arguments: the entry's raw text, its paragraph number within STATUS,
# and the entry's lines.  Returns a flat key/value list describing the
# entry (revisions, logsummary, branch, votes, header, id, digest, ...).
#
# NOTE: the body destructively shifts/pops @_ (the entry's lines) as it
# consumes each section; @lines above preserves the original for 'entry'.
sub parse_entry {
  my $raw = shift;
  my $parno = shift;
  my @lines = @_;
  my $depends;
  my $accept;
  my (@revisions, @logsummary, $branch, @votes);
  # @lines = @_;

  # strip spaces to match up with the indention
  $_[0] =~ s/^( *)\* //;
  my $indentation = ' ' x (length($1) + 2);
  s/^$indentation// for @_;

  # Ignore trailing spaces: it is not significant on any field, and makes the
  # regexes simpler.
  s/\s*$// for @_;

  # revisions
  # A leading "<name> branch" or ".../branches/<name>" line names a
  # backport branch; otherwise consume lines of revision numbers.
  $branch = sanitize_branch $1
    and shift
    if $_[0] =~ /^(\S*) branch$/ or $_[0] =~ m#branches/(\S+)#;
  while ($_[0] =~ /^(?:r?\d+[,; ]*)+$/) {
    push @revisions, ($_[0] =~ /(\d+)/g);
    shift;
  }

  # summary
  # Everything up to the first "Header:" line is the log summary.
  do {
    push @logsummary, shift
  } until $_[0] =~ /^\s*[A-Z][][\w]*:/ or not defined $_[0];

  # votes
  # Collect trailing lines back to (and discarding) the "Votes:" header.
  unshift @votes, pop until $_[-1] =~ /^\s*Votes:/ or not defined $_[-1];
  pop;

  # depends, branch, notes
  # Ignored headers: Changes[*]
  while (@_) {
    given (shift) {
      when (/^Depends:/) {
        # Presence-only header; the value is not parsed.
        $depends++;
      }
      if (s/^Branch:\s*//) {
        $branch = sanitize_branch ($_ || shift || die "Branch header found without value");
      }
      if (s/^Notes:\s*//) {
        my $notes = $_;
        $notes .= shift while @_ and $_[0] !~ /^\w/;
        # Extract any --accept=ARG hints from the notes text.
        my %accepts = map { $_ => 1 } ($notes =~ /--accept[ =]([a-z-]+)/g);
        given (scalar keys %accepts) {
          when (0) { }
          when (1) { $accept = [keys %accepts]->[0]; }
          default {
            warn "Too many --accept values at '",
                 logsummarysummary({ logsummary => [@logsummary] }),
                 "'";
          }
        }
      }
    }
  }

  # Compute a header.
  my ($header, $id);
  if ($branch) {
    $header = "the $branch branch";
    $id = $branch;
  } elsif (@revisions == 1) {
    $header = "r$revisions[0]";
    $id = "r$revisions[0]";
  } elsif (@revisions) {
    $header = "the r$revisions[0] group";
    $id = "r$revisions[0]";
  } else {
    die "Entry '$raw' has neither revisions nor branch";
  }
  my $header_start = ($header =~ /^the/ ? ucfirst($header) : $header);

  warn "Entry has both branch '$branch' and --accept=$accept specified\n"
    if $branch and $accept;

  return (
    revisions => [@revisions],
    logsummary => [@logsummary],
    branch => $branch,
    header => $header,
    header_start => $header_start,
    depends => $depends,
    id => $id,
    votes => [@votes],
    entry => [@lines],
    accept => $accept,
    raw => $raw,
    digest => digest_entry($raw),
    parno => $parno, # $. from backport_main()
  );
}
+
+sub edit_string {
+  # Edits $_[0] in an editor.
+  # $_[1] is used in error messages.
+ # Remaining arguments are key/value options; trailing_eol => N normalizes
+ # the result to end with exactly N newlines.  Returns the edited text
+ # (read back via `cat`); if $EDITOR fails, the edit is ignored with a warning.
+ die "$0: called edit_string() in non-interactive mode!" if $YES;
+ my $string = shift;
+ my $name = shift;
+ my %args = @_;
+ my $trailing_eol = $args{trailing_eol};
+ my ($fh, $fn) = my_tempfile();
+ print $fh $string;
+ $fh->flush or die $!;
+ system("$EDITOR -- $fn") == 0
+ or warn "\$EDITOR failed editing $name: $! ($?); "
+ ."edit results ($fn) ignored.";
+ my $rv = `cat $fn`;
+ $rv =~ s/\n*\z// and $rv .= ("\n" x $trailing_eol) if defined $trailing_eol;
+ $rv;
+}
+
+sub vote {
+ # Write recorded votes/approvals back into the STATUS file and commit them.
+ #
+ # $approved and $votes are hashrefs keyed by entry digest (see
+ # digest_entry): $approved->{$key} is an entry hash; $votes->{$key} is
+ # [$vote, \%entry] where $vote is '+1', '-1', '+0', '-0', or 'edit'.
+ # Approved entries are moved to the end of the file; $state is updated
+ # with the digests of committed binding votes.
+ my ($state, $approved, $votes) = @_;
+ # TODO: use votesarray instead of votescheck
+ my (%approvedcheck, %votescheck);
+ my $raw_approved = "";
+ my @votesarray;
+ return unless %$approved or %$votes;
+
+ # If $AVAILID is undef, we can only process 'edit' pseudovotes; handle_entry() is
+ # supposed to prevent numeric (±1,±0) votes from getting to this point.
+ die "Assertion failed" if not defined $AVAILID
+ and grep { $_ ne 'edit' } map { $_->[0] } values %$votes;
+
+ my $had_empty_line;
+
+ # Copy STATUS to a temp file paragraph-by-paragraph, rewriting the
+ # paragraphs that received votes or approvals.
+ $. = 0;
+ open STATUS, "<", $STATUS;
+ open VOTES, ">", "$STATUS.$$.tmp";
+ while (<STATUS>) {
+ $had_empty_line = /\n\n\z/;
+ my $key = digest_entry $_;
+
+ $approvedcheck{$key}++ if exists $approved->{$key};
+ $votescheck{$key}++ if exists $votes->{$key};
+
+ unless (exists $votes->{$key} or exists $approved->{$key}) {
+ print VOTES;
+ next;
+ }
+
+ unless (exists $votes->{$key}) {
+ push @votesarray, {
+ entry => $approved->{$key},
+ approval => 1,
+ digest => $key,
+ };
+ $raw_approved .= $_;
+ next;
+ }
+
+ # We have a vote, and potentially an approval.
+
+ my ($vote, $entry) = @{$votes->{$key}};
+ push @votesarray, {
+ entry => $entry,
+ vote => $vote,
+ approval => (exists $approved->{$key}),
+ digest => $key,
+ };
+
+ if ($vote eq 'edit') {
+ local $_ = $entry->{raw};
+ $votesarray[-1]->{digest} = digest_entry $_;
+ (exists $approved->{$key}) ? ($raw_approved .= $_) : (print VOTES);
+ next;
+ }
+
+ # Append $AVAILID to an existing vote line, or add a new vote line.
+ s/^(\s*\Q$vote\E:.*)/"$1, $AVAILID"/me
+ or s/(.*\w.*?\n)/"$1 $vote: $AVAILID\n"/se;
+ $_ = edit_string $_, $entry->{header}, trailing_eol => 2
+ if $vote ne '+1';
+ $votesarray[-1]->{digest} = digest_entry $_;
+ (exists $approved->{$key}) ? ($raw_approved .= $_) : (print VOTES);
+ }
+ close STATUS;
+ print VOTES "\n" if $raw_approved and !$had_empty_line;
+ print VOTES $raw_approved;
+ close VOTES;
+ warn "Some vote chunks weren't found: ",
+ join ',',
+ map $votes->{$_}->[1]->{id},
+ grep { !$votescheck{$_} } keys %$votes
+ if scalar(keys %$votes) != scalar(keys %votescheck);
+ warn "Some approval chunks weren't found: ",
+ join ',',
+ map $approved->{$_}->{id},
+ grep { !$approvedcheck{$_} } keys %$approved
+ if scalar(keys %$approved) != scalar(keys %approvedcheck);
+ prompt "Press the 'any' key to continue...\n", dontprint => 1
+ if scalar(keys %$approved) != scalar(keys %approvedcheck)
+ or scalar(keys %$votes) != scalar(keys %votescheck);
+ move "$STATUS.$$.tmp", $STATUS;
+
+ # Build the commit log message, one sentence per vote/approval.
+ my $logmsg = do {
+ my @sentences = map {
+ my $words_vote = ", approving" x $_->{approval};
+ my $words_edit = " and approve" x $_->{approval};
+ exists $_->{vote}
+ ? (
+ ( $_->{vote} eq 'edit'
+ ? "Edit$words_edit the $_->{entry}->{id} entry"
+ : "Vote $_->{vote} on $_->{entry}->{header}$words_vote"
+ )
+ . "."
+ )
+ : # exists only in $approved
+ "Approve $_->{entry}->{header}."
+ } @votesarray;
+ (@sentences == 1)
+ ? "* STATUS: $sentences[0]"
+ : "* STATUS:\n" . join "", map " $_\n", @sentences;
+ };
+
+ system "$SVN diff -- $STATUS";
+ printf "[[[\n%s%s]]]\n", $logmsg, ("\n" x ($logmsg !~ /\n\z/));
+ if (prompt "Commit these votes? ") {
+ my ($logmsg_fh, $logmsg_filename) = my_tempfile();
+ print $logmsg_fh $logmsg;
+ close $logmsg_fh;
+ system("$SVN commit -F $logmsg_filename -- $STATUS") == 0
+ or warn("Committing the votes failed($?): $!") and return;
+ unlink $logmsg_filename;
+
+ # Add to state votes that aren't '+0' or 'edit'
+ $state->{$_->{digest}}++ for grep
+ +{ qw/-1 t -0 t +1 t/ }->{$_->{vote}},
+ @votesarray;
+ }
+}
+
+sub check_local_mods_to_STATUS {
+ # Return 1 (after warning and showing a diff) if the STATUS file has local
+ # modifications, 0 otherwise.  Dies instead in non-interactive ($YES) mode.
+ if (`$SVN status -q $STATUS`) {
+ die "Local mods to STATUS file $STATUS" if $YES;
+ warn "Local mods to STATUS file $STATUS";
+ system "$SVN diff -- $STATUS";
+ prompt "Press the 'any' key to continue...\n", dontprint => 1;
+ return 1;
+ }
+ return 0;
+}
+
+sub renormalize_STATUS {
+ # Normalize whitespace in the STATUS file by piping an ex script into Vim,
+ # then commit the result when $MAY_COMMIT is set.  The heredoc below is the
+ # Vim script itself (its ':""' lines are Vim comments).
+ my $vimscript = <<'EOVIM';
+:"" Strip trailing whitespace before entries and section headers, but not
+:"" inside entries (e.g., multi-paragraph Notes: fields).
+:""
+:"" Since an entry is always followed by another entry, section header, or EOF,
+:"" there is no need to separately strip trailing whitespace from lines following
+:"" entries.
+:%s/\v\s+\n(\s*\n)*\ze(\s*[*]|\w)/\r\r/g
+
+:"" Ensure there is exactly one blank line around each entry and header.
+:""
+:"" First, inject a new empty line above and below each entry and header; then,
+:"" squeeze runs of empty lines together.
+:0/^=/,$ g/^ *[*]/normal! O
+:g/^=/normal! o
+:g/^=/-normal! O
+:
+:%s/\n\n\n\+/\r\r/g
+
+:"" Save.
+:wq
+EOVIM
+ open VIM, '|-', $VIM, qw/-e -s -n -N -i NONE -u NONE --/, $STATUS
+ or die "Can't renormalize STATUS: $!";
+ print VIM $vimscript;
+ close VIM or warn "$0: renormalize_STATUS failed ($?): $!)";
+
+ system("$SVN commit -m '* STATUS: Whitespace changes only.' -- $STATUS") == 0
+ or die "$0: Can't renormalize STATUS ($?): $!"
+ if $MAY_COMMIT;
+}
+
+sub revert {
+ # Revert the working copy.  Options: verbose => 0|1 (required; controls -q),
+ # discard_STATUS => 0|1 (when false, local mods to STATUS are preserved by
+ # copying it aside across the revert).
+ my %args = @_;
+ die "Bug: \$args{verbose} undefined" unless exists $args{verbose};
+ die "Bug: unknown argument" if grep !/^(?:verbose|discard_STATUS)$/, keys %args;
+
+ copy $STATUS, "$STATUS.$$.tmp" unless $args{discard_STATUS};
+ system("$SVN revert -q $STATUS") == 0
+ or die "revert failed ($?): $!";
+ system("$SVN revert -R ./" . (" -q" x !$args{verbose})) == 0
+ or die "revert failed ($?): $!";
+ move "$STATUS.$$.tmp", $STATUS unless $args{discard_STATUS};
+ $MERGED_SOMETHING = 0;
+}
+
+sub maybe_revert {
+  # This is both a SIGINT handler, and the tail end of main() in normal runs.
+  # @_ is 'INT' in the former case and () in the latter.
+ # Offers to revert pending merges, then exits (as a signal handler) or
+ # returns (at end of a normal run).
+ delete $SIG{INT} unless @_;
+ revert verbose => 1 if !$YES and $MERGED_SOMETHING and prompt 'Revert? ';
+ (@_ ? exit : return);
+}
+
+sub signal_handler {
+ # Generic signal handler: restore the terminal, then re-raise the signal
+ # with the default disposition so the process dies with the right status.
+ my $sig = shift;
+
+  # Clean up after prompt()
+ ReadMode 'normal';
+
+  # Fall back to default action
+ delete $SIG{$sig};
+ kill $sig, $$;
+}
+
+sub warning_summary {
+ # Print a summary of all warnings accumulated in %ERRORS
+ # (keyed by entry id; values are [\%entry, $message]).
+ return unless %ERRORS;
+
+ warn "Warning summary\n";
+ warn "===============\n";
+ warn "\n";
+ for my $id (keys %ERRORS) {
+ my $title = logsummarysummary $ERRORS{$id}->[0];
+ warn "$id ($title): $ERRORS{$id}->[1]\n";
+ }
+}
+
+sub read_state {
+  # die "$0: called read_state() in non-interactive mode!" if $YES;
+ # Read $STATEFILE (one digest per line) into a hashref of digest => count.
+ # A missing statefile is not an error: returns an empty hashref.
+
+ open my $fh, '<', $STATEFILE or do {
+ return {} if $!{ENOENT};
+ die "Can't read statefile: $!";
+ };
+
+ my %rv;
+ while (<$fh>) {
+ chomp;
+ $rv{$_}++;
+ }
+ return \%rv;
+}
+
+sub write_state {
+ # Write the state hashref's keys (entry digests) to $STATEFILE, one per line.
+ my $state = shift;
+ open STATE, '>', $STATEFILE or warn("Can't write state: $!"), return;
+ say STATE for keys %$state;
+ close STATE;
+}
+
+sub exit_stage_left {
+ # Normal exit path: offer to revert, summarize warnings, commit votes,
+ # persist state, and exit with the number of accumulated errors.
+ my $state = shift;
+ maybe_revert;
+ warning_summary if $YES;
+ vote $state, @_;
+ write_state $state;
+ exit scalar keys %ERRORS;
+}
+
+# Given an ENTRY, check whether all ENTRY->{revisions} have been merged
+# into ENTRY->{branch}, if it has one. If revisions are missing, record
+# a warning in $ERRORS. Return TRUE If the entry passed the validation
+# and FALSE otherwise.
+sub validate_branch_contains_named_revisions {
+ my %entry = @_;
+ return 1 unless defined $entry{branch};
+ my %present;
+
+ return "Why are you running so old versions?" # true in boolean context
+ if $SVNvsn < 1_005_000; # doesn't have the 'mergeinfo' subcommand
+
+ # Revisions "present" on the branch are those merged from trunk plus those
+ # eligible for merging from the branch itself (i.e., committed directly).
+ my $shell_escaped_branch = shell_escape($entry{branch});
+ %present = do {
+ my @present = `$SVN mergeinfo --show-revs=merged -- $TRUNK $BRANCHES/$shell_escaped_branch &&
+ $SVN mergeinfo --show-revs=eligible -- $BRANCHES/$shell_escaped_branch`;
+ chomp @present;
+ @present = map /(\d+)/g, @present;
+ map +($_ => 1), @present;
+ };
+
+ my @absent = grep { not exists $present{$_} } @{$entry{revisions}};
+
+ if (@absent) {
+ $ERRORS{$entry{id}} //= [\%entry,
+ sprintf("Revisions '%s' nominated but not included in branch",
+ (join ", ", map { "r$_" } @absent)),
+ ];
+ }
+ return @absent ? 0 : 1;
+}
+
+sub handle_entry {
+ # Process one STATUS entry: in non-interactive ($YES) mode, either merge it
+ # (svn-role mode) or dry-run it to scan for conflicts; in interactive mode,
+ # run the merge/vote/edit/approve prompt loop.
+ #
+ # Arguments: $in_approved (entry is in the "Approved changes" section),
+ # \%approved, \%votes, \%state (see vote()), $raw (entry text), $parno
+ # (paragraph number), $skip (optional pattern for skipping entries),
+ # followed by the entry's lines.  Always returns 1.
+ my $in_approved = shift;
+ my $approved = shift;
+ my $votes = shift;
+ my $state = shift;
+ my $raw = shift;
+ my $parno = shift;
+ my $skip = shift;
+ my %entry = parse_entry $raw, $parno, @_;
+ my @vetoes = grep /^\s*-1:/, @{$entry{votes}};
+
+ # NOTE(review): 'my' combined with a statement modifier ('unless $YES') has
+ # unspecified behavior in Perl; $match's value when $YES is set should not
+ # be relied upon -- confirm.
+ my $match = defined($skip) ? ($raw =~ /\Q$skip\E/ or $raw =~ /$skip/msi) : 0
+ unless $YES;
+
+ if ($YES) {
+ # Run a merge if:
+ unless (@vetoes) {
+ if ($MAY_COMMIT and $in_approved) {
+ # svn-role mode
+ merge \%entry if validate_branch_contains_named_revisions %entry;
+ } elsif (!$MAY_COMMIT) {
+ # Scan-for-conflicts mode
+
+ # First, sanity-check the entry. We ignore the result; even if it
+ # failed, we do want to check for conflicts, in the remainder of this
+ # block.
+ validate_branch_contains_named_revisions %entry;
+
+ # E155015 is SVN_ERR_WC_FOUND_CONFLICT
+ my $expected_error_p = sub {
+ my ($exit_code, $outlines, $errlines) = @_;
+ ($exit_code == 0)
+ or
+ (grep /svn: E155015:/, @$errlines)
+ };
+ merge \%entry, ($entry{depends} ? $expected_error_p : undef);
+
+ my $output = `$SVN status`;
+
+ # Pre-1.6 svn's don't have the 7th column, so fake it.
+ $output =~ s/^(......)/$1 /mg if $SVNvsn < 1_006_000;
+
+ my (@conflicts) = ($output =~ m#^(?:C......|.C.....|......C)\s(.*)#mg);
+ if (@conflicts and !$entry{depends}) {
+ $ERRORS{$entry{id}} //= [\%entry,
+ sprintf "Conflicts on %s%s%s",
+ '[' x !!$#conflicts,
+ (join ', ',
+ map { basename $_ }
+ @conflicts),
+ ']' x !!$#conflicts,
+ ];
+ say STDERR "Conflicts merging $entry{header}!";
+ say STDERR "";
+ say STDERR $output;
+ system "$SVN diff -- " . join ' ', shell_escape @conflicts;
+ } elsif (!@conflicts and $entry{depends}) {
+ # Not a warning since svn-role may commit the dependency without
+ # also committing the dependent in the same pass.
+ print "No conflicts merging $entry{header}, but conflicts were "
+ ."expected ('Depends:' header set)\n";
+ } elsif (@conflicts) {
+ say "Conflicts found merging $entry{header}, as expected.";
+ }
+ revert verbose => 0;
+ }
+ }
+ } elsif (defined($skip) ? not $match : $state->{$entry{digest}}) {
+ print "\n\n";
+ my $reason = defined($skip) ? "doesn't match pattern"
+ : "remove $STATEFILE to reset";
+ say "Skipping $entry{header} ($reason):";
+ say logsummarysummary \%entry;
+ } elsif ($match or not defined $skip) {
+ # This loop is just a hack because 'goto' panics. The goto should be where
+ # the "next PROMPT;" is; there's a "last;" at the end of the loop body.
+ PROMPT: while (1) {
+ say "";
+ say "\n>>> $entry{header_start}:";
+ say join ", ", map { "r$_" } @{$entry{revisions}} if @{$entry{revisions}};
+ say "$BRANCHES/$entry{branch}" if $entry{branch};
+ say "--accept=$entry{accept}" if $entry{accept};
+ say "";
+ say for @{$entry{logsummary}};
+ say "";
+ say for @{$entry{votes}};
+ say "";
+ say "Vetoes found!" if @vetoes;
+
+ # See above for why the while(1).
+ QUESTION: while (1) {
+ my $key = $entry{digest};
+ given (prompt 'Run a merge? [y,l,v,±1,±0,q,e,a, ,N,?] ',
+ verbose => 1, extra => qr/[+-]/) {
+ when (/^y/i) {
+ # TODO: validate_branch_contains_named_revisions %entry;
+ merge \%entry;
+ while (1) {
+ given (prompt "Shall I open a subshell? [ydN?] ", verbose => 1) {
+ when (/^y/i) {
+ # TODO: if $MAY_COMMIT, save the log message to a file (say,
+ # backport.logmsg in the wcroot).
+ system($SHELL) == 0
+ or warn "Creating an interactive subshell failed ($?): $!"
+ }
+ when (/^d/) {
+ system("$SVN diff | $PAGER") == 0
+ or warn "diff failed ($?): $!";
+ next;
+ }
+ when (/^[?]/i) {
+ print $BACKPORT_OPTIONS_MERGE_OPTIONS_HELP;
+ next;
+ }
+ when (/^N/i) {
+ # fall through.
+ }
+ default {
+ next;
+ }
+ }
+ revert verbose => 1;
+ next PROMPT;
+ }
+ # NOTREACHED
+ }
+ when (/^l/i) {
+ if ($entry{branch}) {
+ system "$SVN log --stop-on-copy -v -g -r 0:HEAD -- "
+ .shell_escape("$BRANCHES/$entry{branch}")." "
+ ."| $PAGER";
+ } elsif (@{$entry{revisions}}) {
+ system "$SVN log ".(join ' ', map { "-r$_" } @{$entry{revisions}})
+ ." -- ^/subversion | $PAGER";
+ } else {
+ die "Assertion failed: entry has neither branch nor revisions:\n",
+ '[[[', (join ';;', %entry), ']]]';
+ }
+ next PROMPT;
+ }
+ when (/^v/i) {
+ say "";
+ say for @{$entry{entry}};
+ say "";
+ next QUESTION;
+ }
+ when (/^q/i) {
+ exit_stage_left $state, $approved, $votes;
+ }
+ when (/^a/i) {
+ $approved->{$key} = \%entry;
+ next PROMPT;
+ }
+ when (/^([+-][01])\s*$/i) {
+ next QUESTION if warned_cannot_commit "Entering a vote failed";
+ $votes->{$key} = [$1, \%entry];
+ say "Your '$1' vote has been recorded." if $VERBOSE;
+ last PROMPT;
+ }
+ when (/^e/i) {
+ prompt "Press the 'any' key to continue...\n"
+ if warned_cannot_commit "Committing this edit later on may fail";
+ my $original = $entry{raw};
+ $entry{raw} = edit_string $entry{raw}, $entry{header},
+ trailing_eol => 2;
+ # TODO: parse the edited entry (empty lines, logsummary+votes, etc.)
+ $votes->{$key} = ['edit', \%entry] # marker for the 2nd pass
+ if $original ne $entry{raw};
+ last PROMPT;
+ }
+ when (/^N/i) {
+ $state->{$entry{digest}}++;
+ last PROMPT;
+ }
+ when (/^\x20/) {
+ last PROMPT; # Fall off the end of the given/when block.
+ }
+ when (/^[?]/i) {
+ print $BACKPORT_OPTIONS_HELP;
+ next QUESTION;
+ }
+ default {
+ say "Please use one of the options in brackets (q to quit)!";
+ next QUESTION;
+ }
+ }
+ last; } # QUESTION
+ last; } # PROMPT
+ } else {
+ # NOTREACHED
+ die "Unreachable code reached.";
+ }
+
+ 1;
+}
+
+
+sub backport_main {
+ # Entry point for 'backport' mode: read STATUS paragraph-by-paragraph,
+ # dispatching each backport entry to handle_entry(), then finish via
+ # exit_stage_left().  Supports an optional --renormalize flag and an
+ # optional skip pattern argument (interactive mode only).
+ my %approved;
+ my %votes;
+ my $state = read_state;
+ my $renormalize;
+
+ if (@ARGV && $ARGV[0] eq '--renormalize') {
+ $renormalize = 1;
+ shift;
+ }
+
+ backport_usage, exit 0 if @ARGV > ($YES ? 0 : 1) or grep /^--help$/, @ARGV;
+ backport_usage, exit 0 if grep /^(?:-h|-\?|--help|help)$/, @ARGV;
+ my $skip = shift; # maybe undef
+ # assert not defined $skip if $YES;
+
+ open STATUS, "<", $STATUS or (backport_usage, exit 1);
+
+ # Because we use the ':normal' command in Vim...
+ die "A vim with the +ex_extra feature is required for --renormalize and "
+ ."\$MAY_COMMIT modes"
+ if ($renormalize or $MAY_COMMIT) and `${VIM} --version` !~ /[+]ex_extra/;
+
+ # ### TODO: need to run 'revert' here
+ # ### TODO: both here and in merge(), unlink files that previous merges added
+ # When running from cron, there shouldn't be local mods. (For interactive
+ # usage, we preserve local mods to STATUS.)
+ system("$SVN info $STATUS >/dev/null") == 0
+ or die "$0: svn error; point \$SVN to an appropriate binary";
+
+ check_local_mods_to_STATUS;
+ renormalize_STATUS if $renormalize;
+
+ # Skip most of the file
+ $/ = ""; # paragraph mode
+ while (<STATUS>) {
+ last if /^Status of \d+\.\d+/;
+ }
+
+ $SIG{INT} = \&maybe_revert unless $YES;
+ $SIG{TERM} = \&signal_handler unless $YES;
+
+ my $in_approved = 0;
+ while (<STATUS>) {
+ my $lines = $_;
+ my @lines = split /\n/;
+
+ # Classify each paragraph by its first line.
+ given ($lines[0]) {
+ # Section header
+ when (/^[A-Z].*:$/i) {
+ say "\n\n=== $lines[0]" unless $YES;
+ $in_approved = $lines[0] =~ /^Approved changes/;
+ }
+ # Comment
+ when (/^[#\x5b]/i) {
+ next;
+ }
+ # Separator after section header
+ when (/^=+$/i) {
+ break;
+ }
+ # Backport entry?
+ when (/^ *\*/) {
+ warn "Too many bullets in $lines[0]" and next
+ if grep /^ *\*/, @lines[1..$#lines];
+ handle_entry $in_approved, \%approved, \%votes, $state, $lines, $.,
+ $skip,
+ @lines;
+ }
+ default {
+ warn "Unknown entry '$lines[0]'";
+ }
+ }
+ }
+
+ exit_stage_left $state, \%approved, \%votes;
+}
+
+sub nominate_main {
+ # Entry point for 'nominate' mode: build a STATUS entry for the revisions
+ # given in ARGV[0] with the justification in ARGV[1], insert it before the
+ # 'Veto'-ed section of STATUS, and offer to commit the nomination.
+ my $had_local_mods;
+
+ local $Text::Wrap::columns = 79;
+
+ $had_local_mods = check_local_mods_to_STATUS;
+
+ # Argument parsing.
+ nominate_usage, exit 0 if @ARGV != 2;
+ my (@revnums) = (+shift) =~ /(\d+)/g;
+ my $justification = shift;
+
+ die "Unable to proceed." if warned_cannot_commit "Nominating failed";
+
+ # Deduplicate and sort the revision numbers.
+ @revnums = sort { $a <=> $b } keys %{{ map { $_ => 1 } @revnums }};
+ die "No revision numbers specified" unless @revnums;
+
+ # Determine whether a backport branch exists
+ my ($URL) = `$SVN info` =~ /^URL: (.*)$/m;
+ die "Can't retrieve URL of cwd" unless $URL;
+
+ die unless shell_safe_path_or_url $URL;
+ system "$SVN info -- $URL-r$revnums[0] 2>/dev/null";
+ my $branch = ($? == 0) ? basename("$URL-r$revnums[0]") : undef;
+
+ # Construct entry.
+ my $logmsg = `$SVN propget --revprop -r $revnums[0] --strict svn:log '^/'`;
+ die "Can't fetch log message of r$revnums[0]: $!" unless $logmsg;
+
+ unless ($logmsg =~ s/^(.*?)\n\n.*/$1/s) {
+ # "* file\n (symbol): Log message."
+
+ # Strip before and after the first symbol's log message.
+ $logmsg =~ s/^.*?: //s;
+ $logmsg =~ s/^ \x28.*//ms;
+
+ # Undo line wrapping. (We'll re-do it later.)
+ $logmsg =~ s/\s*\n\s+/ /g;
+ }
+
+ my @lines;
+ warn "Wrapping [$logmsg]\n" if $DEBUG;
+ push @lines, wrap " * ", ' 'x3, join ', ', map "r$_", @revnums;
+ push @lines, wrap ' 'x3, ' 'x3, split /\n/, $logmsg;
+ push @lines, " Justification:";
+ push @lines, wrap ' 'x5, ' 'x5, $justification;
+ push @lines, " Branch: $branch" if defined $branch;
+ push @lines, " Votes:";
+ push @lines, " +1: $AVAILID";
+ push @lines, "";
+ my $raw = join "", map "$_\n", @lines;
+
+ # Open the file in line-mode (not paragraph-mode).
+ my @STATUS;
+ tie @STATUS, "Tie::File", $STATUS, recsep => "\n";
+ my ($index) = grep { $STATUS[$_] =~ /^Veto/ } (0..$#STATUS);
+ die "Couldn't find where to add an entry" unless $index;
+
+ # Add an empty line if needed.
+ if ($STATUS[$index-1] =~ /\S/) {
+ splice @STATUS, $index, 0, "";
+ $index++;
+ }
+
+ # Add the entry.
+ splice @STATUS, $index, 0, @lines;
+
+ # Save.
+ untie @STATUS;
+
+ # Done!
+ system "$SVN diff -- $STATUS";
+ if (prompt "Commit this nomination? ") {
+ system "$SVN commit -m '* STATUS: Nominate r$revnums[0].' -- $STATUS";
+ exit $?;
+ }
+ elsif (!$had_local_mods or prompt "Revert STATUS (destroying local mods)? ") {
+ # TODO: we could be smarter and just un-splice the lines we'd added.
+ system "$SVN revert -- $STATUS";
+ exit $?;
+ }
+
+ exit 0;
+}
+
+# Dispatch to the appropriate main().
+given (basename($0)) {
+ # Invoked as 'b'/'backport*' -> backport mode; 'n'/'nominate*' -> nominate
+ # mode; anything else defaults to backport mode.  The single-letter
+ # invocations also chdir to the script's directory first.
+ when (/^b$|backport/) {
+ chdir dirname $0 or die "Can't chdir: $!" if /^b$/;
+ &backport_main(@ARGV);
+ }
+ when (/^n$|nominate/) {
+ chdir dirname $0 or die "Can't chdir: $!" if /^n$/;
+ &nominate_main(@ARGV);
+ }
+ default {
+ &backport_main(@ARGV);
+ }
+}
diff --git a/tools/dist/backport/__init__.py b/tools/dist/backport/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tools/dist/backport/__init__.py
diff --git a/tools/dist/backport/merger.py b/tools/dist/backport/merger.py
new file mode 100644
index 0000000..863750c
--- /dev/null
+++ b/tools/dist/backport/merger.py
@@ -0,0 +1,280 @@
+#!/usr/bin/env python3
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""
+backport.merger - library for running STATUS merges
+"""
+
+import backport.status
+
+import contextlib
+import functools
+import logging
+import os
+import re
+import subprocess
+import sys
+import tempfile
+import time
+import unittest
+
+logger = logging.getLogger(__name__)
+
+# The 'svn' binary
+SVN = os.getenv('SVN', 'svn')
+# TODO: maybe run 'svn info' to check if it works / fail early?
+
+
+class UnableToMergeException(Exception):
+ # Raised when an entry cannot be merged (local mods, missing revisions on
+ # the backport branch, or mergeinfo-only changes).
+ pass
+
+
+def invoke_svn(argv):
+ "Run svn with ARGV as argv[1:]. Return (exit_code, stdout, stderr)."
+ # TODO(interactive mode): disable --non-interactive
+ child_env = os.environ.copy()
+ # LC_ALL=C — presumably to keep svn's output unlocalized for parsing;
+ # confirm against callers that scrape stdout.
+ child_env.update({'LC_ALL': 'C'})
+ argv = [SVN, '--non-interactive', '--config-option=config:miscellany:log-encoding=UTF-8'] + argv
+ child = subprocess.Popen(argv,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ env=child_env)
+ stdout, stderr = child.communicate()
+ # Both streams are decoded as UTF-8 before being returned.
+ return child.returncode, stdout.decode('UTF-8'), stderr.decode('UTF-8')
+
+def run_svn(argv, expected_stderr=None):
+ """Run svn with ARGV as argv[1:]. If EXPECTED_STDERR is None, raise if the
+ exit code is non-zero or stderr is non-empty. Else, treat EXPECTED_STDERR as
+ a regexp, and ignore an errorful exit or stderr messages if the latter match
+ the regexp. Return exit_code, stdout, stderr.
+
+ Raises subprocess.CalledProcessError (after logging a warning) on
+ unexpected failure or unexpected stderr output."""
+
+ exit_code, stdout, stderr = invoke_svn(argv)
+ if exit_code == 0 and not stderr:
+ return exit_code, stdout, stderr
+ elif expected_stderr and re.compile(expected_stderr).search(stderr):
+ return exit_code, stdout, stderr
+ else:
+ logger.warning("Unexpected stderr: %r", stderr)
+ # TODO: pass stdout/stderr to caller?
+ raise subprocess.CalledProcessError(returncode=exit_code,
+ cmd=[SVN] + argv)
+
+def run_svn_quiet(argv, *args, **kwargs):
+ "Wrapper for run_svn(-q)."
+ # Prepends '-q' to ARGV; all other arguments pass through to run_svn().
+ return run_svn(['-q'] + argv, *args, **kwargs)
+
+class Test_invoking_cmdline_client(unittest.TestCase):
+ # Unit tests for the run_svn()/svn_version() wrappers; requires a working
+ # 'svn' binary on PATH.
+ def test_run_svn(self):
+ _, stdout, _ = run_svn(['--version', '-q'])
+ self.assertRegex(stdout, r'^1\.[0-9]+\.[0-9]+')
+
+ # An expected error is tolerated...
+ run_svn(['--version', '--no-such-option'], "invalid option")
+
+ # ...but an unexpected one is logged and re-raised.
+ with self.assertLogs() as cm:
+ with self.assertRaises(subprocess.CalledProcessError):
+ run_svn(['--version', '--no-such-option'])
+ self.assertRegex(cm.output[0], "Unexpected stderr.*")
+
+ def test_svn_version(self):
+ self.assertGreaterEqual(svn_version(), (1, 0))
+
+
+@functools.lru_cache(maxsize=1)
+def svn_version():
+ "Return the version number of the 'svn' binary as a (major, minor) tuple."
+ # Cached: 'svn --version -q' is only invoked once per process.
+ _, stdout, _ = run_svn(['--version', '-q'])
+ match = re.compile(r'(\d+)\.(\d+)').match(stdout)
+ assert match
+ return tuple(map(int, match.groups()))
+
+def run_revert():
+ "Recursively revert the working copy at cwd. Return (exit_code, stdout, stderr)."
+ return run_svn(['revert', '-q', '-R', './'])
+
+def last_changed_revision(path_or_url):
+ "Return the 'Last Changed Rev:' of PATH_OR_URL."
+
+ if svn_version() >= (1, 9):
+ return int(run_svn(['info', '--show-item=last-changed-revision', '--',
+ path_or_url])[1])
+ else:
+ _, lines, _ = run_svn(['info', '--', path_or_url]).splitlines()
+ for line in lines:
+ if line.startswith('Last Changed Rev:'):
+ return int(line.split(':', 1)[1])
+ else:
+ raise Exception("'svn info' did not print last changed revision")
+
+def no_local_mods(path):
+ "Check PATH for local mods. Raise if there are any."
+ # Any output from 'svn status -q' means there are local modifications.
+ if run_svn(['status', '-q', '--', path])[1]:
+ raise UnableToMergeException("Local mods on {!r}".format(path))
+
+def _includes_only_svn_mergeinfo_changes(status_output):
+ """Return TRUE iff there is exactly one local mod, and it is an svn:mergeinfo
+ change. Use the provided `status -q` output."""
+
+ if len(status_output.splitlines()) != 1:
+ return False
+
+ # Note: runs 'svn diff' over the whole working copy at cwd.
+ _, diff_output, _ = run_svn(['diff'])
+
+ # Collect the property names touched by the diff; it qualifies only when
+ # svn:mergeinfo is the sole one.
+ pattern = re.compile(r'^(Added|Modified|Deleted): ')
+ targets = (line.split(':', 1)[1].strip()
+ for line in diff_output.splitlines()
+ if pattern.match(line))
+ if set(targets) == {'svn:mergeinfo'}:
+ return True
+
+ return False
+
+
+@contextlib.contextmanager
+def log_message_file(logmsg):
+ "Context manager that returns a file containing the text LOGMSG."
+ # Yields the file's name; the file is deleted when the context exits.
+ with tempfile.NamedTemporaryFile(mode='w+', encoding="UTF-8") as logmsg_file:
+ logmsg_file.write(logmsg)
+ logmsg_file.flush()
+ yield logmsg_file.name
+
+def merge(entry, expected_stderr=None, *, commit=False):
+ """Merges ENTRY into the working copy at cwd.
+
+ Do not commit the result, unless COMMIT is true. When committing,
+ remove ENTRY from its STATUS file prior to committing.
+
+ EXPECTED_STDERR will be passed to run_svn() for the actual 'merge' command."""
+
+ assert isinstance(entry, backport.status.StatusEntry)
+ assert entry.valid()
+ assert entry.status_file
+
+ sf = entry.status_file
+
+ # TODO(interactive mode): catch the exception
+ validate_branch_contains_named_revisions(entry)
+
+ # Prepare mergeargs and logmsg.
+ # NOTE(review): branch_url and reintegrated_word are only bound on the
+ # entry.branch path; the commit-time branch removal at the bottom relies
+ # on that same condition holding.
+ logmsg = ""
+ if entry.branch:
+ branch_url = sf.branch_url(entry.branch)
+ if svn_version() >= (1, 8):
+ mergeargs = ['--', branch_url]
+ logmsg = "Merge {}:\n".format(entry.noun())
+ reintegrated_word = "merged"
+ else:
+ mergeargs = ['--reintegrate', '--', branch_url]
+ logmsg = "Reintegrate {}:\n".format(entry.noun())
+ reintegrated_word = "reintegrated"
+ logmsg += "\n"
+ elif entry.revisions:
+ mergeargs = []
+ if entry.accept:
+ mergeargs.append('--accept=%s' % (entry.accept,))
+ logmsg += "Merge {} from trunk, with --accept={}:\n".\
+ format(entry.noun(), entry.accept)
+ else:
+ logmsg += "Merge {} from trunk:\n".format(entry.noun())
+ logmsg += "\n"
+ mergeargs.extend('-c' + str(revision) for revision in entry.revisions)
+ mergeargs.extend(['--', sf.trunk_url()])
+ logmsg += entry.raw
+
+ no_local_mods('.')
+
+ # TODO: use select() to restore interweaving of stdout/stderr
+ _, stdout, stderr = run_svn_quiet(['merge'] + mergeargs, expected_stderr)
+ sys.stdout.write(stdout)
+ sys.stderr.write(stderr)
+
+ _, stdout, _ = run_svn(['status', '-q'])
+ if _includes_only_svn_mergeinfo_changes(stdout):
+ raise UnableToMergeException("Entry %s includes only svn:mergeinfo changes"
+ % entry)
+
+ if commit:
+ sf.remove(entry)
+ sf.unparse(open('./STATUS', 'w'))
+
+ # HACK to make backport_tests pass - the tests should be changed!
+ s = open('./STATUS').read()
+ if s.endswith('\n\n'):
+ s = s[:-1]
+ open('./STATUS', 'w').write(s)
+
+ # Don't assume we can pass UTF-8 in argv.
+ with log_message_file(logmsg) as logmsg_filename:
+ run_svn_quiet(['commit', '-F', logmsg_filename])
+
+ # TODO(interactive mode): add the 'svn status' display
+
+ if entry.branch:
+ revnum = last_changed_revision('./STATUS')
+
+ if commit:
+ # Sleep to avoid out-of-order commit notifications
+ if not os.getenv("SVN_BACKPORT_DONT_SLEEP"): # enabled by the test suite
+ time.sleep(15)
+ second_logmsg = "Remove the {!r} branch, {} in r{}."\
+ .format(entry.branch, reintegrated_word, revnum)
+ run_svn(['rm', '-m', second_logmsg, '--', branch_url])
+ time.sleep(1)
+
+def validate_branch_contains_named_revisions(entry):
+ """Validate that every revision explicitly named in ENTRY has either been
+ merged to its backport branch from trunk, or has been committed directly to
+ its backport branch. Entries that declare no backport branches are
+ considered valid. Return on success, raise on failure."""
+ if not entry.branch:
+ return # valid
+
+ if svn_version() < (1,5): # doesn't have 'svn mergeinfo' subcommand
+ return # skip check
+
+ # Revisions "present" on the branch: merged from trunk, plus eligible for
+ # merging back (i.e., committed directly to the branch).
+ sf = entry.status_file
+ branch_url = sf.branch_url(entry.branch)
+ present_str = (
+ run_svn(['mergeinfo', '--show-revs=merged', '--', sf.trunk_url(), branch_url])[1]
+ +
+ run_svn(['mergeinfo', '--show-revs=eligible', '--', branch_url])[1]
+ )
+
+ present = map(int, re.compile(r'(\d+)').findall(present_str))
+
+ absent = set(entry.revisions) - set(present)
+
+ if absent:
+ raise UnableToMergeException("Revisions '{}' nominated but not included "
+ "in branch".format(
+ ', '.join('r%d' % revno
+ for revno in absent)))
+
+
+
+def setUpModule():
+ "Set-up function, invoked by 'python -m unittest'."
+ # Suppress warnings generated by the test data.
+ # TODO: some test functions assume .assertLogs is available, they fail with
+ # AttributeError if it's absent (e.g., on python < 3.4).
+ try:
+ unittest.TestCase.assertLogs
+ except AttributeError:
+ logger.setLevel(logging.ERROR)
+
+# Running this module directly executes its embedded unit tests.
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tools/dist/backport/status.py b/tools/dist/backport/status.py
new file mode 100644
index 0000000..727939d
--- /dev/null
+++ b/tools/dist/backport/status.py
@@ -0,0 +1,705 @@
+#!/usr/bin/env python3
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""
+backport.status - library for parsing and unparsing STATUS files
+"""
+
+# Recipe for interactive testing:
+# % python3
+# >>> import backport.status
+# >>> sf = backport.status.StatusFile(open('STATUS'))
+# >>> entries = [p.entry() for p in sf.entries_paras()]
+# >>> entries[0]
+# <backport.status.StatusEntry object at 0x1b88f90>
+# >>>
+
+import collections
+import hashlib
+import io
+import logging
+import re
+import unittest
+
+logger = logging.getLogger(__name__)
+
+
class ParseException(Exception):
    "Raised when STATUS content (a file or a single entry) cannot be parsed."
    pass
+
+
+class _ParagraphsIterator:
+ "A paragraph-based iterator for file-like objects."
+
+ def __init__(self, stream):
+ # KISS implementation, since STATUS files are small.
+ self.stream = stream
+ self.paragraphs = re.compile(r'\n\s*?\n+').split(stream.read())
+
+ def __iter__(self):
+ # Ensure there is exactly one trailing newline.
+ return iter(para.rstrip('\n') + "\n" for para in self.paragraphs)
+
class Test_ParagraphsIterator(unittest.TestCase):
    "Unit test for _ParagraphsIterator."
    def test_basic(self):
        "Blank-line runs split paragraphs; newlines are normalized."
        source = io.StringIO('foo\nfoo2\n\n\nbar\n')
        split = list(_ParagraphsIterator(source))
        self.assertEqual(split, ['foo\nfoo2\n', 'bar\n'])
+
+
class Kind:
    "The kind of a single physical paragraph of STATUS. See 'Paragraph'."

    # Sentinel objects compared by identity; one per paragraph kind.
    # TODO: the 'enum' module of Python 3.4 would remove the repetition
    # and give these a useful repr().
    preamble = object()
    section_header = object()
    nomination = object()
    unknown = object()

    @classmethod
    def exists(cls, kind):
        "Return True iff KIND is one of the Kind.* enumerators."
        return kind in {cls.preamble, cls.section_header,
                        cls.nomination, cls.unknown}
+
class Paragraph:
    """A single physical paragraph of STATUS, which may be either a nomination
    or something else."""

    # Private for is_header(): a line consisting solely of equals signs.
    _re_equals_line = re.compile(r'^=+$')

    def __init__(self, kind, text, entry, containing_section):
        """Constructor.

        KIND is one of the Kind.* enumerators.

        TEXT is the physical text in the file, used by unparsing.

        ENTRY is the StatusEntry object, if Kind.nomination, else None.

        CONTAINING_SECTION is the text of the section header this paragraph
        appears within.  (If this paragraph is a section header, this refers
        to itself.)
        """
        assert Kind.exists(kind)
        # An entry accompanies nominations, and only nominations.
        assert (entry is not None) == (kind is Kind.nomination)
        self.kind = kind
        self.text = text
        self._entry = entry
        self._containing_section = containing_section

    @classmethod
    def is_header(cls, para_text):
        """PARA_TEXT is a single physical paragraph, as a bare multiline string.

        If PARA_TEXT is a section header, return the header text; else, return
        False."""
        pieces = para_text.split('\n', 2)
        if len(pieces) != 3:
            return False
        title, underline, trailer = pieces
        # A header is exactly "Title:" over an all-equals underline, with
        # nothing after the underline.
        if trailer != '' or not title.endswith(':'):
            return False
        if not cls._re_equals_line.match(underline):
            return False
        header = title.rstrip(':')
        return header if header else False

    def entry(self):
        "Validating accessor for ENTRY."
        assert self.kind is Kind.nomination
        return self._entry

    def section(self):
        "Validating accessor for CONTAINING_SECTION."
        assert self.kind is not Kind.preamble
        return self._containing_section

    def approved(self):
        "TRUE if this paragraph is in the approved section, false otherwise."
        assert self.kind
        # ### backport.pl used to check just .startswith() here.
        return self.section() == "Approved changes"

    def unparse(self, stream):
        "Write this paragraph to STREAM, an open file-like object."
        if self.kind is Kind.nomination:
            self.entry().unparse(stream)
        elif self.kind in (Kind.preamble, Kind.section_header, Kind.unknown):
            stream.write(self.text + "\n")
        else:
            assert False, "Unknown paragraph kind"

    def __repr__(self):
        return "<Paragraph({!r}, {!r}, {!r}, {!r})>".format(
            self.kind, self.text, self._entry, self._containing_section
        )
+
+
class StatusFile:
    "Encapsulates the STATUS file."

    def __init__(self, status_fp):
        "Constructor. STATUS_FP is an open file-like object to parse."
        self._parse(status_fp)
        self.validate_unique_entry_ids() # Use-case for making this optional?
        # Root prefix for URL helpers below; '^' is svn's repo-root shorthand.
        self._project_root_url = '^/subversion'

    def branch_url(self, branch_basename):
        """Return the URL of a branch with a given basename, of 'Branch:' headers
        that specify a basename only.

        The returned URL may be an ^/foo short URL."""
        return (self._project_root_url + "/branches/" + branch_basename)

    def trunk_url(self):
        """Return the URL to trunk. Trunk is used as the default merge source.

        The returned URL may be an ^/foo short URL."""
        return self._project_root_url + '/trunk'

    def _parse(self, status_fp):
        "Parse self.status_fp into self.paragraphs."

        self.paragraphs = []
        last_header = None
        for para_text in _ParagraphsIterator(status_fp):
            kind = None
            entry = None
            header = Paragraph.is_header(para_text)
            if para_text.isspace():
                # Whitespace-only filler carries no information; drop it.
                continue
            elif header:
                kind = Kind.section_header
                last_header = header
            elif last_header is not None:
                # Under a section header, every paragraph should be a
                # nomination; unparseable ones are kept but marked unknown so
                # unparse() can still round-trip the file.
                try:
                    entry = StatusEntry(para_text, status_file=self)
                    kind = Kind.nomination
                except ParseException:
                    kind = Kind.unknown
                    logger.warning("Failed to parse entry {!r} in {!r}".format(
                        para_text, status_fp))
            else:
                # Everything before the first section header is preamble.
                kind = Kind.preamble

            self.paragraphs.append(Paragraph(kind, para_text, entry, last_header))

    def entries_paras(self):
        "Return an iterator over entries"
        return filter(lambda para: para.kind is Kind.nomination,
                      self.paragraphs)

    def validate_unique_entry_ids(self):
        # TODO: what about [r42, r43] and [r41, r43] entry pairs?
        """Check if two entries have the same id. If so, mark them both
        inoperative."""

        # Build an auxiliary data structure.
        id2entry = collections.defaultdict(list)
        for para in self.entries_paras():
            entry = para.entry()
            id2entry[entry.id()].append(para)

        # Examine it for problems.
        for entry_id, entry_paras in id2entry.items():
            if len(entry_paras) != 1:
                # Found a problem.
                #
                # Warn about it, and ignore all involved entries.
                logger.warning("There is more than one {} entry; ignoring them in "
                               "further processing".format(entry_id))
                for para in entry_paras:
                    # Demoting to 'unknown' makes entries_paras() skip them
                    # while unparse() still writes them out verbatim.
                    para.kind = Kind.unknown

    def remove(self, entry):
        "Remove ENTRY from SELF."
        for para in self.entries_paras():
            if para.entry() is entry:
                self.paragraphs.remove(para)
                return
        else:
            # for/else: reached only when the loop found nothing to remove.
            assert False, "Attempted to remove non-existent entry"

    def unparse(self, stream):
        "Write the STATUS file to STREAM, an open file-like object."
        for para in self.paragraphs:
            para.unparse(stream)
+
+
class Test_StatusFile(unittest.TestCase):
    "Unit tests for StatusFile parsing, sectioning, and round-tripping."

    def test__paragraph_is_header(self):
        # A header is a "Title:" line over an all-equals underline.
        self.assertTrue(Paragraph.is_header("Nominations:\n========\n"))
        self.assertFalse(Paragraph.is_header("Status of 1.9.12:\n"))

    def test_parse_unparse(self):
        "Parse a STATUS file with an unparseable paragraph; unparse round-trips."
        s = (
          "*** This release stream is used for testing. ***\n"
          "\n"
          "Candidate changes:\n"
          "==================\n"
          "\n"
          " * r42\n"
          "   Bump version number to 1.0.\n"
          "   Votes:\n"
          "     +1: jrandom\n"
          "\n"
          "Approved changes:\n"
          "=================\n"
          "\n"
          "This paragraph will trigger an exception.\n"
          "\n"
          " * r43\n"
          "   Bump version number to 1.0.\n"
          "   Votes:\n"
          "     +1: jrandom\n"
          "\n"
        )
        test_file = io.StringIO(s)
        with test_file:
            # The non-entry paragraph must be warned about, not fatal.
            with self.assertLogs() as cm:
                sf = StatusFile(test_file)
            self.assertRegex(cm.output[0], "Failed to parse.*'.*will trigger.*'")

        self.assertSequenceEqual(
            tuple(para.kind for para in sf.paragraphs),
            (Kind.preamble,
             Kind.section_header, Kind.nomination,
             Kind.section_header, Kind.unknown, Kind.nomination)
        )
        self.assertFalse(sf.paragraphs[1].approved()) # header
        self.assertFalse(sf.paragraphs[2].approved()) # nomination
        self.assertTrue(sf.paragraphs[3].approved()) # header
        self.assertTrue(sf.paragraphs[4].approved()) # unknown

        self.assertIs(sf.paragraphs[2].entry().status_file, sf)

        # Unparsing must reproduce the input byte-for-byte, including the
        # paragraph that failed to parse.
        output_file = io.StringIO()
        sf.unparse(output_file)
        self.assertEqual(s, output_file.getvalue())

    def test_double_nomination(self):
        "Test two nominations of the same group"

        test_file = io.StringIO(
          "Approved changes:\n"
          "=================\n"
          "\n"
          " * r42\n"
          "   First time.\n"
          "\n"
          " * r42\n"
          "   Second time.\n"
          "\n"
        )

        with test_file:
            with self.assertLogs() as cm:
                sf = StatusFile(test_file)
            self.assertRegex(cm.output[0], "There is more than one r42 entry")
            # Both duplicates must be demoted to 'unknown'.
            self.assertIs(sf.paragraphs[1].kind, Kind.unknown)
            self.assertIs(sf.paragraphs[2].kind, Kind.unknown)
+
+
class StatusEntry:
    """Encapsulates a single nomination.

    An Entry has the following attributes:

    branch - the backport branch's basename, or None.
    revisions - the nominated revisions, as iterable of int.
    logsummary - the text before the justification, as an array of lines.
    depends - true if a "Depends:" entry was found, False otherwise.
    accept - the value to pass to 'svn merge --accept=%s', or None.
    votes_str - everything after the "Votes:" subheader. An unparsed string.
    """

    def __init__(self, para_text, status_file=None):
        """Parse an entry from PARA_TEXT, and add it to SELF. PARA_TEXT must
        contain exactly one entry, as a single multiline string.

        STATUS_FILE is the StatusFile object containing this entry, if any.
        """
        self.branch = None
        self.revisions = []
        self.logsummary = []
        self.depends = False
        self.accept = None
        self.votes_str = None
        self.status_file = status_file

        # Verbatim text, kept for unparse() and digest().
        self.raw = para_text

        _re_entry_indentation = re.compile(r'^( *\* )')
        _re_revisions_line = re.compile(r'^(?:r?\d+[,; ]*)+$')

        lines = para_text.rstrip().split('\n')

        # Strip indentation and trailing whitespace.
        match = _re_entry_indentation.match(lines[0])
        if not match:
            raise ParseException("Entry found with no ' * ' line")
        indentation = len(match.group(1))
        lines = (line[indentation:] for line in lines)
        lines = (line.rstrip() for line in lines)

        # Consume the generator.
        lines = list(lines)

        # Parse the revisions lines.
        # The first line either names a branch ("The foo branch" or
        # ".../branches/foo") or starts a run of bare revision-number lines.
        match = re.compile(r'(\S*) branch|branches/(\S*)').search(lines[0])
        if match:
            # Parse whichever group matched.
            self.branch = self.parse_branch(match.group(1) or match.group(2))
        else:
            # NOTE(review): if *every* line matches as revisions, lines[0]
            # eventually raises IndexError rather than ParseException --
            # assumes real entries always carry a logsummary line; confirm.
            while _re_revisions_line.match(lines[0]):
                self.revisions.extend(map(int, re.compile(r'(\d+)').findall(lines[0])))
                lines = lines[1:]

        # Validate it now, since later exceptions rely on it.
        if not(self.branch or self.revisions):
            raise ParseException("Entry found with neither branch nor revisions")

        # Parse the logsummary: everything up to the first subheader line.
        while lines and not self._is_subheader(lines[0]):
            self.logsummary.append(lines[0])
            lines = lines[1:]

        # Parse votes.
        if "Votes:" in lines:
            index = lines.index("Votes:")
            self.votes_str = '\n'.join(lines[index+1:]) + '\n'
            lines = lines[:index]
            del index
        else:
            self.votes_str = None

        # depends, branch, notes
        while lines:

            if lines[0].strip().startswith('Depends:'):
                # Only the presence is recorded; the explanation is dropped.
                self.depends = True
                lines = lines[1:]
                continue

            if lines[0].strip().startswith('Branch:'):
                maybe_value = lines[0].strip().split(':', 1)[1]
                if maybe_value.strip():
                    # Value on same line as header
                    self.branch = self.parse_branch(maybe_value)
                    lines = lines[1:]
                    continue
                else:
                    # Value should be on next line
                    if len(lines) == 1:
                        raise ParseException("'Branch:' header found without value")
                    self.branch = self.parse_branch(lines[1])
                    lines = lines[2:]
                    continue

            if lines[0].strip().startswith('Notes:'):
                notes = lines[0].strip().split(':', 1)[1] + "\n"
                lines = lines[1:]

                # Consume the indented body of the "Notes" field.
                while lines and not lines[0][0].isalnum():
                    notes += lines[0] + "\n"
                    lines = lines[1:]

                # Look for possible --accept directives.
                matches = re.compile(r'--accept[ =]([a-z-]+)').findall(notes)
                if len(matches) > 1:
                    raise ParseException("Too many --accept values at %s" % (self,))
                elif len(matches) == 1:
                    self.accept = matches[0]

                continue

            # else: unrecognized subheader; skip it.
            lines = lines[1:]
            continue

        # Some sanity checks.
        if self.branch and self.accept:
            raise ParseException("Entry %s has both --accept and branch" % (self,))

        if not self.logsummary:
            raise ParseException("No logsummary at %s" % (self,))

    def digest(self):
        """Return a unique digest of this entry, with the following property: any
        change to the entry will cause the digest value to change."""

        # Digest the raw text, canonicalizing the number of trailing newlines.
        # There is no particular reason to use md5 over anything else, except for
        # compatibility with existing .backports1 files in people's working copies.
        return hashlib.md5(self.raw.rstrip('\n').encode('UTF-8')
                           + b"\n\n").hexdigest()

    @staticmethod
    def parse_branch(string):
        "Extract a branch name from STRING."
        # Accepts bare basenames, 'branches/foo', and '^/...' URLs alike.
        return string.strip().rstrip('/').split('/')[-1]

    def valid(self):
        "Test the invariants."
        return all([
            self.branch or self.revisions,
            self.logsummary,
            not(self.branch and self.accept),
        ])

    def id(self):
        "Return the first revision or branch's name."
        # Assert a minimal invariant, since this is used by error paths.
        assert self.branch or self.revisions
        if self.branch is not None:
            return self.branch
        else:
            return "r{:d}".format(self.revisions[0])

    def noun(self, start_of_sentence=False):
        """Return a noun phrase describing this entry.
        START_OF_SENTENCE is used to correctly capitalize the result."""
        # Assert a minimal invariant, since this is used by error paths.
        assert self.branch or self.revisions
        if start_of_sentence:
            the = "The"
        else:
            the = "the"
        if self.branch is not None:
            return "{} {} branch".format(the, self.branch)
        elif len(self.revisions) == 1:
            return "r{:d}".format(self.revisions[0])
        else:
            return "{} r{:d} group".format(the, self.revisions[0])

    def logsummarysummary(self):
        "Return a one-line summary of the changeset."
        assert self.valid()
        suffix = "" if len(self.logsummary) == 1 else " [...]"
        return self.logsummary[0] + suffix

    # Private for is_vetoed()
    _re_vetoed = re.compile(r'^\s*(-1:|-1\s*[()])', re.MULTILINE)
    def is_vetoed(self):
        "Return TRUE iff a -1 vote has been cast."
        # NOTE(review): raises TypeError when votes_str is None (entry had no
        # "Votes:" subheader) -- confirm callers only query voted entries.
        return self._re_vetoed.search(self.votes_str)

    @staticmethod
    def _is_subheader(string):
        """Given a single line from an entry, is that line a subheader (such as
        "Justification:" or "Notes:")?"""
        # TODO: maybe change the 'subheader' heuristic? Perhaps "line starts with
        # an uppercase letter and ends with a colon".
        #
        # This is currently only used for finding the end of logsummary, and all
        # explicitly special-cased headers (e.g., "Depends:") match this, though.
        return re.compile(r'^\s*[A-Z]\w*:').match(string)

    def unparse(self, stream):
        "Write this entry to STREAM, an open file-like object."
        # For now, this is simple.. until we add interactive editing.
        stream.write(self.raw + "\n")
+
class Test_StatusEntry(unittest.TestCase):
    "Unit tests for the StatusEntry parser and accessors."

    def test___init__(self):
        "Test the entry parser"

        # All these entries actually have a "four spaces" line as their last line,
        # but the parser doesn't care.

        # Multi-line revisions list plus a 'Branch:' header with the value on
        # the following line.
        s = """\
 * r42, r43,
   r44
   This is the logsummary.
   Branch:
     1.8.x-rfourty-two
   Votes:
     +1: jrandom
    """
        entry = StatusEntry(s)
        self.assertEqual(entry.branch, "1.8.x-rfourty-two")
        self.assertEqual(entry.revisions, [42, 43, 44])
        self.assertEqual(entry.logsummary, ["This is the logsummary."])
        self.assertEqual(entry.logsummarysummary(), "This is the logsummary.")
        self.assertFalse(entry.depends)
        self.assertIsNone(entry.accept)
        self.assertIn("+1: jrandom", entry.votes_str)
        self.assertFalse(entry.is_vetoed())
        self.assertEqual(entry.id(), "1.8.x-rfourty-two")
        self.assertEqual(entry.noun(True), "The 1.8.x-rfourty-two branch")
        self.assertEqual(entry.noun(), "the 1.8.x-rfourty-two branch")

        # Depends, a Notes body carrying an --accept directive, and a veto.
        s = """\
 * r42
   This is the logsummary.
   It has multiple lines.
   Depends: must be merged before the r43 entry"
   Notes:
     Merge with --accept=theirs-conflict.
   Votes:
     +1: jrandom
     -1: jconstant
    """
        entry = StatusEntry(s)
        self.assertIsNone(entry.branch)
        self.assertEqual(entry.revisions, [42])
        self.assertEqual(entry.logsummary,
                         ["This is the logsummary.",
                          "It has multiple lines."])
        self.assertEqual(entry.logsummarysummary(),
                         "This is the logsummary. [...]")
        self.assertTrue(entry.depends)
        self.assertEqual(entry.accept, "theirs-conflict")
        self.assertRegex(entry.votes_str, "(?s)jrandom.*jconstant") # re.DOTALL
        self.assertTrue(entry.is_vetoed())
        self.assertEqual(entry.id(), "r42")
        self.assertEqual(entry.noun(), "r42")

        # Branch named via a URL on the first line; parenthesized -1 veto.
        s = """\
 * ^/subversion/branches/1.8.x-fixes
   This is the logsummary.
   Votes:
     +1: jrandom
     -1 (see <message-id>): jconstant
    """
        entry = StatusEntry(s)
        self.assertEqual(entry.branch, "1.8.x-fixes")
        self.assertEqual(entry.revisions, [])
        self.assertTrue(entry.is_vetoed())

        # 'Branch:' header with the value on the same line.
        s = """\
 * r42
   This is the logsummary.
   Branch: ^/subversion/branches/on-the-same-line
   Votes:
     +1: jrandom
    """
        entry = StatusEntry(s)
        self.assertEqual(entry.branch, "on-the-same-line")
        self.assertEqual(entry.revisions, [42])

        # No explicit __str__ is defined; this exercises the default
        # object repr fallback and the raw-text round-trip.
        self.assertTrue(str(entry)) # tests __str__
        self.assertEqual(entry.raw, s)

        # Branch named in prose ("The ... branch") on the first line.
        s = """\
 * The 1.8.x-fixes branch
   This is the logsummary.
   Votes:
     +1: jrandom
    """
        entry = StatusEntry(s)
        self.assertEqual(entry.branch, "1.8.x-fixes")

        # A branch entry may not also carry an --accept directive.
        s = """\
 * The 1.8.x-fixes branch
   This is the logsummary.
   Notes: merge with --accept=tc.
   Votes:
     +1: jrandom
    """
        with self.assertRaisesRegex(ParseException, "both.*accept.*branch"):
            entry = StatusEntry(s)

        # A logsummary is mandatory.
        s = """\
 * r42
   Votes:
     +1: jrandom
    """
        with self.assertRaisesRegex(ParseException, "No logsummary"):
            entry = StatusEntry(s)

        # At most one --accept directive is allowed across the Notes body.
        s = """\
 * r42
   This is the logsummary.
   Notes: merge with --accept=mc.
     This tests multi-line notes.
     Merge with --accept=tc.
   Votes:
     +1: jrandom
    """
        with self.assertRaisesRegex(ParseException, "Too many.*--accept"):
            entry = StatusEntry(s)

        # logsummary that resembles a subheader
        # (lowercase start, so _is_subheader() must not match it)
        s = """\
 * r42
   svnversion: Fix typo in output.
   Justification:
     Fixes output that scripts depend on.
   Votes:
     +1: jrandom
    """
        entry = StatusEntry(s)
        self.assertEqual(entry.revisions, [42])
        self.assertEqual(entry.logsummary, ["svnversion: Fix typo in output."])

    def test_digest(self):
        # The digest is tied to the entry's exact raw bytes, modulo the
        # number of trailing newlines.
        s = """\
 * r42
   Fix a bug.
   Votes:
     +1: jrandom\n"""
        digest = '92812e1f36a33f7d51670f89134ad2ee'
        entry = StatusEntry(s)
        self.assertEqual(entry.digest(), digest)

        # Trailing newlines are canonicalized away.
        entry = StatusEntry(s + "\n\n\n")
        self.assertEqual(entry.digest(), digest)

        # Any other change must change the digest.
        entry = StatusEntry(s.replace('Fix', 'Introduce'))
        self.assertNotEqual(entry.digest(), digest)

    def test_parse_branch(self):
        # All spellings of the same branch must normalize to its basename.
        inputs = (
            "1.8.x-r42",
            "branches/1.8.x-r42",
            "branches/1.8.x-r42/",
            "subversion/branches/1.8.x-r42",
            "subversion/branches/1.8.x-r42/",
            "^/subversion/branches/1.8.x-r42",
            "^/subversion/branches/1.8.x-r42/",
        )

        for string in inputs:
            self.assertEqual(StatusEntry.parse_branch(string), "1.8.x-r42")

    def test__is_subheader(self):
        "Test that all explicitly-special-cased headers are detected as subheaders."
        subheaders = "Justification: Notes: Depends: Branch: Votes:".split()
        for subheader in subheaders:
            self.assertTrue(StatusEntry._is_subheader(subheader))
            self.assertTrue(StatusEntry._is_subheader(subheader + " with value"))
+
+
def setUpModule():
    "Set-up function, invoked by 'python -m unittest'."
    # Some test data is intentionally malformed and generates warnings; on
    # Pythons missing TestCase.assertLogs (< 3.4) those warnings would leak
    # into the test output, so suppress them.
    if not hasattr(unittest.TestCase, 'assertLogs'):
        logger.setLevel(logging.ERROR)
+
if __name__ == '__main__':
    # Running this file directly executes the embedded unit tests.
    unittest.main()
diff --git a/tools/dist/backport_tests.py b/tools/dist/backport_tests.py
new file mode 100644
index 0000000..ec483a7
--- /dev/null
+++ b/tools/dist/backport_tests.py
@@ -0,0 +1,694 @@
+#!/usr/bin/env python
+# py:encoding=utf-8
+#
+# backport_tests.py: Test backport.pl or backport.py
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# We'd like to test backport.pl and backport.py the same way, and to reuse
+# the svntest Python harness. Since the latter standardizes argv parsing,
+# we can't use argv to determine whether .py or .pl should be tested. Thus,
+# we implement the tests themselves in this file, while two driver files
+# (backport_tests_pl.py and backport_tests_py.py) invoke this file set
+# to run either backport-suite implementation.
+#
+# ### Note: the two driver scripts use the same repository names in
+# ### svn-test-work. This is not ideal, but hopefully acceptable
+# ### temporarily until we switch over to backport.py and remove backport.pl.
+# ###
+# ### See svntest.testcase.FunctionTestCase.get_sandbox_name().
# This file is exec'd by a driver (backport_tests_pl.py / backport_tests_py.py)
# that must predefine both runner hooks in our namespace; fail fast and loudly
# if either is missing.
try:
  run_backport, run_conflicter
except NameError:
  raise Exception("Failure: %s should not be run directly, or the wrapper "
                  "does not define both run_backport() and run_conflicter()"
                  % __file__)
+
+# General modules
+import contextlib
+import functools
+import os
+import re
+import sys
+
@contextlib.contextmanager
def chdir(dir):
  """Context manager: run the body with DIR as the current working directory,
  restoring the previous working directory on exit, even on exceptions."""
  # Record the directory to restore *before* entering the try block: if
  # getcwd() or the initial chdir() fails, there is nothing to undo yet, so
  # the finally clause must not run.
  saved_dir = os.getcwd()
  os.chdir(dir)
  try:
    yield
  finally:
    os.chdir(saved_dir)
+
# Our testing module
# HACK: chdir to cause svntest.main.svn_binary to be set correctly
sys.path.insert(0, os.path.abspath('../../subversion/tests/cmdline'))
with chdir('../../subversion/tests/cmdline'):
  import svntest

# (abbreviations)
Skip = svntest.testcase.Skip_deco
SkipUnless = svntest.testcase.SkipUnless_deco
XFail = svntest.testcase.XFail_deco
Issues = svntest.testcase.Issues_deco
Issue = svntest.testcase.Issue_deco
Wimp = svntest.testcase.Wimp_deco

######################################################################
# Helper functions

# Path of the STATUS file inside the backport branch working copy.
STATUS = 'branch/STATUS'
+
class BackportTest(object):
  """Decorator.  See self.__call__()."""

  def __init__(self, uuid):
    """The argument is the UUID embedded in the dump file.
    If the argument is None, then there is no dump file."""
    self.uuid = uuid

  def __call__(self, test_func):
    """Return a decorator that: builds TEST_FUNC's sbox, creates
    ^/subversion/trunk, calls TEST_FUNC, and compares the resulting history
    to the expected dump file, which is named after TEST_FUNC."""

    # .wraps() propagates the wrappee's docstring to the wrapper.
    @functools.wraps(test_func)
    def wrapped_test_func(sbox):
      # Use __name__, not the Python-2-only 'func_name' alias, so the dump
      # file is located correctly under Python 3 as well.
      expected_dump_file = './backport_tests_data/%s.dump' % (test_func.__name__,)

      sbox.build()

      # r2: prepare ^/subversion/ tree
      sbox.simple_mkdir('subversion', 'subversion/trunk')
      sbox.simple_mkdir('subversion/tags', 'subversion/branches')
      sbox.simple_move('A', 'subversion/trunk')
      sbox.simple_move('iota', 'subversion/trunk')
      sbox.simple_commit(message='Create trunk')

      # r3: branch
      sbox.simple_copy('subversion/trunk', 'branch')
      sbox.simple_append('branch/STATUS', '')
      sbox.simple_add('branch/STATUS')
      sbox.simple_commit(message='Create branch, with STATUS file')

      # r4: random change on trunk
      sbox.simple_append('subversion/trunk/iota', 'First change\n')
      sbox.simple_commit(message='First change')

      # r5: random change on trunk
      sbox.simple_append('subversion/trunk/A/mu', 'Second change\n')
      sbox.simple_commit(message='Second change')

      # Do the work.
      test_func(sbox)

      # Verify it.
      verify_backport(sbox, expected_dump_file, self.uuid)
    return wrapped_test_func
+
def make_entry(revisions=None, logsummary=None, notes=None, branch=None,
               depends=None, votes=None):
  """Build and return an entry dict for serialize_entry().

  REVISIONS is required; LOGSUMMARY and VOTES get defaults when omitted."""
  assert revisions
  return {
    'revisions': revisions,
    'logsummary': "default logsummary" if logsummary is None else logsummary,
    'notes': notes,
    'branch': branch,
    'depends': depends,
    # Fresh dict per call -- never shared between entries.
    'votes': {+1 : ['jrandom']} if votes is None else votes,
  }
+
def serialize_entry(entry):
  """Render ENTRY (a dict as built by make_entry()) as STATUS-file text,
  including the blank separator line that follows every entry."""
  # NOTE(review): the vote-symbol map collapses the keys 0 and -0 (they are
  # equal ints), so a '+0' vote renders as '-0'; int keys cannot express the
  # distinction -- confirm no caller needs '+0'.
  return ''.join([

    # revisions
    ' * %s\n'
    % (", ".join("r%ld" % revision for revision in entry['revisions'])),

    # logsummary
    '   %s\n' % (entry['logsummary'],),

    # notes (omitted when absent)
    '   Notes: %s\n' % (entry['notes'],) if entry['notes'] else '',

    # branch (omitted when absent)
    '   Branch: %s\n' % (entry['branch'],) if entry['branch'] else '',

    # depends (omitted when absent)
    '   Depends: %s\n' % (entry['depends'],) if entry['depends'] else '',

    # votes
    '   Votes:\n',
    ''.join('     %s: %s\n' % ({1: '+1', 0: '+0', -1: '-1', -0: '-0'}[vote],
                               ", ".join(entry['votes'][vote]))
            for vote in entry['votes']),

    '\n', # empty line after entry
  ])

def serialize_STATUS(approveds,
                     candidates=(),
                     serialize_entry=serialize_entry):
  """Construct and return the contents of a STATUS file.

  APPROVEDS is an iterable of ENTRY dicts.  The dicts are defined
  to have the following keys: 'revisions', a list of revision numbers (ints);
  'logsummary'; and 'votes', a dict mapping ±1/±0 (int) to list of voters.

  CANDIDATES is like APPROVEDS, except added to a different section of the
  file.  (Its default is an immutable tuple, not a list: a mutable default
  would be shared across calls.)
  """

  strings = []
  strings.append("Status of 1.8.x:\n\n")

  strings.append("Candidate changes:\n")
  strings.append("==================\n\n")

  strings.extend(map(serialize_entry, candidates))

  strings.append("Random new subheading:\n")
  strings.append("======================\n\n")

  strings.append("Veto-blocked changes:\n")
  strings.append("=====================\n\n")

  strings.append("Approved changes:\n")
  strings.append("=================\n\n")

  strings.extend(map(serialize_entry, approveds))

  return "".join(strings)
+
def verify_backport(sbox, expected_dump_file, uuid):
  """Compare the contents of the SBOX repository with EXPECTED_DUMP_FILE.
  Set the UUID of SBOX to UUID beforehand.
  Based on svnsync_tests.py:verify_mirror."""

  if uuid is None:
    # There is no expected dump file.
    return

  # Remove some SVNSync-specific housekeeping properties from the
  # mirror repository in preparation for the comparison dump.
  # (svn:date varies from run to run, so it must not affect the diff.)
  svntest.actions.enable_revprop_changes(sbox.repo_dir)
  for revnum in range(0, 1+int(sbox.youngest())):
    svntest.actions.run_and_verify_svnadmin([], [],
                                            "delrevprop", "-r", revnum,
                                            sbox.repo_dir, "svn:date")

  # Create a dump file from the mirror repository.
  # The UUID is forced to match the expected dump before dumping.
  dest_dump = open(expected_dump_file).readlines()
  svntest.actions.run_and_verify_svnadmin(None, [],
                                          'setuuid', '--', sbox.repo_dir, uuid)
  src_dump = svntest.actions.run_and_verify_dump(sbox.repo_dir)

  svntest.verify.compare_dump_files(
    "Dump files", "DUMP", dest_dump, src_dump)
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+#----------------------------------------------------------------------
@BackportTest('76cee987-25c9-4d6c-ad40-000000000001')
def backport_indented_entry(sbox):
  "parsing of entries with nonstandard indentation"

  # r6: nominate r4
  approved_entries = [
    make_entry([4]),
  ]
  # Dedent the serialized entry by one space to exercise the parser's
  # indentation handling.
  def reindenting_serialize_entry(*args, **kwargs):
    entry = serialize_entry(*args, **kwargs)
    return ('\n' + entry).replace('\n ', '\n')[1:]
  sbox.simple_append(STATUS, serialize_STATUS(approved_entries,
                              serialize_entry=reindenting_serialize_entry))
  sbox.simple_commit(message='Nominate r4')

  # Run it.
  run_backport(sbox)
+
+
+#----------------------------------------------------------------------
@BackportTest('76cee987-25c9-4d6c-ad40-000000000002')
def backport_two_approveds(sbox):
  "backport with two approveds"

  # r6: Enter votes
  approved_entries = [
    make_entry([4]),
    make_entry([5]),
  ]
  sbox.simple_append(STATUS, serialize_STATUS(approved_entries))
  sbox.simple_commit(message='Nominate r4. Nominate r5.')

  # r7, r8: Run it.
  run_backport(sbox)

  # Now back up and do three entries.
  # r9: revert r7, r8
  svntest.actions.run_and_verify_svnlook(["8\n"], [],
                                         'youngest', sbox.repo_dir)
  sbox.simple_update()
  svntest.main.run_svn(None, 'merge', '-r8:6',
                       '^/branch', sbox.ospath('branch'))
  sbox.simple_commit(message='Revert the merges.')

  # r10: Another change on trunk.
  # (Note that this change must be merged after r5.)
  sbox.simple_rm('subversion/trunk/A')
  sbox.simple_commit(message='Third change on trunk.')

  # r11: Nominate r10.
  sbox.simple_append(STATUS, serialize_entry(make_entry([10])))
  sbox.simple_commit(message='Nominate r10.')

  # r12, r13, r14: Run it.
  run_backport(sbox)
+
+
+
+#----------------------------------------------------------------------
@BackportTest('76cee987-25c9-4d6c-ad40-000000000003')
def backport_accept(sbox):
  "test --accept parsing"

  # r6: conflicting change on branch
  sbox.simple_append('branch/iota', 'Conflicts with first change\n')
  sbox.simple_commit(message="Conflicting change on iota")

  # r7: nominate r4 with --accept (because of r6)
  approved_entries = [
    make_entry([4], notes="Merge with --accept=theirs-conflict."),
  ]
  # Dedent by one space, as in backport_indented_entry, so --accept parsing
  # is also exercised on nonstandard indentation.
  def reindenting_serialize_entry(*args, **kwargs):
    entry = serialize_entry(*args, **kwargs)
    return ('\n' + entry).replace('\n ', '\n')[1:]
  sbox.simple_append(STATUS, serialize_STATUS(approved_entries,
                              serialize_entry=reindenting_serialize_entry))
  sbox.simple_commit(message='Nominate r4')

  # Run it.
  run_backport(sbox)
+
+
+#----------------------------------------------------------------------
@BackportTest('76cee987-25c9-4d6c-ad40-000000000004')
def backport_branches(sbox):
  "test branches"

  # r6: conflicting change on branch
  sbox.simple_append('branch/iota', 'Conflicts with first change')
  sbox.simple_commit(message="Conflicting change on iota")

  # r7: backport branch
  sbox.simple_update()
  sbox.simple_copy('branch', 'subversion/branches/r4')
  sbox.simple_commit(message='Create a backport branch')

  # r8: merge into backport branch
  # (record-only merge plus a manual resolution, simulating a human having
  # resolved the conflict on the backport branch)
  sbox.simple_update()
  svntest.main.run_svn(None, 'merge', '--record-only', '-c4',
                       '^/subversion/trunk',
                       sbox.ospath('subversion/branches/r4'))
  sbox.simple_mkdir('subversion/branches/r4/A_resolved')
  sbox.simple_append('subversion/branches/r4/iota', "resolved\n", truncate=1)
  sbox.simple_commit(message='Conflict resolution via mkdir')

  # r9: nominate r4 with branch
  approved_entries = [
    make_entry([4], branch="r4")
  ]
  sbox.simple_append(STATUS, serialize_STATUS(approved_entries))
  sbox.simple_commit(message='Nominate r4')

  # Run it.
  run_backport(sbox)

  # This also serves as the 'success mode' part of backport_branch_contains().
+
+
+#----------------------------------------------------------------------
+@BackportTest('76cee987-25c9-4d6c-ad40-000000000005')
+def backport_multirevisions(sbox):
+ "test multirevision entries"
+
+ # r6: nominate r4,r5
+ approved_entries = [
+ make_entry([4,5])
+ ]
+ sbox.simple_append(STATUS, serialize_STATUS(approved_entries))
+ sbox.simple_commit(message='Nominate a group.')
+
+ # Run it.
+ run_backport(sbox)
+
+
+#----------------------------------------------------------------------
+@BackportTest(None) # would be 000000000006
+def backport_conflicts_detection(sbox):
+ "test the conflicts detector"
+
+ # r6: conflicting change on branch
+ sbox.simple_append('branch/iota', 'Conflicts with first change\n')
+ sbox.simple_commit(message="Conflicting change on iota")
+
+ # r7: nominate r4, but without the requisite --accept
+ candidate_entries = [
+ make_entry([4], notes="This will conflict."),
+ ]
+ sbox.simple_append(STATUS, serialize_STATUS([], candidate_entries))
+ sbox.simple_commit(message='Nominate r4')
+
+ # Run it.
+ exit_code, output, errput = run_conflicter(sbox, True)
+
+ # Verify the conflict is detected.
+ expected_output = svntest.verify.RegexOutput(
+ 'Index: iota',
+ match_all=False,
+ )
+ expected_errput = (
+ r'(?ms)' # re.MULTILINE | re.DOTALL
+ r'.*Warning summary.*'
+ r'^r4 [(]default logsummary[)]: Conflicts on iota.*'
+ )
+ expected_errput = svntest.verify.RegexListOutput(
+ [
+ r'Warning summary',
+ r'===============',
+ r'r4 [(]default logsummary[)]: Conflicts on iota',
+ ],
+ match_all=False)
+ svntest.verify.verify_outputs(None, output, errput,
+ expected_output, expected_errput)
+ svntest.verify.verify_exit_code(None, exit_code, 1)
+
+ ## Now, let's test the "Depends:" annotation silences the error.
+
+ # Re-nominate.
+ approved_entries = [
+ make_entry([4], depends="World peace."),
+ ]
+ sbox.simple_append(STATUS, serialize_STATUS(approved_entries), truncate=True)
+ sbox.simple_commit(message='Re-nominate r4')
+
+ # Detect conflicts.
+ exit_code, output, errput = run_conflicter(sbox)
+
+ # Verify stdout. (exit_code and errput were verified by run_conflicter().)
+ svntest.verify.verify_outputs(None, output, errput,
+ "Conflicts found.*, as expected.", [])
+
+
+#----------------------------------------------------------------------
+@BackportTest(None) # would be 000000000007
+def backport_branch_contains(sbox):
+ "branch must contain the revisions"
+
+ # r6: conflicting change on branch
+ sbox.simple_append('branch/iota', 'Conflicts with first change')
+ sbox.simple_commit(message="Conflicting change on iota")
+
+ # r7: backport branch
+ sbox.simple_update()
+ sbox.simple_copy('branch', 'subversion/branches/r4')
+ sbox.simple_commit(message='Create a backport branch')
+
+ # r8: merge into backport branch
+ sbox.simple_update()
+ svntest.main.run_svn(None, 'merge', '--record-only', '-c4',
+ '^/subversion/trunk', sbox.ospath('subversion/branches/r4'))
+ sbox.simple_mkdir('subversion/branches/r4/A_resolved')
+ sbox.simple_append('subversion/branches/r4/iota', "resolved\n", truncate=1)
+ sbox.simple_commit(message='Conflict resolution via mkdir')
+
+ # r9: nominate r4,r5 with branch that contains not all of them
+ approved_entries = [
+ make_entry([4,5], branch="r4")
+ ]
+ sbox.simple_append(STATUS, serialize_STATUS(approved_entries))
+ sbox.simple_commit(message='Nominate r4')
+
+ # Run it.
+ exit_code, output, errput = run_backport(sbox, error_expected=True)
+
+ # Verify the error message.
+ expected_errput = svntest.verify.RegexOutput(
+ ".*Revisions 'r5' nominated but not included in branch",
+ match_all=False,
+ )
+ svntest.verify.verify_outputs(None, output, errput,
+ [], expected_errput)
+ svntest.verify.verify_exit_code(None, exit_code, 1)
+
+ # Verify no commit occurred.
+ svntest.actions.run_and_verify_svnlook(["9\n"], [],
+ 'youngest', sbox.repo_dir)
+
+ # Verify the working copy has been reverted.
+ svntest.actions.run_and_verify_svn([], [], 'status', '-q',
+ sbox.repo_dir)
+
+ # The sibling test backport_branches() verifies the success mode.
+
+
+
+
+#----------------------------------------------------------------------
+@BackportTest(None) # would be 000000000008
+def backport_double_conflict(sbox):
+ "two-revisioned entry with two conflicts"
+
+ # r6: conflicting change on branch
+ sbox.simple_append('branch/iota', 'Conflicts with first change')
+ sbox.simple_commit(message="Conflicting change on iota")
+
+ # r7: further conflicting change to same file
+ sbox.simple_update()
+ sbox.simple_append('subversion/trunk/iota', 'Third line\n')
+ sbox.simple_commit(message="iota's third line")
+
+ # r8: nominate
+ approved_entries = [
+ make_entry([4,7], depends="World peace.")
+ ]
+ sbox.simple_append(STATUS, serialize_STATUS(approved_entries))
+ sbox.simple_commit(message='Nominate the r4 group')
+
+ # Run it, in conflicts mode.
+ exit_code, output, errput = run_conflicter(sbox, True)
+
+ # Verify the failure mode: "merge conflict" error on stderr, but backport.pl
+ # itself exits with code 0, since conflicts were confined to Depends:-ed
+ # entries.
+ #
+ # The error only happens with multi-pass merges where the first pass
+ # conflicts and the second pass touches the conflict victim.
+ #
+ # The error would be:
+ # subversion/libsvn_client/merge.c:5499: (apr_err=SVN_ERR_WC_FOUND_CONFLICT)
+ # svn: E155015: One or more conflicts were produced while merging r3:4
+ # into '/tmp/stw/working_copies/backport_tests-8/branch' -- resolve all
+ # conflicts and rerun the merge to apply the remaining unmerged revisions
+ # ...
+ # Warning summary
+ # ===============
+ #
+ # r4 (default logsummary): subshell exited with code 256
+ # And backport.pl would exit with exit code 1.
+
+ expected_output = 'Conflicts found.*, as expected.'
+ expected_errput = svntest.verify.RegexOutput(
+ ".*svn: E155015:.*", # SVN_ERR_WC_FOUND_CONFLICT
+ match_all=False,
+ )
+ svntest.verify.verify_outputs(None, output, errput,
+ expected_output, expected_errput)
+ svntest.verify.verify_exit_code(None, exit_code, 0)
+ if any("Warning summary" in line for line in errput):
+ raise svntest.verify.SVNUnexpectedStderr(errput)
+
+ ## Now, let's ensure this does get detected if not silenced.
+ # r9: Re-nominate
+ approved_entries = [
+ make_entry([4,7]) # no depends=
+ ]
+ sbox.simple_append(STATUS, serialize_STATUS(approved_entries), truncate=True)
+ sbox.simple_commit(message='Re-nominate the r4 group')
+
+ exit_code, output, errput = run_conflicter(sbox, True)
+
+ ## An unexpected non-zero exit code is treated as a fatal error.
+ # [1-9]\d+ matches non-zero exit codes
+ expected_stdout = None
+ expected_errput = r'r4 .*: subshell exited with code (?:[1-9]\d+)' \
+ r"|.*subprocess.CalledProcessError.*'merge'.*exit status 1"
+ svntest.verify.verify_exit_code(None, exit_code, 1)
+ svntest.verify.verify_outputs(None, output, errput,
+ expected_stdout, expected_errput)
+
+
+
+#----------------------------------------------------------------------
+@BackportTest('76cee987-25c9-4d6c-ad40-000000000009')
+def backport_branch_with_original_revision(sbox):
+ "branch with original revision"
+
+ # r6: conflicting change on branch
+ sbox.simple_append('branch/iota', 'Conflicts with first change')
+ sbox.simple_commit(message="Conflicting change on iota")
+
+ # r7: backport branch
+ sbox.simple_update()
+ sbox.simple_copy('branch', 'subversion/branches/r4')
+ sbox.simple_commit(message='Create a backport branch')
+
+ # r8: merge into backport branch
+ sbox.simple_update()
+ svntest.main.run_svn(None, 'merge', '--record-only', '-c4',
+ '^/subversion/trunk', sbox.ospath('subversion/branches/r4'))
+ sbox.simple_mkdir('subversion/branches/r4/A_resolved')
+ sbox.simple_append('subversion/branches/r4/iota', "resolved\n", truncate=1)
+ sbox.simple_commit(message='Conflict resolution via mkdir')
+
+ # r9: original revision on branch
+ sbox.simple_update()
+ sbox.simple_mkdir('subversion/branches/r4/dir-created-on-backport-branch')
+ sbox.simple_commit(message='An original revision on the backport branch')
+
+ # r10: nominate the branch with r9 listed
+ approved_entries = [
+ make_entry([4, 9], branch="r4")
+ ]
+ sbox.simple_append(STATUS, serialize_STATUS(approved_entries))
+ sbox.simple_commit(message='Nominate r4+r9')
+
+ # r11, r12: Run it.
+ run_backport(sbox)
+
+
+#----------------------------------------------------------------------
+@BackportTest(None)
+def backport_otherproject_change(sbox):
+ "inoperative revision"
+
+ # r6: a change outside ^/subversion
+ sbox.simple_mkdir('elsewhere')
+ sbox.simple_commit()
+
+ # r7: Nominate r6 by mistake
+ approved_entries = [
+ make_entry([6])
+ ]
+ sbox.simple_append(STATUS, serialize_STATUS(approved_entries))
+ sbox.simple_commit(message='Nominate r6 by mistake')
+
+ # Run it.
+ exit_code, output, errput = run_backport(sbox, error_expected=True)
+
+ # Verify no commit occurred.
+ svntest.actions.run_and_verify_svnlook(["7\n"], [],
+ 'youngest', sbox.repo_dir)
+
+ # Verify the failure mode.
+ expected_stdout = None
+ expected_stderr = ".*only svn:mergeinfo changes.*"
+ if exit_code == 0:
+ # Can't use verify_exit_code() since the exact code used varies.
+ raise svntest.Failure("exit_code should be non-zero")
+ svntest.verify.verify_outputs(None, output, errput,
+ expected_stdout, expected_stderr)
+
+#----------------------------------------------------------------------
+@BackportTest(None)
+def backport_STATUS_mods(sbox):
+ "local mods to STATUS"
+
+ # Introduce a local mod.
+ sbox.simple_append(STATUS, "\n")
+
+ exit_code, output, errput = run_backport(sbox, error_expected=True)
+ expected_stdout = None
+ expected_stderr = ".*Local mods.*STATUS.*"
+ if exit_code == 0:
+ # Can't use verify_exit_code() since the exact code used varies.
+ raise svntest.Failure("exit_code should be non-zero")
+ svntest.verify.verify_outputs(None, output, errput,
+ expected_stdout, expected_stderr)
+
+#----------------------------------------------------------------------
+@BackportTest('76cee987-25c9-4d6c-ad40-000000000012')
+def backport_unicode_entry(sbox):
+ "an entry containing literal UTF-8"
+
+ # r6: nominate r4
+ approved_entries = [
+ make_entry([4], notes="Hello 🗺"),
+ ]
+ sbox.simple_append(STATUS, serialize_STATUS(approved_entries))
+ sbox.simple_commit(message='Nominate r4')
+
+ # Run it.
+ run_backport(sbox)
+
+
+#----------------------------------------------------------------------
+
+########################################################################
+# Run the tests
+
+# list all tests here, starting with None:
+test_list = [ None,
+ backport_indented_entry,
+ backport_two_approveds,
+ backport_accept,
+ backport_branches,
+ backport_multirevisions,
+ backport_conflicts_detection,
+ backport_branch_contains,
+ backport_double_conflict,
+ backport_branch_with_original_revision,
+ backport_otherproject_change,
+ backport_STATUS_mods,
+ backport_unicode_entry,
+ # When adding a new test, include the test number in the last
+ # 6 bytes of the UUID, in decimal.
+ ]
+
+if __name__ == '__main__':
+ # Using putenv() here is fine because this file is never run as a module.
+ os.putenv('SVN_BACKPORT_DONT_SLEEP', '1')
+ svntest.main.run_tests(test_list)
+ # NOTREACHED
+
+
+### End of file.
diff --git a/tools/dist/backport_tests_data/backport_accept.dump b/tools/dist/backport_tests_data/backport_accept.dump
new file mode 100644
index 0000000..9532dc3
--- /dev/null
+++ b/tools/dist/backport_tests_data/backport_accept.dump
@@ -0,0 +1,550 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 76cee987-25c9-4d6c-ad40-000000000003
+
+Revision-number: 0
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 83
+Content-length: 83
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 27
+Log message for revision 1.
+PROPS-END
+
+Node-path: A
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E/alpha
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: d1fa4a3ced98961674a441930a51f2d3
+Text-content-sha1: b347d1da69df9a6a70433ceeaa0d46c8483e8c03
+Content-length: 36
+
+PROPS-END
+This is the file 'alpha'.
+
+
+Node-path: A/B/E/beta
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 67c756078f24f946f6ec2d00d02f50e1
+Text-content-sha1: d001710ac8e622c6d1fe59b1e265a3908acdd2a3
+Content-length: 35
+
+PROPS-END
+This is the file 'beta'.
+
+
+Node-path: A/B/F
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/lambda
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 27
+Text-content-md5: 911c7a8d869b8c1e566f57da54d889c6
+Text-content-sha1: 784a9298366863da2b65ebf82b4e1123755a2421
+Content-length: 37
+
+PROPS-END
+This is the file 'lambda'.
+
+
+Node-path: A/C
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G/pi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: adddfc3e6b605b5f90ceeab11b4e8ab6
+Text-content-sha1: 411e258dc14b42701fdc29b75f653e93f8686415
+Content-length: 33
+
+PROPS-END
+This is the file 'pi'.
+
+
+Node-path: A/D/G/rho
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 82f2211cf4ab22e3555fc7b835fbc604
+Text-content-sha1: 56388a031dffbf9df7c32e1f299b1d5d7ef60881
+Content-length: 34
+
+PROPS-END
+This is the file 'rho'.
+
+
+Node-path: A/D/G/tau
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 9936e2716e469bb686deb98c280ead58
+Text-content-sha1: 62e8c07d56bee94ea4577e80414fa8805aaf0175
+Content-length: 34
+
+PROPS-END
+This is the file 'tau'.
+
+
+Node-path: A/D/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/H/chi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 8f5ebad6d1f7775c2682e54417cbe4d3
+Text-content-sha1: abeac1bf62099ab66b44779198dc19f40e3244f4
+Content-length: 34
+
+PROPS-END
+This is the file 'chi'.
+
+
+Node-path: A/D/H/omega
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: fe4ec8bdd3d2056db4f55b474a10fadc
+Text-content-sha1: c06e671bf15a6af55086176a0931d3b5034c82e6
+Content-length: 36
+
+PROPS-END
+This is the file 'omega'.
+
+
+Node-path: A/D/H/psi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: e81f8f68ba50e749c200cb3c9ce5d2b1
+Text-content-sha1: 9c438bde39e8ccbbd366df2638e3cb6700950204
+Content-length: 34
+
+PROPS-END
+This is the file 'psi'.
+
+
+Node-path: A/D/gamma
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: 412138bd677d64cd1c32fafbffe6245d
+Text-content-sha1: 74b75d7f2e1a0292f17d5a57c570bd89783f5d1c
+Content-length: 36
+
+PROPS-END
+This is the file 'gamma'.
+
+
+Node-path: A/mu
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: baf78ae06a2d5b7d9554c5f1280d3fa8
+Text-content-sha1: b4d00c56351d1a752e24d839d41a362d8da4a4c7
+Content-length: 33
+
+PROPS-END
+This is the file 'mu'.
+
+
+Node-path: iota
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-content-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+Content-length: 35
+
+PROPS-END
+This is the file 'iota'.
+
+
+Revision-number: 2
+Prop-content-length: 68
+Content-length: 68
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 12
+Create trunk
+PROPS-END
+
+Node-path: subversion
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/branches
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/tags
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/trunk/A
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: A
+
+
+Node-path: subversion/trunk/iota
+Node-kind: file
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: iota
+Text-copy-source-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-copy-source-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+
+
+Node-path: A
+Node-action: delete
+
+
+Node-path: iota
+Node-action: delete
+
+
+Revision-number: 3
+Prop-content-length: 87
+Content-length: 87
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 31
+Create branch, with STATUS file
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 2
+Node-copyfrom-path: subversion/trunk
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 0
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Text-content-sha1: da39a3ee5e6b4b0d3255bfef95601890afd80709
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 4
+Prop-content-length: 68
+Content-length: 68
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 12
+First change
+PROPS-END
+
+Node-path: subversion/trunk/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 38
+Text-content-md5: 67f471c2ecc2c9e561d122d6e6b0f847
+Text-content-sha1: 750accb6e7f880a1d05ce725c19eb60183bb4b26
+Content-length: 38
+
+This is the file 'iota'.
+First change
+
+
+Revision-number: 5
+Prop-content-length: 69
+Content-length: 69
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 13
+Second change
+PROPS-END
+
+Node-path: subversion/trunk/A/mu
+Node-kind: file
+Node-action: change
+Text-content-length: 37
+Text-content-md5: eab751301b4e650c83324dfef4aad667
+Text-content-sha1: ab36cad564c7c50dec5ac1eb0bf879cf4e3a5f99
+Content-length: 37
+
+This is the file 'mu'.
+Second change
+
+
+Revision-number: 6
+Prop-content-length: 82
+Content-length: 82
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 26
+Conflicting change on iota
+PROPS-END
+
+Node-path: branch/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 53
+Text-content-md5: 0c42f8c8b103bf00045cdf514238cfab
+Text-content-sha1: 440ad0a1673258aea8ba78fef0845e182757f8f9
+Content-length: 53
+
+This is the file 'iota'.
+Conflicts with first change
+
+
+Revision-number: 7
+Prop-content-length: 67
+Content-length: 67
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 11
+Nominate r4
+PROPS-END
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 284
+Text-content-md5: f1f6d73c681587eba4082139a9f2b724
+Text-content-sha1: 251bb84036790a810b1f4cc7f7a4e64c6a54ce9b
+Content-length: 284
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+* r4
+ default logsummary
+ Notes: Merge with --accept=theirs-conflict.
+ Votes:
+ +1: jrandom
+
+
+
+Revision-number: 8
+Prop-content-length: 206
+Content-length: 206
+
+K 10
+svn:author
+V 6
+daniel
+K 7
+svn:log
+V 150
+Merge r4 from trunk, with --accept=theirs-conflict:
+
+* r4
+ default logsummary
+ Notes: Merge with --accept=theirs-conflict.
+ Votes:
+ +1: jrandom
+
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: change
+Prop-content-length: 54
+Content-length: 54
+
+K 13
+svn:mergeinfo
+V 19
+/subversion/trunk:4
+PROPS-END
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 185
+Text-content-md5: 6f71fec92afeaa5c1ebe02349f548ca9
+Text-content-sha1: eece02003d9c51610249e3fdd0d4e191e02ba3b7
+Content-length: 185
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+
+Node-path: branch/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 38
+Text-content-md5: 67f471c2ecc2c9e561d122d6e6b0f847
+Text-content-sha1: 750accb6e7f880a1d05ce725c19eb60183bb4b26
+Content-length: 38
+
+This is the file 'iota'.
+First change
+
+
diff --git a/tools/dist/backport_tests_data/backport_branch_with_original_revision.dump b/tools/dist/backport_tests_data/backport_branch_with_original_revision.dump
new file mode 100644
index 0000000..506603e
--- /dev/null
+++ b/tools/dist/backport_tests_data/backport_branch_with_original_revision.dump
@@ -0,0 +1,672 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 76cee987-25c9-4d6c-ad40-000000000009
+
+Revision-number: 0
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 83
+Content-length: 83
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 27
+Log message for revision 1.
+PROPS-END
+
+Node-path: A
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E/alpha
+Node-kind: file
+Node-action: add
+Text-content-md5: d1fa4a3ced98961674a441930a51f2d3
+Text-content-sha1: b347d1da69df9a6a70433ceeaa0d46c8483e8c03
+Prop-content-length: 10
+Text-content-length: 26
+Content-length: 36
+
+PROPS-END
+This is the file 'alpha'.
+
+
+Node-path: A/B/E/beta
+Node-kind: file
+Node-action: add
+Text-content-md5: 67c756078f24f946f6ec2d00d02f50e1
+Text-content-sha1: d001710ac8e622c6d1fe59b1e265a3908acdd2a3
+Prop-content-length: 10
+Text-content-length: 25
+Content-length: 35
+
+PROPS-END
+This is the file 'beta'.
+
+
+Node-path: A/B/F
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/lambda
+Node-kind: file
+Node-action: add
+Text-content-md5: 911c7a8d869b8c1e566f57da54d889c6
+Text-content-sha1: 784a9298366863da2b65ebf82b4e1123755a2421
+Prop-content-length: 10
+Text-content-length: 27
+Content-length: 37
+
+PROPS-END
+This is the file 'lambda'.
+
+
+Node-path: A/C
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G/pi
+Node-kind: file
+Node-action: add
+Text-content-md5: adddfc3e6b605b5f90ceeab11b4e8ab6
+Text-content-sha1: 411e258dc14b42701fdc29b75f653e93f8686415
+Prop-content-length: 10
+Text-content-length: 23
+Content-length: 33
+
+PROPS-END
+This is the file 'pi'.
+
+
+Node-path: A/D/G/rho
+Node-kind: file
+Node-action: add
+Text-content-md5: 82f2211cf4ab22e3555fc7b835fbc604
+Text-content-sha1: 56388a031dffbf9df7c32e1f299b1d5d7ef60881
+Prop-content-length: 10
+Text-content-length: 24
+Content-length: 34
+
+PROPS-END
+This is the file 'rho'.
+
+
+Node-path: A/D/G/tau
+Node-kind: file
+Node-action: add
+Text-content-md5: 9936e2716e469bb686deb98c280ead58
+Text-content-sha1: 62e8c07d56bee94ea4577e80414fa8805aaf0175
+Prop-content-length: 10
+Text-content-length: 24
+Content-length: 34
+
+PROPS-END
+This is the file 'tau'.
+
+
+Node-path: A/D/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/H/chi
+Node-kind: file
+Node-action: add
+Text-content-md5: 8f5ebad6d1f7775c2682e54417cbe4d3
+Text-content-sha1: abeac1bf62099ab66b44779198dc19f40e3244f4
+Prop-content-length: 10
+Text-content-length: 24
+Content-length: 34
+
+PROPS-END
+This is the file 'chi'.
+
+
+Node-path: A/D/H/omega
+Node-kind: file
+Node-action: add
+Text-content-md5: fe4ec8bdd3d2056db4f55b474a10fadc
+Text-content-sha1: c06e671bf15a6af55086176a0931d3b5034c82e6
+Prop-content-length: 10
+Text-content-length: 26
+Content-length: 36
+
+PROPS-END
+This is the file 'omega'.
+
+
+Node-path: A/D/H/psi
+Node-kind: file
+Node-action: add
+Text-content-md5: e81f8f68ba50e749c200cb3c9ce5d2b1
+Text-content-sha1: 9c438bde39e8ccbbd366df2638e3cb6700950204
+Prop-content-length: 10
+Text-content-length: 24
+Content-length: 34
+
+PROPS-END
+This is the file 'psi'.
+
+
+Node-path: A/D/gamma
+Node-kind: file
+Node-action: add
+Text-content-md5: 412138bd677d64cd1c32fafbffe6245d
+Text-content-sha1: 74b75d7f2e1a0292f17d5a57c570bd89783f5d1c
+Prop-content-length: 10
+Text-content-length: 26
+Content-length: 36
+
+PROPS-END
+This is the file 'gamma'.
+
+
+Node-path: A/mu
+Node-kind: file
+Node-action: add
+Text-content-md5: baf78ae06a2d5b7d9554c5f1280d3fa8
+Text-content-sha1: b4d00c56351d1a752e24d839d41a362d8da4a4c7
+Prop-content-length: 10
+Text-content-length: 23
+Content-length: 33
+
+PROPS-END
+This is the file 'mu'.
+
+
+Node-path: iota
+Node-kind: file
+Node-action: add
+Text-content-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-content-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+Prop-content-length: 10
+Text-content-length: 25
+Content-length: 35
+
+PROPS-END
+This is the file 'iota'.
+
+
+Revision-number: 2
+Prop-content-length: 68
+Content-length: 68
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 12
+Create trunk
+PROPS-END
+
+Node-path: subversion
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/branches
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/tags
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/trunk/A
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: A
+
+
+Node-path: subversion/trunk/iota
+Node-kind: file
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: iota
+Text-copy-source-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-copy-source-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+
+
+Node-path: A
+Node-action: delete
+
+
+Node-path: iota
+Node-action: delete
+
+
+Revision-number: 3
+Prop-content-length: 87
+Content-length: 87
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 31
+Create branch, with STATUS file
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 2
+Node-copyfrom-path: subversion/trunk
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: add
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Text-content-sha1: da39a3ee5e6b4b0d3255bfef95601890afd80709
+Prop-content-length: 10
+Text-content-length: 0
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 4
+Prop-content-length: 68
+Content-length: 68
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 12
+First change
+PROPS-END
+
+Node-path: subversion/trunk/iota
+Node-kind: file
+Node-action: change
+Text-content-md5: 67f471c2ecc2c9e561d122d6e6b0f847
+Text-content-sha1: 750accb6e7f880a1d05ce725c19eb60183bb4b26
+Text-content-length: 38
+Content-length: 38
+
+This is the file 'iota'.
+First change
+
+
+Revision-number: 5
+Prop-content-length: 69
+Content-length: 69
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 13
+Second change
+PROPS-END
+
+Node-path: subversion/trunk/A/mu
+Node-kind: file
+Node-action: change
+Text-content-md5: eab751301b4e650c83324dfef4aad667
+Text-content-sha1: ab36cad564c7c50dec5ac1eb0bf879cf4e3a5f99
+Text-content-length: 37
+Content-length: 37
+
+This is the file 'mu'.
+Second change
+
+
+Revision-number: 6
+Prop-content-length: 82
+Content-length: 82
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 26
+Conflicting change on iota
+PROPS-END
+
+Node-path: branch/iota
+Node-kind: file
+Node-action: change
+Text-content-md5: 2309abeef2762865a65aef15a23bd613
+Text-content-sha1: d3339d12dee6df117675e9abf30ebfa1a1dde889
+Text-content-length: 52
+Content-length: 52
+
+This is the file 'iota'.
+Conflicts with first change
+
+Revision-number: 7
+Prop-content-length: 80
+Content-length: 80
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 24
+Create a backport branch
+PROPS-END
+
+Node-path: subversion/branches/r4
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 6
+Node-copyfrom-path: branch
+
+
+Revision-number: 8
+Prop-content-length: 85
+Content-length: 85
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 29
+Conflict resolution via mkdir
+PROPS-END
+
+Node-path: subversion/branches/r4
+Node-kind: dir
+Node-action: change
+Prop-content-length: 54
+Content-length: 54
+
+K 13
+svn:mergeinfo
+V 19
+/subversion/trunk:4
+PROPS-END
+
+
+Node-path: subversion/branches/r4/A_resolved
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/branches/r4/iota
+Node-kind: file
+Node-action: change
+Text-content-md5: 1d0413d4da6866dae63f902165786614
+Text-content-sha1: e2cb0815ec8f0a8b36c6aa910c1f894ec1487da3
+Text-content-length: 9
+Content-length: 9
+
+resolved
+
+
+Revision-number: 9
+Prop-content-length: 99
+Content-length: 99
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 43
+An original revision on the backport branch
+PROPS-END
+
+Node-path: subversion/branches/r4/dir-created-on-backport-branch
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 10
+Prop-content-length: 70
+Content-length: 70
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 14
+Nominate r4+r9
+PROPS-END
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-md5: 608287647ed261a4e96f6eb9aec989d5
+Text-content-sha1: b790dd7d918d01cebdfbb3c055d8f10d4252f0ae
+Text-content-length: 260
+Content-length: 260
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+ * r4, r9
+ default logsummary
+ Branch: r4
+ Votes:
+ +1: jrandom
+
+
+
+Revision-number: 11
+Prop-content-length: 150
+Content-length: 150
+
+K 10
+svn:author
+V 6
+daniel
+K 7
+svn:log
+V 95
+Merge the r4 branch:
+
+ * r4, r9
+ default logsummary
+ Branch: r4
+ Votes:
+ +1: jrandom
+
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: change
+Prop-content-length: 83
+Content-length: 83
+
+K 13
+svn:mergeinfo
+V 48
+/subversion/branches/r4:7-10
+/subversion/trunk:4
+PROPS-END
+
+
+Node-path: branch/A_resolved
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 10
+Node-copyfrom-path: subversion/branches/r4/A_resolved
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-md5: 6f71fec92afeaa5c1ebe02349f548ca9
+Text-content-sha1: eece02003d9c51610249e3fdd0d4e191e02ba3b7
+Text-content-length: 185
+Content-length: 185
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+
+Node-path: branch/dir-created-on-backport-branch
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 10
+Node-copyfrom-path: subversion/branches/r4/dir-created-on-backport-branch
+
+
+Node-path: branch/iota
+Node-kind: file
+Node-action: change
+Text-content-md5: 1d0413d4da6866dae63f902165786614
+Text-content-sha1: e2cb0815ec8f0a8b36c6aa910c1f894ec1487da3
+Text-content-length: 9
+Content-length: 9
+
+resolved
+
+
+Revision-number: 12
+Prop-content-length: 93
+Content-length: 93
+
+K 10
+svn:author
+V 6
+daniel
+K 7
+svn:log
+V 38
+Remove the 'r4' branch, merged in r11.
+PROPS-END
+
+Node-path: subversion/branches/r4
+Node-action: delete
+
+
diff --git a/tools/dist/backport_tests_data/backport_branches.dump b/tools/dist/backport_tests_data/backport_branches.dump
new file mode 100644
index 0000000..de6c800
--- /dev/null
+++ b/tools/dist/backport_tests_data/backport_branches.dump
@@ -0,0 +1,642 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 76cee987-25c9-4d6c-ad40-000000000004
+
+Revision-number: 0
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 83
+Content-length: 83
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 27
+Log message for revision 1.
+PROPS-END
+
+Node-path: A
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E/alpha
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: d1fa4a3ced98961674a441930a51f2d3
+Text-content-sha1: b347d1da69df9a6a70433ceeaa0d46c8483e8c03
+Content-length: 36
+
+PROPS-END
+This is the file 'alpha'.
+
+
+Node-path: A/B/E/beta
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 67c756078f24f946f6ec2d00d02f50e1
+Text-content-sha1: d001710ac8e622c6d1fe59b1e265a3908acdd2a3
+Content-length: 35
+
+PROPS-END
+This is the file 'beta'.
+
+
+Node-path: A/B/F
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/lambda
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 27
+Text-content-md5: 911c7a8d869b8c1e566f57da54d889c6
+Text-content-sha1: 784a9298366863da2b65ebf82b4e1123755a2421
+Content-length: 37
+
+PROPS-END
+This is the file 'lambda'.
+
+
+Node-path: A/C
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G/pi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: adddfc3e6b605b5f90ceeab11b4e8ab6
+Text-content-sha1: 411e258dc14b42701fdc29b75f653e93f8686415
+Content-length: 33
+
+PROPS-END
+This is the file 'pi'.
+
+
+Node-path: A/D/G/rho
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 82f2211cf4ab22e3555fc7b835fbc604
+Text-content-sha1: 56388a031dffbf9df7c32e1f299b1d5d7ef60881
+Content-length: 34
+
+PROPS-END
+This is the file 'rho'.
+
+
+Node-path: A/D/G/tau
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 9936e2716e469bb686deb98c280ead58
+Text-content-sha1: 62e8c07d56bee94ea4577e80414fa8805aaf0175
+Content-length: 34
+
+PROPS-END
+This is the file 'tau'.
+
+
+Node-path: A/D/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/H/chi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 8f5ebad6d1f7775c2682e54417cbe4d3
+Text-content-sha1: abeac1bf62099ab66b44779198dc19f40e3244f4
+Content-length: 34
+
+PROPS-END
+This is the file 'chi'.
+
+
+Node-path: A/D/H/omega
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: fe4ec8bdd3d2056db4f55b474a10fadc
+Text-content-sha1: c06e671bf15a6af55086176a0931d3b5034c82e6
+Content-length: 36
+
+PROPS-END
+This is the file 'omega'.
+
+
+Node-path: A/D/H/psi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: e81f8f68ba50e749c200cb3c9ce5d2b1
+Text-content-sha1: 9c438bde39e8ccbbd366df2638e3cb6700950204
+Content-length: 34
+
+PROPS-END
+This is the file 'psi'.
+
+
+Node-path: A/D/gamma
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: 412138bd677d64cd1c32fafbffe6245d
+Text-content-sha1: 74b75d7f2e1a0292f17d5a57c570bd89783f5d1c
+Content-length: 36
+
+PROPS-END
+This is the file 'gamma'.
+
+
+Node-path: A/mu
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: baf78ae06a2d5b7d9554c5f1280d3fa8
+Text-content-sha1: b4d00c56351d1a752e24d839d41a362d8da4a4c7
+Content-length: 33
+
+PROPS-END
+This is the file 'mu'.
+
+
+Node-path: iota
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-content-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+Content-length: 35
+
+PROPS-END
+This is the file 'iota'.
+
+
+Revision-number: 2
+Prop-content-length: 68
+Content-length: 68
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 12
+Create trunk
+PROPS-END
+
+Node-path: subversion
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/branches
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/tags
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/trunk/A
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: A
+
+
+Node-path: subversion/trunk/iota
+Node-kind: file
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: iota
+Text-copy-source-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-copy-source-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+
+
+Node-path: A
+Node-action: delete
+
+
+Node-path: iota
+Node-action: delete
+
+
+Revision-number: 3
+Prop-content-length: 87
+Content-length: 87
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 31
+Create branch, with STATUS file
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 2
+Node-copyfrom-path: subversion/trunk
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 0
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Text-content-sha1: da39a3ee5e6b4b0d3255bfef95601890afd80709
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 4
+Prop-content-length: 68
+Content-length: 68
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 12
+First change
+PROPS-END
+
+Node-path: subversion/trunk/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 38
+Text-content-md5: 67f471c2ecc2c9e561d122d6e6b0f847
+Text-content-sha1: 750accb6e7f880a1d05ce725c19eb60183bb4b26
+Content-length: 38
+
+This is the file 'iota'.
+First change
+
+
+Revision-number: 5
+Prop-content-length: 69
+Content-length: 69
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 13
+Second change
+PROPS-END
+
+Node-path: subversion/trunk/A/mu
+Node-kind: file
+Node-action: change
+Text-content-length: 37
+Text-content-md5: eab751301b4e650c83324dfef4aad667
+Text-content-sha1: ab36cad564c7c50dec5ac1eb0bf879cf4e3a5f99
+Content-length: 37
+
+This is the file 'mu'.
+Second change
+
+
+Revision-number: 6
+Prop-content-length: 82
+Content-length: 82
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 26
+Conflicting change on iota
+PROPS-END
+
+Node-path: branch/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 52
+Text-content-md5: 2309abeef2762865a65aef15a23bd613
+Text-content-sha1: d3339d12dee6df117675e9abf30ebfa1a1dde889
+Content-length: 52
+
+This is the file 'iota'.
+Conflicts with first change
+
+Revision-number: 7
+Prop-content-length: 80
+Content-length: 80
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 24
+Create a backport branch
+PROPS-END
+
+Node-path: subversion/branches/r4
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 6
+Node-copyfrom-path: branch
+
+
+Revision-number: 8
+Prop-content-length: 85
+Content-length: 85
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 29
+Conflict resolution via mkdir
+PROPS-END
+
+Node-path: subversion/branches/r4
+Node-kind: dir
+Node-action: change
+Prop-content-length: 54
+Content-length: 54
+
+K 13
+svn:mergeinfo
+V 19
+/subversion/trunk:4
+PROPS-END
+
+
+Node-path: subversion/branches/r4/A_resolved
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/branches/r4/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 9
+Text-content-md5: 1d0413d4da6866dae63f902165786614
+Text-content-sha1: e2cb0815ec8f0a8b36c6aa910c1f894ec1487da3
+Content-length: 9
+
+resolved
+
+
+Revision-number: 9
+Prop-content-length: 67
+Content-length: 67
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 11
+Nominate r4
+PROPS-END
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 256
+Text-content-md5: 76f9bca3ededa2eb3c196ef0bbc9ee1b
+Text-content-sha1: 283a9f7ec716dc64b5ec8e5e1d9739d55e34b2d5
+Content-length: 256
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+ * r4
+ default logsummary
+ Branch: r4
+ Votes:
+ +1: jrandom
+
+
+
+Revision-number: 10
+Prop-content-length: 146
+Content-length: 146
+
+K 10
+svn:author
+V 6
+daniel
+K 7
+svn:log
+V 91
+Merge the r4 branch:
+
+ * r4
+ default logsummary
+ Branch: r4
+ Votes:
+ +1: jrandom
+
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: change
+Prop-content-length: 82
+Content-length: 82
+
+K 13
+svn:mergeinfo
+V 47
+/subversion/branches/r4:7-9
+/subversion/trunk:4
+PROPS-END
+
+
+Node-path: branch/A_resolved
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 9
+Node-copyfrom-path: subversion/branches/r4/A_resolved
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 185
+Text-content-md5: 6f71fec92afeaa5c1ebe02349f548ca9
+Text-content-sha1: eece02003d9c51610249e3fdd0d4e191e02ba3b7
+Content-length: 185
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+
+Node-path: branch/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 9
+Text-content-md5: 1d0413d4da6866dae63f902165786614
+Text-content-sha1: e2cb0815ec8f0a8b36c6aa910c1f894ec1487da3
+Content-length: 9
+
+resolved
+
+
+Revision-number: 11
+Prop-content-length: 93
+Content-length: 93
+
+K 10
+svn:author
+V 6
+daniel
+K 7
+svn:log
+V 38
+Remove the 'r4' branch, merged in r10.
+PROPS-END
+
+Node-path: subversion/branches/r4
+Node-action: delete
+
+
diff --git a/tools/dist/backport_tests_data/backport_indented_entry.dump b/tools/dist/backport_tests_data/backport_indented_entry.dump
new file mode 100644
index 0000000..bbc501d
--- /dev/null
+++ b/tools/dist/backport_tests_data/backport_indented_entry.dump
@@ -0,0 +1,522 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 76cee987-25c9-4d6c-ad40-000000000001
+
+Revision-number: 0
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 83
+Content-length: 83
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 27
+Log message for revision 1.
+PROPS-END
+
+Node-path: A
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E/alpha
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: d1fa4a3ced98961674a441930a51f2d3
+Text-content-sha1: b347d1da69df9a6a70433ceeaa0d46c8483e8c03
+Content-length: 36
+
+PROPS-END
+This is the file 'alpha'.
+
+
+Node-path: A/B/E/beta
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 67c756078f24f946f6ec2d00d02f50e1
+Text-content-sha1: d001710ac8e622c6d1fe59b1e265a3908acdd2a3
+Content-length: 35
+
+PROPS-END
+This is the file 'beta'.
+
+
+Node-path: A/B/F
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/lambda
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 27
+Text-content-md5: 911c7a8d869b8c1e566f57da54d889c6
+Text-content-sha1: 784a9298366863da2b65ebf82b4e1123755a2421
+Content-length: 37
+
+PROPS-END
+This is the file 'lambda'.
+
+
+Node-path: A/C
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G/pi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: adddfc3e6b605b5f90ceeab11b4e8ab6
+Text-content-sha1: 411e258dc14b42701fdc29b75f653e93f8686415
+Content-length: 33
+
+PROPS-END
+This is the file 'pi'.
+
+
+Node-path: A/D/G/rho
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 82f2211cf4ab22e3555fc7b835fbc604
+Text-content-sha1: 56388a031dffbf9df7c32e1f299b1d5d7ef60881
+Content-length: 34
+
+PROPS-END
+This is the file 'rho'.
+
+
+Node-path: A/D/G/tau
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 9936e2716e469bb686deb98c280ead58
+Text-content-sha1: 62e8c07d56bee94ea4577e80414fa8805aaf0175
+Content-length: 34
+
+PROPS-END
+This is the file 'tau'.
+
+
+Node-path: A/D/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/H/chi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 8f5ebad6d1f7775c2682e54417cbe4d3
+Text-content-sha1: abeac1bf62099ab66b44779198dc19f40e3244f4
+Content-length: 34
+
+PROPS-END
+This is the file 'chi'.
+
+
+Node-path: A/D/H/omega
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: fe4ec8bdd3d2056db4f55b474a10fadc
+Text-content-sha1: c06e671bf15a6af55086176a0931d3b5034c82e6
+Content-length: 36
+
+PROPS-END
+This is the file 'omega'.
+
+
+Node-path: A/D/H/psi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: e81f8f68ba50e749c200cb3c9ce5d2b1
+Text-content-sha1: 9c438bde39e8ccbbd366df2638e3cb6700950204
+Content-length: 34
+
+PROPS-END
+This is the file 'psi'.
+
+
+Node-path: A/D/gamma
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: 412138bd677d64cd1c32fafbffe6245d
+Text-content-sha1: 74b75d7f2e1a0292f17d5a57c570bd89783f5d1c
+Content-length: 36
+
+PROPS-END
+This is the file 'gamma'.
+
+
+Node-path: A/mu
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: baf78ae06a2d5b7d9554c5f1280d3fa8
+Text-content-sha1: b4d00c56351d1a752e24d839d41a362d8da4a4c7
+Content-length: 33
+
+PROPS-END
+This is the file 'mu'.
+
+
+Node-path: iota
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-content-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+Content-length: 35
+
+PROPS-END
+This is the file 'iota'.
+
+
+Revision-number: 2
+Prop-content-length: 68
+Content-length: 68
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 12
+Create trunk
+PROPS-END
+
+Node-path: subversion
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/branches
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/tags
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/trunk/A
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: A
+
+
+Node-path: subversion/trunk/iota
+Node-kind: file
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: iota
+Text-copy-source-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-copy-source-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+
+
+Node-path: A
+Node-action: delete
+
+
+Node-path: iota
+Node-action: delete
+
+
+Revision-number: 3
+Prop-content-length: 87
+Content-length: 87
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 31
+Create branch, with STATUS file
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 2
+Node-copyfrom-path: subversion/trunk
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 0
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Text-content-sha1: da39a3ee5e6b4b0d3255bfef95601890afd80709
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 4
+Prop-content-length: 68
+Content-length: 68
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 12
+First change
+PROPS-END
+
+Node-path: subversion/trunk/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 38
+Text-content-md5: 67f471c2ecc2c9e561d122d6e6b0f847
+Text-content-sha1: 750accb6e7f880a1d05ce725c19eb60183bb4b26
+Content-length: 38
+
+This is the file 'iota'.
+First change
+
+
+Revision-number: 5
+Prop-content-length: 69
+Content-length: 69
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 13
+Second change
+PROPS-END
+
+Node-path: subversion/trunk/A/mu
+Node-kind: file
+Node-action: change
+Text-content-length: 37
+Text-content-md5: eab751301b4e650c83324dfef4aad667
+Text-content-sha1: ab36cad564c7c50dec5ac1eb0bf879cf4e3a5f99
+Content-length: 37
+
+This is the file 'mu'.
+Second change
+
+
+Revision-number: 6
+Prop-content-length: 67
+Content-length: 67
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 11
+Nominate r4
+PROPS-END
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 238
+Text-content-md5: d746b12362ddd59c13d39f291710b25b
+Text-content-sha1: aafcdde209c276ffd2d63d6cd4c4b5ab35b36c27
+Content-length: 238
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+* r4
+ default logsummary
+ Votes:
+ +1: jrandom
+
+
+
+Revision-number: 7
+Prop-content-length: 128
+Content-length: 128
+
+K 10
+svn:author
+V 6
+daniel
+K 7
+svn:log
+V 73
+Merge r4 from trunk:
+
+* r4
+ default logsummary
+ Votes:
+ +1: jrandom
+
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: change
+Prop-content-length: 54
+Content-length: 54
+
+K 13
+svn:mergeinfo
+V 19
+/subversion/trunk:4
+PROPS-END
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 185
+Text-content-md5: 6f71fec92afeaa5c1ebe02349f548ca9
+Text-content-sha1: eece02003d9c51610249e3fdd0d4e191e02ba3b7
+Content-length: 185
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+
+Node-path: branch/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 38
+Text-content-md5: 67f471c2ecc2c9e561d122d6e6b0f847
+Text-content-sha1: 750accb6e7f880a1d05ce725c19eb60183bb4b26
+Content-length: 38
+
+This is the file 'iota'.
+First change
+
+
diff --git a/tools/dist/backport_tests_data/backport_multirevisions.dump b/tools/dist/backport_tests_data/backport_multirevisions.dump
new file mode 100644
index 0000000..d04c850
--- /dev/null
+++ b/tools/dist/backport_tests_data/backport_multirevisions.dump
@@ -0,0 +1,534 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 76cee987-25c9-4d6c-ad40-000000000005
+
+Revision-number: 0
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 83
+Content-length: 83
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 27
+Log message for revision 1.
+PROPS-END
+
+Node-path: A
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E/alpha
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: d1fa4a3ced98961674a441930a51f2d3
+Text-content-sha1: b347d1da69df9a6a70433ceeaa0d46c8483e8c03
+Content-length: 36
+
+PROPS-END
+This is the file 'alpha'.
+
+
+Node-path: A/B/E/beta
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 67c756078f24f946f6ec2d00d02f50e1
+Text-content-sha1: d001710ac8e622c6d1fe59b1e265a3908acdd2a3
+Content-length: 35
+
+PROPS-END
+This is the file 'beta'.
+
+
+Node-path: A/B/F
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/lambda
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 27
+Text-content-md5: 911c7a8d869b8c1e566f57da54d889c6
+Text-content-sha1: 784a9298366863da2b65ebf82b4e1123755a2421
+Content-length: 37
+
+PROPS-END
+This is the file 'lambda'.
+
+
+Node-path: A/C
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G/pi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: adddfc3e6b605b5f90ceeab11b4e8ab6
+Text-content-sha1: 411e258dc14b42701fdc29b75f653e93f8686415
+Content-length: 33
+
+PROPS-END
+This is the file 'pi'.
+
+
+Node-path: A/D/G/rho
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 82f2211cf4ab22e3555fc7b835fbc604
+Text-content-sha1: 56388a031dffbf9df7c32e1f299b1d5d7ef60881
+Content-length: 34
+
+PROPS-END
+This is the file 'rho'.
+
+
+Node-path: A/D/G/tau
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 9936e2716e469bb686deb98c280ead58
+Text-content-sha1: 62e8c07d56bee94ea4577e80414fa8805aaf0175
+Content-length: 34
+
+PROPS-END
+This is the file 'tau'.
+
+
+Node-path: A/D/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/H/chi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 8f5ebad6d1f7775c2682e54417cbe4d3
+Text-content-sha1: abeac1bf62099ab66b44779198dc19f40e3244f4
+Content-length: 34
+
+PROPS-END
+This is the file 'chi'.
+
+
+Node-path: A/D/H/omega
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: fe4ec8bdd3d2056db4f55b474a10fadc
+Text-content-sha1: c06e671bf15a6af55086176a0931d3b5034c82e6
+Content-length: 36
+
+PROPS-END
+This is the file 'omega'.
+
+
+Node-path: A/D/H/psi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: e81f8f68ba50e749c200cb3c9ce5d2b1
+Text-content-sha1: 9c438bde39e8ccbbd366df2638e3cb6700950204
+Content-length: 34
+
+PROPS-END
+This is the file 'psi'.
+
+
+Node-path: A/D/gamma
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: 412138bd677d64cd1c32fafbffe6245d
+Text-content-sha1: 74b75d7f2e1a0292f17d5a57c570bd89783f5d1c
+Content-length: 36
+
+PROPS-END
+This is the file 'gamma'.
+
+
+Node-path: A/mu
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: baf78ae06a2d5b7d9554c5f1280d3fa8
+Text-content-sha1: b4d00c56351d1a752e24d839d41a362d8da4a4c7
+Content-length: 33
+
+PROPS-END
+This is the file 'mu'.
+
+
+Node-path: iota
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-content-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+Content-length: 35
+
+PROPS-END
+This is the file 'iota'.
+
+
+Revision-number: 2
+Prop-content-length: 68
+Content-length: 68
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 12
+Create trunk
+PROPS-END
+
+Node-path: subversion
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/branches
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/tags
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/trunk/A
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: A
+
+
+Node-path: subversion/trunk/iota
+Node-kind: file
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: iota
+Text-copy-source-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-copy-source-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+
+
+Node-path: A
+Node-action: delete
+
+
+Node-path: iota
+Node-action: delete
+
+
+Revision-number: 3
+Prop-content-length: 87
+Content-length: 87
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 31
+Create branch, with STATUS file
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 2
+Node-copyfrom-path: subversion/trunk
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 0
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Text-content-sha1: da39a3ee5e6b4b0d3255bfef95601890afd80709
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 4
+Prop-content-length: 68
+Content-length: 68
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 12
+First change
+PROPS-END
+
+Node-path: subversion/trunk/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 38
+Text-content-md5: 67f471c2ecc2c9e561d122d6e6b0f847
+Text-content-sha1: 750accb6e7f880a1d05ce725c19eb60183bb4b26
+Content-length: 38
+
+This is the file 'iota'.
+First change
+
+
+Revision-number: 5
+Prop-content-length: 69
+Content-length: 69
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 13
+Second change
+PROPS-END
+
+Node-path: subversion/trunk/A/mu
+Node-kind: file
+Node-action: change
+Text-content-length: 37
+Text-content-md5: eab751301b4e650c83324dfef4aad667
+Text-content-sha1: ab36cad564c7c50dec5ac1eb0bf879cf4e3a5f99
+Content-length: 37
+
+This is the file 'mu'.
+Second change
+
+
+Revision-number: 6
+Prop-content-length: 73
+Content-length: 73
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 17
+Nominate a group.
+PROPS-END
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 246
+Text-content-md5: 50068058cd9700828164f97c8bc9e44e
+Text-content-sha1: 02f8ed7e3256e1eabd302b8f5b6e35000e2d4ce8
+Content-length: 246
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+ * r4, r5
+ default logsummary
+ Votes:
+ +1: jrandom
+
+
+
+Revision-number: 7
+Prop-content-length: 146
+Content-length: 146
+
+K 10
+svn:author
+V 6
+daniel
+K 7
+svn:log
+V 91
+Merge the r4 group from trunk:
+
+ * r4, r5
+ default logsummary
+ Votes:
+ +1: jrandom
+
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: change
+Prop-content-length: 56
+Content-length: 56
+
+K 13
+svn:mergeinfo
+V 21
+/subversion/trunk:4-5
+PROPS-END
+
+
+Node-path: branch/A/mu
+Node-kind: file
+Node-action: change
+Text-content-length: 37
+Text-content-md5: eab751301b4e650c83324dfef4aad667
+Text-content-sha1: ab36cad564c7c50dec5ac1eb0bf879cf4e3a5f99
+Content-length: 37
+
+This is the file 'mu'.
+Second change
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 185
+Text-content-md5: 6f71fec92afeaa5c1ebe02349f548ca9
+Text-content-sha1: eece02003d9c51610249e3fdd0d4e191e02ba3b7
+Content-length: 185
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+
+Node-path: branch/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 38
+Text-content-md5: 67f471c2ecc2c9e561d122d6e6b0f847
+Text-content-sha1: 750accb6e7f880a1d05ce725c19eb60183bb4b26
+Content-length: 38
+
+This is the file 'iota'.
+First change
+
+
diff --git a/tools/dist/backport_tests_data/backport_two_approveds.dump b/tools/dist/backport_tests_data/backport_two_approveds.dump
new file mode 100644
index 0000000..c4349b2
--- /dev/null
+++ b/tools/dist/backport_tests_data/backport_two_approveds.dump
@@ -0,0 +1,961 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 76cee987-25c9-4d6c-ad40-000000000002
+
+Revision-number: 0
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 83
+Content-length: 83
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 27
+Log message for revision 1.
+PROPS-END
+
+Node-path: A
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E/alpha
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: d1fa4a3ced98961674a441930a51f2d3
+Text-content-sha1: b347d1da69df9a6a70433ceeaa0d46c8483e8c03
+Content-length: 36
+
+PROPS-END
+This is the file 'alpha'.
+
+
+Node-path: A/B/E/beta
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 67c756078f24f946f6ec2d00d02f50e1
+Text-content-sha1: d001710ac8e622c6d1fe59b1e265a3908acdd2a3
+Content-length: 35
+
+PROPS-END
+This is the file 'beta'.
+
+
+Node-path: A/B/F
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/lambda
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 27
+Text-content-md5: 911c7a8d869b8c1e566f57da54d889c6
+Text-content-sha1: 784a9298366863da2b65ebf82b4e1123755a2421
+Content-length: 37
+
+PROPS-END
+This is the file 'lambda'.
+
+
+Node-path: A/C
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G/pi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: adddfc3e6b605b5f90ceeab11b4e8ab6
+Text-content-sha1: 411e258dc14b42701fdc29b75f653e93f8686415
+Content-length: 33
+
+PROPS-END
+This is the file 'pi'.
+
+
+Node-path: A/D/G/rho
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 82f2211cf4ab22e3555fc7b835fbc604
+Text-content-sha1: 56388a031dffbf9df7c32e1f299b1d5d7ef60881
+Content-length: 34
+
+PROPS-END
+This is the file 'rho'.
+
+
+Node-path: A/D/G/tau
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 9936e2716e469bb686deb98c280ead58
+Text-content-sha1: 62e8c07d56bee94ea4577e80414fa8805aaf0175
+Content-length: 34
+
+PROPS-END
+This is the file 'tau'.
+
+
+Node-path: A/D/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/H/chi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 8f5ebad6d1f7775c2682e54417cbe4d3
+Text-content-sha1: abeac1bf62099ab66b44779198dc19f40e3244f4
+Content-length: 34
+
+PROPS-END
+This is the file 'chi'.
+
+
+Node-path: A/D/H/omega
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: fe4ec8bdd3d2056db4f55b474a10fadc
+Text-content-sha1: c06e671bf15a6af55086176a0931d3b5034c82e6
+Content-length: 36
+
+PROPS-END
+This is the file 'omega'.
+
+
+Node-path: A/D/H/psi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: e81f8f68ba50e749c200cb3c9ce5d2b1
+Text-content-sha1: 9c438bde39e8ccbbd366df2638e3cb6700950204
+Content-length: 34
+
+PROPS-END
+This is the file 'psi'.
+
+
+Node-path: A/D/gamma
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: 412138bd677d64cd1c32fafbffe6245d
+Text-content-sha1: 74b75d7f2e1a0292f17d5a57c570bd89783f5d1c
+Content-length: 36
+
+PROPS-END
+This is the file 'gamma'.
+
+
+Node-path: A/mu
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: baf78ae06a2d5b7d9554c5f1280d3fa8
+Text-content-sha1: b4d00c56351d1a752e24d839d41a362d8da4a4c7
+Content-length: 33
+
+PROPS-END
+This is the file 'mu'.
+
+
+Node-path: iota
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-content-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+Content-length: 35
+
+PROPS-END
+This is the file 'iota'.
+
+
+Revision-number: 2
+Prop-content-length: 68
+Content-length: 68
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 12
+Create trunk
+PROPS-END
+
+Node-path: subversion
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/branches
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/tags
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/trunk/A
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: A
+
+
+Node-path: subversion/trunk/iota
+Node-kind: file
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: iota
+Text-copy-source-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-copy-source-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+
+
+Node-path: A
+Node-action: delete
+
+
+Node-path: iota
+Node-action: delete
+
+
+Revision-number: 3
+Prop-content-length: 87
+Content-length: 87
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 31
+Create branch, with STATUS file
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 2
+Node-copyfrom-path: subversion/trunk
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 0
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Text-content-sha1: da39a3ee5e6b4b0d3255bfef95601890afd80709
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 4
+Prop-content-length: 68
+Content-length: 68
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 12
+First change
+PROPS-END
+
+Node-path: subversion/trunk/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 38
+Text-content-md5: 67f471c2ecc2c9e561d122d6e6b0f847
+Text-content-sha1: 750accb6e7f880a1d05ce725c19eb60183bb4b26
+Content-length: 38
+
+This is the file 'iota'.
+First change
+
+
+Revision-number: 5
+Prop-content-length: 69
+Content-length: 69
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 13
+Second change
+PROPS-END
+
+Node-path: subversion/trunk/A/mu
+Node-kind: file
+Node-action: change
+Text-content-length: 37
+Text-content-md5: eab751301b4e650c83324dfef4aad667
+Text-content-sha1: ab36cad564c7c50dec5ac1eb0bf879cf4e3a5f99
+Content-length: 37
+
+This is the file 'mu'.
+Second change
+
+
+Revision-number: 6
+Prop-content-length: 82
+Content-length: 82
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 26
+Nominate r4. Nominate r5.
+PROPS-END
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 298
+Text-content-md5: 4ebc11d7e1ec3a5cb75d3cfdcf0c1399
+Text-content-sha1: 86dd246b9072d6baeaac50f58ee2fa6444f6f889
+Content-length: 298
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+ * r4
+ default logsummary
+ Votes:
+ +1: jrandom
+
+ * r5
+ default logsummary
+ Votes:
+ +1: jrandom
+
+
+
+Revision-number: 7
+Prop-content-length: 132
+Content-length: 132
+
+K 10
+svn:author
+V 6
+daniel
+K 7
+svn:log
+V 77
+Merge r4 from trunk:
+
+ * r4
+ default logsummary
+ Votes:
+ +1: jrandom
+
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: change
+Prop-content-length: 54
+Content-length: 54
+
+K 13
+svn:mergeinfo
+V 19
+/subversion/trunk:4
+PROPS-END
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 241
+Text-content-md5: cd8d55451e22cd8f83599bc64e67b515
+Text-content-sha1: 6b54b54b2711d0de2f252f34c26f2ac8f222ce35
+Content-length: 241
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+ * r5
+ default logsummary
+ Votes:
+ +1: jrandom
+
+
+Node-path: branch/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 38
+Text-content-md5: 67f471c2ecc2c9e561d122d6e6b0f847
+Text-content-sha1: 750accb6e7f880a1d05ce725c19eb60183bb4b26
+Content-length: 38
+
+This is the file 'iota'.
+First change
+
+
+Revision-number: 8
+Prop-content-length: 132
+Content-length: 132
+
+K 10
+svn:author
+V 6
+daniel
+K 7
+svn:log
+V 77
+Merge r5 from trunk:
+
+ * r5
+ default logsummary
+ Votes:
+ +1: jrandom
+
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: change
+Prop-content-length: 56
+Content-length: 56
+
+K 13
+svn:mergeinfo
+V 21
+/subversion/trunk:4-5
+PROPS-END
+
+
+Node-path: branch/A/mu
+Node-kind: file
+Node-action: change
+Text-content-length: 37
+Text-content-md5: eab751301b4e650c83324dfef4aad667
+Text-content-sha1: ab36cad564c7c50dec5ac1eb0bf879cf4e3a5f99
+Content-length: 37
+
+This is the file 'mu'.
+Second change
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 185
+Text-content-md5: 6f71fec92afeaa5c1ebe02349f548ca9
+Text-content-sha1: eece02003d9c51610249e3fdd0d4e191e02ba3b7
+Content-length: 185
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+
+Revision-number: 9
+Prop-content-length: 74
+Content-length: 74
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 18
+Revert the merges.
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: change
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: branch/A/mu
+Node-kind: file
+Node-action: change
+Text-content-length: 23
+Text-content-md5: baf78ae06a2d5b7d9554c5f1280d3fa8
+Text-content-sha1: b4d00c56351d1a752e24d839d41a362d8da4a4c7
+Content-length: 23
+
+This is the file 'mu'.
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 298
+Text-content-md5: 4ebc11d7e1ec3a5cb75d3cfdcf0c1399
+Text-content-sha1: 86dd246b9072d6baeaac50f58ee2fa6444f6f889
+Content-length: 298
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+ * r4
+ default logsummary
+ Votes:
+ +1: jrandom
+
+ * r5
+ default logsummary
+ Votes:
+ +1: jrandom
+
+
+
+Node-path: branch/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 25
+Text-content-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-content-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+Content-length: 25
+
+This is the file 'iota'.
+
+
+Revision-number: 10
+Prop-content-length: 78
+Content-length: 78
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 22
+Third change on trunk.
+PROPS-END
+
+Node-path: subversion/trunk/A
+Node-action: delete
+
+
+Revision-number: 11
+Prop-content-length: 69
+Content-length: 69
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 13
+Nominate r10.
+PROPS-END
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 355
+Text-content-md5: cc8dd910efc8d555f5dc51e5c331b403
+Text-content-sha1: c67ec7e762d8f7dfa6d2b876e540a6038781171f
+Content-length: 355
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+ * r4
+ default logsummary
+ Votes:
+ +1: jrandom
+
+ * r5
+ default logsummary
+ Votes:
+ +1: jrandom
+
+ * r10
+ default logsummary
+ Votes:
+ +1: jrandom
+
+
+
+Revision-number: 12
+Prop-content-length: 132
+Content-length: 132
+
+K 10
+svn:author
+V 6
+daniel
+K 7
+svn:log
+V 77
+Merge r4 from trunk:
+
+ * r4
+ default logsummary
+ Votes:
+ +1: jrandom
+
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: change
+Prop-content-length: 54
+Content-length: 54
+
+K 13
+svn:mergeinfo
+V 19
+/subversion/trunk:4
+PROPS-END
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 298
+Text-content-md5: 41e1f764781ee0b7874dc92607e9b9f6
+Text-content-sha1: 19e57ad83073cc50d86033ab0f03d3b8574c68fc
+Content-length: 298
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+ * r5
+ default logsummary
+ Votes:
+ +1: jrandom
+
+ * r10
+ default logsummary
+ Votes:
+ +1: jrandom
+
+
+Node-path: branch/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 38
+Text-content-md5: 67f471c2ecc2c9e561d122d6e6b0f847
+Text-content-sha1: 750accb6e7f880a1d05ce725c19eb60183bb4b26
+Content-length: 38
+
+This is the file 'iota'.
+First change
+
+
+Revision-number: 13
+Prop-content-length: 132
+Content-length: 132
+
+K 10
+svn:author
+V 6
+daniel
+K 7
+svn:log
+V 77
+Merge r5 from trunk:
+
+ * r5
+ default logsummary
+ Votes:
+ +1: jrandom
+
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: change
+Prop-content-length: 56
+Content-length: 56
+
+K 13
+svn:mergeinfo
+V 21
+/subversion/trunk:4-5
+PROPS-END
+
+
+Node-path: branch/A/mu
+Node-kind: file
+Node-action: change
+Text-content-length: 37
+Text-content-md5: eab751301b4e650c83324dfef4aad667
+Text-content-sha1: ab36cad564c7c50dec5ac1eb0bf879cf4e3a5f99
+Content-length: 37
+
+This is the file 'mu'.
+Second change
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 242
+Text-content-md5: 30f964a922fe4e9f01b25a274c0a8efb
+Text-content-sha1: f1180ea711cbbbbfb2af52cac509da15313ca319
+Content-length: 242
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+ * r10
+ default logsummary
+ Votes:
+ +1: jrandom
+
+
+Revision-number: 14
+Prop-content-length: 134
+Content-length: 134
+
+K 10
+svn:author
+V 6
+daniel
+K 7
+svn:log
+V 79
+Merge r10 from trunk:
+
+ * r10
+ default logsummary
+ Votes:
+ +1: jrandom
+
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: change
+Prop-content-length: 59
+Content-length: 59
+
+K 13
+svn:mergeinfo
+V 24
+/subversion/trunk:4-5,10
+PROPS-END
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 185
+Text-content-md5: 6f71fec92afeaa5c1ebe02349f548ca9
+Text-content-sha1: eece02003d9c51610249e3fdd0d4e191e02ba3b7
+Content-length: 185
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+
+Node-path: branch/A
+Node-action: delete
+
+
diff --git a/tools/dist/backport_tests_data/backport_unicode_entry.dump b/tools/dist/backport_tests_data/backport_unicode_entry.dump
new file mode 100644
index 0000000..03a50f9
--- /dev/null
+++ b/tools/dist/backport_tests_data/backport_unicode_entry.dump
@@ -0,0 +1,524 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 76cee987-25c9-4d6c-ad40-000000000012
+
+Revision-number: 0
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 83
+Content-length: 83
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 27
+Log message for revision 1.
+PROPS-END
+
+Node-path: A
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E/alpha
+Node-kind: file
+Node-action: add
+Text-content-md5: d1fa4a3ced98961674a441930a51f2d3
+Text-content-sha1: b347d1da69df9a6a70433ceeaa0d46c8483e8c03
+Prop-content-length: 10
+Text-content-length: 26
+Content-length: 36
+
+PROPS-END
+This is the file 'alpha'.
+
+
+Node-path: A/B/E/beta
+Node-kind: file
+Node-action: add
+Text-content-md5: 67c756078f24f946f6ec2d00d02f50e1
+Text-content-sha1: d001710ac8e622c6d1fe59b1e265a3908acdd2a3
+Prop-content-length: 10
+Text-content-length: 25
+Content-length: 35
+
+PROPS-END
+This is the file 'beta'.
+
+
+Node-path: A/B/F
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/lambda
+Node-kind: file
+Node-action: add
+Text-content-md5: 911c7a8d869b8c1e566f57da54d889c6
+Text-content-sha1: 784a9298366863da2b65ebf82b4e1123755a2421
+Prop-content-length: 10
+Text-content-length: 27
+Content-length: 37
+
+PROPS-END
+This is the file 'lambda'.
+
+
+Node-path: A/C
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G/pi
+Node-kind: file
+Node-action: add
+Text-content-md5: adddfc3e6b605b5f90ceeab11b4e8ab6
+Text-content-sha1: 411e258dc14b42701fdc29b75f653e93f8686415
+Prop-content-length: 10
+Text-content-length: 23
+Content-length: 33
+
+PROPS-END
+This is the file 'pi'.
+
+
+Node-path: A/D/G/rho
+Node-kind: file
+Node-action: add
+Text-content-md5: 82f2211cf4ab22e3555fc7b835fbc604
+Text-content-sha1: 56388a031dffbf9df7c32e1f299b1d5d7ef60881
+Prop-content-length: 10
+Text-content-length: 24
+Content-length: 34
+
+PROPS-END
+This is the file 'rho'.
+
+
+Node-path: A/D/G/tau
+Node-kind: file
+Node-action: add
+Text-content-md5: 9936e2716e469bb686deb98c280ead58
+Text-content-sha1: 62e8c07d56bee94ea4577e80414fa8805aaf0175
+Prop-content-length: 10
+Text-content-length: 24
+Content-length: 34
+
+PROPS-END
+This is the file 'tau'.
+
+
+Node-path: A/D/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/H/chi
+Node-kind: file
+Node-action: add
+Text-content-md5: 8f5ebad6d1f7775c2682e54417cbe4d3
+Text-content-sha1: abeac1bf62099ab66b44779198dc19f40e3244f4
+Prop-content-length: 10
+Text-content-length: 24
+Content-length: 34
+
+PROPS-END
+This is the file 'chi'.
+
+
+Node-path: A/D/H/omega
+Node-kind: file
+Node-action: add
+Text-content-md5: fe4ec8bdd3d2056db4f55b474a10fadc
+Text-content-sha1: c06e671bf15a6af55086176a0931d3b5034c82e6
+Prop-content-length: 10
+Text-content-length: 26
+Content-length: 36
+
+PROPS-END
+This is the file 'omega'.
+
+
+Node-path: A/D/H/psi
+Node-kind: file
+Node-action: add
+Text-content-md5: e81f8f68ba50e749c200cb3c9ce5d2b1
+Text-content-sha1: 9c438bde39e8ccbbd366df2638e3cb6700950204
+Prop-content-length: 10
+Text-content-length: 24
+Content-length: 34
+
+PROPS-END
+This is the file 'psi'.
+
+
+Node-path: A/D/gamma
+Node-kind: file
+Node-action: add
+Text-content-md5: 412138bd677d64cd1c32fafbffe6245d
+Text-content-sha1: 74b75d7f2e1a0292f17d5a57c570bd89783f5d1c
+Prop-content-length: 10
+Text-content-length: 26
+Content-length: 36
+
+PROPS-END
+This is the file 'gamma'.
+
+
+Node-path: A/mu
+Node-kind: file
+Node-action: add
+Text-content-md5: baf78ae06a2d5b7d9554c5f1280d3fa8
+Text-content-sha1: b4d00c56351d1a752e24d839d41a362d8da4a4c7
+Prop-content-length: 10
+Text-content-length: 23
+Content-length: 33
+
+PROPS-END
+This is the file 'mu'.
+
+
+Node-path: iota
+Node-kind: file
+Node-action: add
+Text-content-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-content-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+Prop-content-length: 10
+Text-content-length: 25
+Content-length: 35
+
+PROPS-END
+This is the file 'iota'.
+
+
+Revision-number: 2
+Prop-content-length: 68
+Content-length: 68
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 12
+Create trunk
+PROPS-END
+
+Node-path: subversion
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/branches
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/tags
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/trunk/A
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: A
+
+
+Node-path: subversion/trunk/iota
+Node-kind: file
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: iota
+Text-copy-source-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-copy-source-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+
+
+Node-path: A
+Node-action: delete
+
+
+Node-path: iota
+Node-action: delete
+
+
+Revision-number: 3
+Prop-content-length: 87
+Content-length: 87
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 31
+Create branch, with STATUS file
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 2
+Node-copyfrom-path: subversion/trunk
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: add
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Text-content-sha1: da39a3ee5e6b4b0d3255bfef95601890afd80709
+Prop-content-length: 10
+Text-content-length: 0
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 4
+Prop-content-length: 68
+Content-length: 68
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 12
+First change
+PROPS-END
+
+Node-path: subversion/trunk/iota
+Node-kind: file
+Node-action: change
+Text-content-md5: 67f471c2ecc2c9e561d122d6e6b0f847
+Text-content-sha1: 750accb6e7f880a1d05ce725c19eb60183bb4b26
+Text-content-length: 38
+Content-length: 38
+
+This is the file 'iota'.
+First change
+
+
+Revision-number: 5
+Prop-content-length: 69
+Content-length: 69
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 13
+Second change
+PROPS-END
+
+Node-path: subversion/trunk/A/mu
+Node-kind: file
+Node-action: change
+Text-content-md5: eab751301b4e650c83324dfef4aad667
+Text-content-sha1: ab36cad564c7c50dec5ac1eb0bf879cf4e3a5f99
+Text-content-length: 37
+Content-length: 37
+
+This is the file 'mu'.
+Second change
+
+
+Revision-number: 6
+Prop-content-length: 67
+Content-length: 67
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 11
+Nominate r4
+PROPS-END
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-md5: d231ec40c1bf173586fb850acb0df85e
+Text-content-sha1: 22e99a1d851ed1e4b5dfed1f18934b039d7f8c22
+Text-content-length: 263
+Content-length: 263
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+ * r4
+ default logsummary
+ Notes: Hello 🗺
+ Votes:
+ +1: jrandom
+
+
+
+Revision-number: 7
+Prop-content-length: 153
+Content-length: 153
+
+K 10
+svn:author
+V 6
+daniel
+K 7
+svn:log
+V 98
+Merge r4 from trunk:
+
+ * r4
+ default logsummary
+ Notes: Hello 🗺
+ Votes:
+ +1: jrandom
+
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: change
+Prop-content-length: 54
+Content-length: 54
+
+K 13
+svn:mergeinfo
+V 19
+/subversion/trunk:4
+PROPS-END
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-md5: 6f71fec92afeaa5c1ebe02349f548ca9
+Text-content-sha1: eece02003d9c51610249e3fdd0d4e191e02ba3b7
+Text-content-length: 185
+Content-length: 185
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+
+Node-path: branch/iota
+Node-kind: file
+Node-action: change
+Text-content-md5: 67f471c2ecc2c9e561d122d6e6b0f847
+Text-content-sha1: 750accb6e7f880a1d05ce725c19eb60183bb4b26
+Text-content-length: 38
+Content-length: 38
+
+This is the file 'iota'.
+First change
+
+
diff --git a/tools/dist/backport_tests_pl.py b/tools/dist/backport_tests_pl.py
new file mode 100755
index 0000000..24d0a41
--- /dev/null
+++ b/tools/dist/backport_tests_pl.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+#
+# backport_tests_pl.py: Test backport.pl
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+import os
+
+BACKPORT_PL = os.path.abspath(os.path.join(os.path.dirname(__file__),
+ 'backport.pl'))
+
+def run_backport(sbox, error_expected=False, extra_env=[]):
+ """Run backport.pl. EXTRA_ENV is a list of key=value pairs (str) to set in
+ the child's environment. ERROR_EXPECTED is propagated to run_command()."""
+ # TODO: if the test is run in verbose mode, pass DEBUG=1 in the environment,
+ # and pass error_expected=True to run_command() to not croak on
+ # stderr output from the child (because it uses 'sh -x').
+ args = [
+ '/usr/bin/env',
+ 'SVN=' + svntest.main.svn_binary,
+ 'YES=1', 'MAY_COMMIT=1', 'AVAILID=jrandom',
+ ] + list(extra_env) + [
+ 'perl', BACKPORT_PL,
+ ]
+ with chdir(sbox.ospath('branch')):
+ return svntest.main.run_command(args[0], error_expected, False, *(args[1:]))
+
+def run_conflicter(sbox, error_expected=False):
+ "Run the conflicts detector. See run_backport() for arguments."
+ return run_backport(sbox, error_expected, ["MAY_COMMIT=0"])
+
+
+execfile("backport_tests.py")
diff --git a/tools/dist/backport_tests_py.py b/tools/dist/backport_tests_py.py
new file mode 100755
index 0000000..4c8bfec
--- /dev/null
+++ b/tools/dist/backport_tests_py.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+#
+# backport_tests_py.py: Test backport.py
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+import os
+
+APPROVED_PY = os.path.abspath(os.path.join(os.path.dirname(__file__),
+ 'merge-approved-backports.py'))
+CONFLICTER_PY = os.path.abspath(os.path.join(os.path.dirname(__file__),
+ 'detect-conflicting-backports.py'))
+
+def run_backport(sbox, error_expected=False):
+ "Run the backport.py auto-merger."
+ args = [
+ '/usr/bin/env',
+ 'SVN=' + svntest.main.svn_binary,
+ 'python3', APPROVED_PY,
+ ]
+ with chdir(sbox.ospath('branch')):
+ return svntest.main.run_command(args[0], error_expected, False, *(args[1:]))
+
+def run_conflicter(sbox, error_expected=False):
+ "Run the backport.py conflicts detector."
+ args = [
+ '/usr/bin/env',
+ 'SVN=' + svntest.main.svn_binary,
+ 'python3', CONFLICTER_PY,
+ ]
+ with chdir(sbox.ospath('branch')):
+ return svntest.main.run_command(args[0], error_expected, False, *(args[1:]))
+
+execfile("backport_tests.py")
diff --git a/tools/dist/checksums.py b/tools/dist/checksums.py
new file mode 100755
index 0000000..6ac7231
--- /dev/null
+++ b/tools/dist/checksums.py
@@ -0,0 +1,113 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+# Check MD5 and SHA-1 and SHA-2 signatures of files, using
+# md5sums, sha1sums, and/or sha512sums as manifests
+# Replaces the 'md5sum', 'sha1sum', and 'sha512sum' commands
+# on systems that do not have them, such as Mac OS X or Windows.
+#
+# Usage: checksums.py [manifest]
+# where "os.path.basename(manifest)" is either "md5sums", "sha1sums",
+#   or "sha512sums"
+#
+# Tested with the following Python versions:
+# 2.4 2.5 2.6 2.7 3.2
+
+
+import os
+import shutil
+import sys
+
+try:
+ from hashlib import md5
+ from hashlib import sha1
+ from hashlib import sha512
+except ImportError:
+ from md5 import md5
+ from sha import sha as sha1
+
+
+class Digester(object):
+ BUFFER_SIZE = 1024*1024
+
+ def __init__(self, factory):
+ self.factory = factory
+ self.digest_size = factory().digest_size
+ self.hashfunc = None
+
+ def reset(self):
+ self.hashfunc = self.factory()
+
+ def write(self, data):
+ return self.hashfunc.update(data)
+
+ def hexdigest(self):
+ return self.hashfunc.hexdigest()
+
+
+def main(manipath):
+ basedir, manifest = os.path.split(manipath)
+
+ if manifest == 'md5sums':
+ sink = Digester(md5)
+ elif manifest == 'sha1sums':
+ sink = Digester(sha1)
+ elif manifest == 'sha512sums':
+ sink = Digester(sha512)
+ else:
+ raise ValueError('The name of the digest manifest must be '
+ "'md5sums', 'sha1sums', or 'sha512sums', not '%s'" % manifest)
+
+ # No 'with' statement in Python 2.4 ...
+ stream = None
+ try:
+ stream = open(manipath, 'r')
+ for line in stream:
+ sink.reset()
+ parse_digest(basedir, line.rstrip(), sink)
+ finally:
+ if stream is not None:
+ stream.close()
+
+
+def parse_digest(basedir, entry, sink):
+ length = 2 * sink.digest_size
+ expected = entry[:length].lower()
+ filename = entry[length + 2:]
+
+ # Still no 'with' statement in Python 2.4 ...
+ source = None
+ try:
+ source = open(os.path.join(basedir, filename), 'rb')
+ shutil.copyfileobj(source, sink, sink.BUFFER_SIZE)
+ actual = sink.hexdigest().lower()
+ finally:
+ if source is not None:
+ source.close()
+
+ if expected != actual:
+ raise ValueError('Mismatch: expected %s, actual %s: %s'
+ % (expected, actual, filename))
+ print('ok: %s %s' % (actual, filename))
+
+
+if __name__ == '__main__':
+ main(sys.argv[1])
diff --git a/tools/dist/detect-conflicting-backports.py b/tools/dist/detect-conflicting-backports.py
new file mode 100755
index 0000000..e9d00e5
--- /dev/null
+++ b/tools/dist/detect-conflicting-backports.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python3
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""\
+Conflicts detector script.
+
+This script is used by buildbot.
+
+Run this script from the root of a stable branch's working copy (e.g.,
+a working copy of /branches/1.9.x). This script will iterate the STATUS file,
+attempt to merge each entry therein (nothing will be committed), and exit
+non-zero if any merge produced a conflict.
+
+
+Conflicts caused by entry interdependencies
+-------------------------------------------
+
+Occasionally, a nomination is added to STATUS that is expected to conflict (for
+example, because it textually depends on another revision that is also
+nominated). To prevent false positive failures in such cases, the dependent
+entry may be annotated by a "Depends:" header, to signal to this script that
+the conflict is expected. Expected conflicts never cause a non-zero exit code.
+
+A "Depends:" header looks as follows:
+
+ * r42
+ Make some change.
+ Depends:
+ Requires the r40 group to be merged first.
+ Votes:
+ +1: jrandom
+
+The value of the header is not parsed; the script only cares about its presence
+or absence.
+"""
+
+import sys
+assert sys.version_info[0] == 3, "This script targets Python 3"
+
+import backport.status
+import backport.merger
+
+import collections
+import logging
+import re
+import subprocess
+
+logger = logging.getLogger(__name__)
+
+if sys.argv[1:]:
+ # Usage.
+ print(__doc__)
+ sys.exit(0)
+
+backport.merger.no_local_mods('./STATUS')
+sf = backport.status.StatusFile(open('./STATUS', encoding="UTF-8"))
+
+ERRORS = collections.defaultdict(list)
+
+# Main loop.
+for entry_para in sf.entries_paras():
+ entry = entry_para.entry()
+ # SVN_ERR_WC_FOUND_CONFLICT = 155015
+ backport.merger.run_svn_quiet(['update']) # TODO: what to do if this pulls in a STATUS mod?
+ backport.merger.merge(entry, 'svn: E155015' if entry.depends else None)
+
+ _, output, _ = backport.merger.run_svn(['status'])
+
+ # Pre-1.6 svn clients don't have the 7th column, so fake it.
+ if backport.merger.svn_version() < (1,6):
+ output = re.compile('^(......)', re.MULTILINE).sub(r'\1 ', output)
+
+ pattern = re.compile(r'(?:C......|.C.....|......C)\s(.*)', re.MULTILINE)
+ conflicts = pattern.findall(output)
+ if conflicts and not entry.depends:
+ if len(conflicts) == 1:
+ victims = conflicts[0]
+ else:
+ victims = '[{}]'.format(', '.join(conflicts))
+ ERRORS[entry].append("Conflicts on {}".format(victims))
+ sys.stderr.write(
+ "Conflicts merging {}!\n"
+ "\n"
+ "{}\n"
+ .format(entry.noun(), output)
+ )
+ subprocess.check_call([backport.merger.SVN, 'diff', '--'] + conflicts)
+ elif entry.depends and not conflicts:
+ # Not a warning since svn-role may commit the dependency without
+ # also committing the dependent in the same pass.
+ print("No conflicts merging {}, but conflicts were "
+ "expected ('Depends:' header set)".format(entry.noun()))
+ elif conflicts:
+ print("Conflicts found merging {}, as expected.".format(entry.noun()))
+ backport.merger.run_revert()
+
+# Summarize errors before exiting.
+if ERRORS:
+ warn = sys.stderr.write
+ warn("Warning summary\n")
+ warn("===============\n");
+ warn("\n");
+ for entry, warnings in ERRORS.items():
+ for warning in warnings:
+ title = entry.logsummarysummary()
+ warn('{} ({}): {}\n'.format(entry.id(), title, warning))
+ sys.exit(1)
diff --git a/tools/dist/dist.sh b/tools/dist/dist.sh
new file mode 100755
index 0000000..a1e0daf
--- /dev/null
+++ b/tools/dist/dist.sh
@@ -0,0 +1,416 @@
+#!/bin/sh
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+# USAGE: ./dist.sh -v VERSION -r REVISION -pr REPOS-PATH
+# [-alpha ALPHA_NUM|-beta BETA_NUM|-rc RC_NUM|-pre PRE_NUM]
+# [-apr PATH-TO-APR ] [-apru PATH-TO-APR-UTIL]
+# [-apri PATH-TO-APR-ICONV] [-neon PATH-TO-NEON]
+# [-serf PATH-TO-SERF] [-zlib PATH-TO-ZLIB]
+# [-sqlite PATH-TO-SQLITE] [-zip] [-sign]
+#
+# Create a distribution tarball, labelling it with the given VERSION.
+# The tarball will be constructed from the root located at REPOS-PATH,
+# in REVISION. For example, the command line:
+#
+# ./dist.sh -v 1.4.0 -r ????? -pr branches/1.4.x
+#
+# will create a 1.4.0 release tarball. Make sure you have apr,
+# apr-util, neon, serf, zlib and sqlite subdirectories in your current
+# working directory or specify the path to them with the -apr, -apru,
+# -neon, -serf, -sqlite or -zlib options. For example:
+# ./dist.sh -v 1.4.0 -r ????? -pr branches/1.4.x \
+# -apr ~/in-tree-libraries/apr-0.9.12 \
+# -apru ~/in-tree-libraries/apr-util-0.9.12 \
+# -neon ~/in-tree-libraries/neon-0.25.5 \
+# -zlib ~/in-tree-libraries/zlib-1.2.3
+#
+# Note that there is _no_ need to run dist.sh from a Subversion
+# working copy, so you may wish to create a dist-resources directory
+# containing the apr/, apr-util/, neon/, serf/, zlib/ and sqlite/
+# dependencies, and run dist.sh from that.
+#
+# When building alpha, beta or rc tarballs pass the appropriate flag
+# followed by a number. For example "-alpha 5", "-beta 3", "-rc 2".
+#
+# If none of the -alpha, -beta, -pre or -rc options is specified, a release
+# tarball will be built.
+#
+# To build a Windows zip file package, additionally pass -zip and the
+# path to apr-iconv with -apri.
+
+set -e
+
+USAGE="USAGE: ./dist.sh -v VERSION -r REVISION -pr REPOS-PATH \
+[-alpha ALPHA_NUM|-beta BETA_NUM|-rc RC_NUM|-pre PRE_NUM] \
+[-apr APR_PATH ] [-apru APR_UTIL_PATH] [-apri APR_ICONV_PATH] \
+[-neon NEON_PATH ] [-serf SERF_PATH] [-zlib ZLIB_PATH] \
+[-sqlite SQLITE_PATH] [-zip] [-sign]
+ EXAMPLES: ./dist.sh -v 0.36.0 -r 8278 -pr branches/foo
+ ./dist.sh -v 0.36.0 -r 8278 -pr trunk
+ ./dist.sh -v 0.36.0 -r 8282 -rs 8278 -pr tags/0.36.0
+ ./dist.sh -v 0.36.0 -r 8282 -rs 8278 -pr tags/0.36.0 -alpha 1
+ ./dist.sh -v 0.36.0 -r 8282 -rs 8278 -pr tags/0.36.0 -beta 1
+ ./dist.sh -v 0.36.0 -r 8282 -rs 8278 -pr tags/0.36.0 -pre 1
+ ./dist.sh -v 0.36.0 -r 8282 -rs 8278 -pr tags/0.36.0 -nightly r8282"
+
+# Let's check and set all the arguments
+ARG_PREV=""
+
+for ARG in $@
+do
+ if [ -n "$ARG_PREV" ]; then
+ case $ARG_PREV in
+ -v) VERSION="$ARG" ;;
+ -r) REVISION="$ARG" ;;
+ -pr) REPOS_PATH="$ARG" ;;
+ -alpha) ALPHA="$ARG" ;;
+ -beta) BETA="$ARG" ;;
+ -pre) PRE="$ARG" ;;
+ -nightly) NIGHTLY="$ARG" ;;
+ -rc) RC="$ARG" ;;
+ -apr) APR_PATH="$ARG" ;;
+ -apru) APRU_PATH="$ARG" ;;
+ -apri) APRI_PATH="$ARG" ;;
+ -zlib) ZLIB_PATH="$ARG" ;;
+ -sqlite) SQLITE_PATH="$ARG" ;;
+ -neon) NEON_PATH="$ARG" ;;
+ -serf) SERF_PATH="$ARG" ;;
+ esac
+ ARG_PREV=""
+ else
+ case $ARG in
+ -v|-r|-rs|-pr|-alpha|-beta|-pre|-rc|-apr|-apru|-apri|-zlib|-sqlite|-neon|-serf|-nightly)
+ ARG_PREV=$ARG
+ ;;
+ -zip) ZIP=1 ;;
+ -sign) SIGN=1 ;;
+ *)
+ echo " $USAGE"
+ exit 1
+ ;;
+ esac
+ fi
+done
+
+if [ -n "$ALPHA" ] && [ -n "$BETA" ] && [ -n "$NIGHTLY" ] && [ -n "$PRE" ] ||
+ [ -n "$ALPHA" ] && [ -n "$RC" ] && [ -n "$NIGHTLY" ] && [ -n "$PRE" ] ||
+ [ -n "$BETA" ] && [ -n "$RC" ] && [ -n "$NIGHTLY" ] && [ -n "$PRE" ] ||
+ [ -n "$ALPHA" ] && [ -n "$BETA" ] && [ -n "$RC" ] && [ -n "$PRE" ] ||
+ [ -n "$ALPHA" ] && [ -n "$BETA" ] && [ -n "$RC" ] && [ -n "$PRE" ]; then
+ echo " $USAGE"
+ exit 1
+elif [ -n "$ALPHA" ] ; then
+ VER_TAG="Alpha $ALPHA"
+ VER_NUMTAG="-alpha$ALPHA"
+elif [ -n "$BETA" ] ; then
+ VER_TAG="Beta $BETA"
+ VER_NUMTAG="-beta$BETA"
+elif [ -n "$RC" ] ; then
+ VER_TAG="Release Candidate $RC"
+ VER_NUMTAG="-rc$RC"
+elif [ -n "$NIGHTLY" ] ; then
+ VER_TAG="Nightly Build ($NIGHTLY)"
+ VER_NUMTAG="-nightly-$NIGHTLY"
+elif [ -n "$PRE" ] ; then
+ VER_TAG="Pre-release $PRE"
+ VER_NUMTAG="-pre$PRE"
+else
+ VER_TAG="r$REVISION"
+ VER_NUMTAG=""
+fi
+
+if [ -n "$ZIP" ] ; then
+ EXTRA_EXPORT_OPTIONS="--native-eol CRLF"
+fi
+
+if [ -z "$VERSION" ] || [ -z "$REVISION" ] || [ -z "$REPOS_PATH" ]; then
+ echo " $USAGE"
+ exit 1
+fi
+
+if [ -z "$APR_PATH" ]; then
+ APR_PATH='apr'
+fi
+
+if [ -z "$APRU_PATH" ]; then
+ APRU_PATH='apr-util'
+fi
+
+if [ -z "$NEON_PATH" ]; then
+ NEON_PATH='neon'
+fi
+
+if [ -z "$SERF_PATH" ]; then
+ SERF_PATH='serf'
+fi
+
+if [ -z "$APRI_PATH" ]; then
+ APRI_PATH='apr-iconv'
+fi
+
+if [ -z "$ZLIB_PATH" ]; then
+ ZLIB_PATH='zlib'
+fi
+
+if [ -z "$SQLITE_PATH" ]; then
+ SQLITE_PATH='sqlite-amalgamation'
+fi
+
+REPOS_PATH="`echo $REPOS_PATH | sed 's/^\/*//'`"
+
+# See comment when we 'roll' the tarballs as to why pax is required.
+type pax > /dev/null 2>&1
+if [ $? -ne 0 ] && [ -z "$ZIP" ]; then
+ echo "ERROR: pax could not be found"
+ exit 1
+fi
+
+DISTNAME="subversion-${VERSION}${VER_NUMTAG}"
+DIST_SANDBOX=.dist_sandbox
+DISTPATH="$DIST_SANDBOX/$DISTNAME"
+
+echo "Distribution will be named: $DISTNAME"
+echo " constructed from path: /$REPOS_PATH"
+echo " constructed from revision: $REVISION"
+
+rm -rf "$DIST_SANDBOX"
+mkdir "$DIST_SANDBOX"
+echo "Removed and recreated $DIST_SANDBOX"
+
+LC_ALL=C
+LANG=C
+TZ=UTC
+export LC_ALL
+export LANG
+export TZ
+
+echo "Exporting $REPOS_PATH r$REVISION into sandbox..."
+(cd "$DIST_SANDBOX" && \
+ ${SVN:-svn} export -q $EXTRA_EXPORT_OPTIONS \
+ "https://svn.apache.org/repos/asf/subversion/$REPOS_PATH"@"$REVISION" \
+ "$DISTNAME" --username none --password none)
+
+rm -f "$DISTPATH/STATUS"
+
+ver_major=`echo $VERSION | cut -d '.' -f 1`
+ver_minor=`echo $VERSION | cut -d '.' -f 2`
+ver_patch=`echo $VERSION | cut -d '.' -f 3`
+
+# Remove contrib/ from our distribution tarball. Some of it is of
+# unknown license, and usefulness.
+# (See http://svn.haxx.se/dev/archive-2009-04/0166.shtml for discussion.)
+if [ "$ver_major" -eq "1" -a "$ver_minor" -ge "7" ]; then
+ rm -rf "$DISTPATH/contrib"
+fi
+
+# Remove notes/ from our distribution tarball. It's large, but largely
+# blue-sky and out-of-date, and of questionable use to end users.
+if [ "$ver_major" -eq "1" -a "$ver_minor" -ge "7" ]; then
+ rm -rf "$DISTPATH/notes"
+fi
+
+# Remove packages/ from the tarball.
+# (See http://svn.haxx.se/dev/archive-2009-12/0205.shtml)
+if [ "$ver_major" -eq "1" -a "$ver_minor" -ge "7" ]; then
+ rm -rf "$DISTPATH/packages"
+fi
+
+# Remove www/ from the tarball for 1.6.x and earlier releases
+if [ "$ver_major" -eq "1" -a "$ver_minor" -le "6" ]; then
+ rm -rf "$DISTPATH/www"
+fi
+
+# Check for a recent enough Python
+# Instead of attempting to deal with various line ending issues, just export
+# the find_python script manually.
+${svn:-svn} export -q -r "$REVISION" \
+ "https://svn.apache.org/repos/asf/subversion/$REPOS_PATH/build/find_python.sh" \
+ --username none --password none "$DIST_SANDBOX/find_python.sh"
+PYTHON="`$DIST_SANDBOX/find_python.sh`"
+if test -z "$PYTHON"; then
+ echo "Python 2.4 or later is required to run dist.sh"
+ echo "If you have a suitable Python installed, but not on the"
+ echo "PATH, set the environment variable PYTHON to the full path"
+ echo "to the Python executable, and re-run dist.sh"
+ exit 1
+fi
+
+find "$DISTPATH" -name config.nice -print | xargs rm -f
+
+# Massage the new version number into svn_version.h. We need to do
+# this before running autogen.sh --release on the subversion code,
+# because otherwise svn_version.h's mtime makes SWIG files regenerate
+# on end-user's systems, when they should just be compiled by the
+# Release Manager and left at that.
+
+vsn_file="$DISTPATH/subversion/include/svn_version.h"
+if [ "$VERSION" != "trunk" ] && [ "$VERSION" != "nightly" ]; then
+ sed \
+ -e "/#define *SVN_VER_MAJOR/s/[0-9][0-9]*/$ver_major/" \
+ -e "/#define *SVN_VER_MINOR/s/[0-9][0-9]*/$ver_minor/" \
+ -e "/#define *SVN_VER_PATCH/s/[0-9][0-9]*/$ver_patch/" \
+ -e "/#define *SVN_VER_TAG/s/\".*\"/\" ($VER_TAG)\"/" \
+ -e "/#define *SVN_VER_NUMTAG/s/\".*\"/\"$VER_NUMTAG\"/" \
+ -e "/#define *SVN_VER_REVISION/s/[0-9][0-9]*/$REVISION/" \
+ < "$vsn_file" > "$vsn_file.tmp"
+else
+ # Don't munge the version number if we are creating a nightly trunk tarball
+ sed \
+ -e "/#define *SVN_VER_TAG/s/\".*\"/\" ($VER_TAG)\"/" \
+ -e "/#define *SVN_VER_NUMTAG/s/\".*\"/\"$VER_NUMTAG\"/" \
+ -e "/#define *SVN_VER_REVISION/s/[0-9]\\+/$REVISION/" \
+ < "$vsn_file" > "$vsn_file.tmp"
+fi
+mv -f "$vsn_file.tmp" "$vsn_file"
+
+echo "Creating svn_version.h.dist, for use in tagging matching tarball..."
+cp "$vsn_file" "svn_version.h.dist"
+
+# Don't run autogen.sh when we are building the Windows zip file.
+# Windows users don't need the files generated by this command,
+# especially not the generated projects or SWIG files.
+if [ -z "$ZIP" ] ; then
+ echo "Running ./autogen.sh in sandbox, to create ./configure ..."
+ (cd "$DISTPATH" && ./autogen.sh --release) || exit 1
+fi
+
+# Generate the .pot file, for use by translators.
+echo "Running po-update.sh in sandbox, to create subversion.pot..."
+# Can't use the po-update.sh in the packaged export since it might have CRLF
+# line endings, in which case it won't run. So first we export it again.
+${svn:-svn} export -q -r "$REVISION" \
+ "https://svn.apache.org/repos/asf/subversion/$REPOS_PATH/tools/po/po-update.sh" \
+ --username none --password none "$DIST_SANDBOX/po-update.sh"
+(cd "$DISTPATH" && ../po-update.sh pot) || exit 1
+
+# Pre-translate the various sql-derived header files
+echo "Generating SQL-derived headers..."
+for f in `find "$DISTPATH/subversion" -name '*.sql'`; do
+ $PYTHON $DISTPATH/build/transform_sql.py $f `echo $f | sed 's/\.[^\.]*$//'`.h
+done
+
+echo "Removing any autom4te.cache directories that might exist..."
+find "$DISTPATH" -depth -type d -name 'autom4te*.cache' -exec rm -rf {} \;
+
+if [ -z "$ZIP" ]; then
+ # Do not use tar, it's probably GNU tar which produces tar files that are
+ # not compliant with POSIX.1 when including filenames longer than 100 chars.
+ # Platforms without a tar that understands the GNU tar extension will not
+ # be able to extract the resulting tar file. Use pax to produce POSIX.1
+ # tar files.
+ echo "Rolling $DISTNAME.tar ..."
+ (cd "$DIST_SANDBOX" > /dev/null && pax -x ustar -w "$DISTNAME") > \
+ "$DISTNAME.tar"
+
+ echo "Compressing to $DISTNAME.tar.bz2 ..."
+ bzip2 -9fk "$DISTNAME.tar"
+
+ # Use the gzip -n flag - this prevents it from storing the original name of
+ # the .tar file, and far more importantly, the mtime of the .tar file, in the
+ # produced .tar.gz file. This is important, because it makes the gzip
+# encoding reproducible by anyone else who has a similar version of gzip,
+ # and also uses "gzip -9n". This means that committers who want to GPG-sign
+ # both the .tar.gz and the .tar.bz2 can download the .tar.bz2 (which is
+ # smaller), and locally generate an exact duplicate of the official .tar.gz
+ # file. This metadata is data on the temporary uncompressed tarball itself,
+ # not any of its contents, so there will be no effect on end-users.
+ echo "Compressing to $DISTNAME.tar.gz ..."
+ gzip -9nf "$DISTNAME.tar"
+else
+ echo "Rolling $DISTNAME.zip ..."
+ (cd "$DIST_SANDBOX" > /dev/null && zip -q -r - "$DISTNAME") > \
+ "$DISTNAME.zip"
+fi
+echo "Removing sandbox..."
+rm -rf "$DIST_SANDBOX"
+
+sign_file()
+{
+ if [ -n "$SIGN" ]; then
+ type gpg > /dev/null 2>&1
+ if [ $? -eq 0 ]; then
+ if test -n "$user"; then
+ args="--default-key $user"
+ fi
+ for ARG in $@
+ do
+ gpg --armor $args --detach-sign $ARG
+ done
+ else
+ type pgp > /dev/null 2>&1
+ if [ $? -eq 0 ]; then
+ if test -n "$user"; then
+ args="-u $user"
+ fi
+ for ARG in $@
+ do
+ pgp -sba $ARG $args
+ done
+ fi
+ fi
+ fi
+}
+
+# allow md5sum, sha1sum, and sha512sum tool names to be overridden
+[ -n "$MD5SUM" ] || MD5SUM=md5sum
+[ -n "$SHA1SUM" ] || SHA1SUM=sha1sum
+[ -n "$SHA512SUM" ] || SHA512SUM=sha512sum
+
+echo ""
+echo "Done:"
+if [ -z "$ZIP" ]; then
+ ls -l "$DISTNAME.tar.bz2" "$DISTNAME.tar.gz"
+ sign_file $DISTNAME.tar.gz $DISTNAME.tar.bz2
+ echo ""
+ echo "md5sums:"
+ $MD5SUM "$DISTNAME.tar.bz2" "$DISTNAME.tar.gz"
+ type $SHA1SUM > /dev/null 2>&1
+ if [ $? -eq 0 ]; then
+ echo ""
+ echo "sha1sums:"
+ $SHA1SUM "$DISTNAME.tar.bz2" "$DISTNAME.tar.gz"
+ fi
+ type $SHA512SUM > /dev/null 2>&1
+ if [ $? -eq 0 ]; then
+ echo ""
+ echo "sha512sums:"
+ $SHA512SUM "$DISTNAME.tar.bz2" "$DISTNAME.tar.gz"
+ fi
+else
+ ls -l "$DISTNAME.zip"
+ sign_file $DISTNAME.zip
+ echo ""
+ echo "md5sum:"
+ $MD5SUM "$DISTNAME.zip"
+ type $SHA1SUM > /dev/null 2>&1
+ if [ $? -eq 0 ]; then
+ echo ""
+ echo "sha1sum:"
+ $SHA1SUM "$DISTNAME.zip"
+ fi
+ type $SHA512SUM > /dev/null 2>&1
+ if [ $? -eq 0 ]; then
+ echo ""
+ echo "sha512sum:"
+ $SHA512SUM "$DISTNAME.zip"
+ fi
+fi
diff --git a/tools/dist/extract-for-examination.sh b/tools/dist/extract-for-examination.sh
new file mode 100755
index 0000000..1dfe7df
--- /dev/null
+++ b/tools/dist/extract-for-examination.sh
@@ -0,0 +1,37 @@
+#!/bin/sh
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+for i in *.tar.bz2; do
+ base=${i%.tar.bz2}-tar-bz2
+ mkdir $base
+ cd $base
+ tar -jxvf ../$i
+ cd ..
+done
+
+for i in *.zip; do
+ base=${i%.zip}-zip
+ mkdir $base
+ cd $base
+ unzip ../$i
+ cd ..
+done
diff --git a/tools/dist/merge-approved-backports.py b/tools/dist/merge-approved-backports.py
new file mode 100755
index 0000000..84278c1
--- /dev/null
+++ b/tools/dist/merge-approved-backports.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python3
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""\
+Automatic backport merging script.
+
+This script is run from cron. It may also be run interactively, however, it
+has no interactive features.
+
+Run this script from the root of a stable branch's working copy (e.g.,
+a working copy of /branches/1.9.x). This script will iterate the STATUS file
+and commit every nomination in the section "Approved changes".
+"""
+
+import sys
+assert sys.version_info[0] == 3, "This script targets Python 3"
+
+import backport.status
+import backport.merger
+
+if sys.argv[1:]:
+ # Usage.
+ print(__doc__)
+ sys.exit(0)
+
+backport.merger.no_local_mods('./STATUS')
+
+while True:
+ backport.merger.run_svn_quiet(['update'])
+ sf = backport.status.StatusFile(open('./STATUS', encoding="UTF-8"))
+ for entry_para in sf.entries_paras():
+ if entry_para.approved():
+ entry = entry_para.entry()
+ backport.merger.merge(entry, commit=True)
+ break # 'continue' the outer loop
+ else:
+ break
diff --git a/tools/dist/nightly.sh b/tools/dist/nightly.sh
new file mode 100755
index 0000000..b167ab3
--- /dev/null
+++ b/tools/dist/nightly.sh
@@ -0,0 +1,98 @@
+#!/bin/sh
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+set -e
+
+repo=http://svn.apache.org/repos/asf/subversion
+svn=svn
+olds=7
+
+# Parse our arguments
+while getopts "cd:t:s:o:" flag; do
+ case $flag in
+ d) dir="`cd $OPTARG && pwd`" ;; # abspath
+ c) clean="1" ;;
+ t) target="$OPTARG" ;;
+ s) svn="$OPTARG" ;;
+ o) olds="$OPTARG" ;;
+ esac
+done
+
+# Setup directories
+if [ -n "$dir" ]; then cd $dir; else dir="."; fi
+if [ -d "roll" ]; then rm -rf roll; fi
+mkdir roll
+if [ ! -n "$target" ]; then
+ if [ ! -d "target" ]; then mkdir target; fi
+ target="target"
+fi
+
+abscwd=`cd $dir; pwd`
+
+echo "Will place results in: $target"
+
+# get youngest
+head=`$svn info $repo/trunk | grep '^Revision' | cut -d ' ' -f 2`
+
+# Get the latest versions of the rolling scripts
+for i in release.py dist.sh
+do
+ $svn export --force -r $head $repo/trunk/tools/dist/$i@$head $dir/$i
+done
+# We also need ezt
+$svn export --force -r $head $repo/trunk/build/generator/ezt.py@$head $dir/ezt.py
+
+# Create the environment
+cd roll
+echo '----------------building environment------------------'
+../release.py --verbose --base-dir ${abscwd}/roll build-env trunk-nightly
+
+# Roll the tarballs
+echo '-------------------rolling tarball--------------------'
+../release.py --verbose --base-dir ${abscwd}/roll roll --branch trunk trunk-nightly $head
+cd ..
+
+# Create the information page
+echo '-------------------moving results---------------------'
+# ./release.py --base-dir ${abscwd}/roll post-candidates trunk-nightly $head \
+# --target $target
+if [ ! -d "$target/dist" ]; then mkdir "$target/dist"; fi
+if [ -d "$target/dist/r$head" ]; then rm -r "$target/dist/r$head"; fi
+mv roll/deploy $target/dist/r$head
+
+# Some static links for the most recent artifacts.
+ln -sf "r$head" "$target/dist/current"
+ls "$target/dist/r$head" | while read fname; do
+ ln -sf "r$head/$fname" "$target/dist/$fname"
+done
+
+# Clean up old results
+ls -t1 "$target/dist/" | sed -e "1,${olds}d" | while read d; do
+ rm -rf "$target/dist/$d"
+done
+
+# Optionally remove our working directory
+if [ -n "$clean" ]; then
+ echo '--------------------cleaning up-----------------------'
+ rm -rf roll
+fi
+
+echo '------------------------done--------------------------'
diff --git a/tools/dist/nominate.pl b/tools/dist/nominate.pl
new file mode 120000
index 0000000..411377e
--- /dev/null
+++ b/tools/dist/nominate.pl
@@ -0,0 +1 @@
+backport.pl \ No newline at end of file
diff --git a/tools/dist/rat-excludes b/tools/dist/rat-excludes
new file mode 100644
index 0000000..798a45a
--- /dev/null
+++ b/tools/dist/rat-excludes
@@ -0,0 +1,49 @@
+contrib/
+www/
+BUGS/
+CHANGES/
+COMMITTERS/
+HACKING/
+TRANSLATING/
+build/win32/empty.c
+build/config.guess
+build/config.sub
+build/generator/__init__.py
+build/generator/util/__init__.py
+build/install-sh
+doc/doxygen.conf
+notes/**
+packages/
+subversion/tests/cmdline/getopt_tests_data/*
+subversion/tests/cmdline/diff_tests_data/*
+subversion/bindings/swig/NOTES
+subversion/libsvn_fs_base/notes/TODO
+subversion/libsvn_fs_base/notes/fs-history
+subversion/libsvn_fs_base/notes/structure
+subversion/libsvn_fs_fs/structure
+subversion/libsvn_ra_svn/protocol
+subversion/bindings/javahl/doc/index.html
+subversion/bindings/swig/python/tests/trac/__init__.py
+subversion/bindings/swig/python/tests/trac/versioncontrol/__init__.py
+subversion/bindings/ctypes-python/TODO
+subversion/bindings/ctypes-python/test/test.dumpfile
+subversion/bindings/ctypes-python/csvn/__init__.py
+subversion/bindings/ctypes-python/csvn/ext/__init__.py
+subversion/tests/cmdline/svntest/err.py
+tools/buildbot/master/public_html/buildbot.css
+tools/dist/rat-excludes
+tools/dist/security/_gnupg.py
+tools/dist/templates/*.ezt
+tools/dev/iz/defect.dem
+tools/dev/iz/ff2csv.command
+tools/dev/benchmarks/suite1/crontab.entry
+tools/hook-scripts/mailer/tests/mailer-t1.output
+**/*.dump
+**/*.icns
+**/*.odp
+**/*.pal
+**/*.patch
+**/*.txt
+**/*.svg
+**/*.rtf
+**/*.example
diff --git a/tools/dist/release.py b/tools/dist/release.py
new file mode 100755
index 0000000..5518d6f
--- /dev/null
+++ b/tools/dist/release.py
@@ -0,0 +1,1367 @@
+#!/usr/bin/env python
+# python: coding=utf-8
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+
+# About this script:
+# This script is intended to simplify creating Subversion releases for
+# any of the supported release lines of Subversion.
+# It works well with our Apache infrastructure, and should make rolling,
+# posting, and announcing releases dirt simple.
+#
+# This script may be run on a number of platforms, but it is intended to
+# be run on people.apache.org. As such, it may have dependencies (such
+# as Python version) which may not be common, but are guaranteed to be
+# available on people.apache.org.
+
+# It'd be kind of nice to use the Subversion python bindings in this script,
+# but people.apache.org doesn't currently have them installed
+
+# Stuff we need
+import os
+import re
+import sys
+import glob
+import fnmatch
+import shutil
+import urllib2
+import hashlib
+import tarfile
+import logging
+import datetime
+import tempfile
+import operator
+import itertools
+import subprocess
+import argparse # standard in Python 2.7
+
+# Find ezt, using Subversion's copy, if there isn't one on the system.
+try:
+ import ezt
+except ImportError:
+ ezt_path = os.path.dirname(os.path.dirname(os.path.abspath(sys.path[0])))
+ ezt_path = os.path.join(ezt_path, 'build', 'generator')
+ sys.path.append(ezt_path)
+
+ import ezt
+ sys.path.remove(ezt_path)
+
+
+# Our required / recommended release tool versions by release branch
+tool_versions = {
+ 'trunk' : {
+ 'autoconf' : ['2.69',
+ '954bd69b391edc12d6a4a51a2dd1476543da5c6bbf05a95b59dc0dd6fd4c2969'],
+ 'libtool' : ['2.4.6',
+ 'e3bd4d5d3d025a36c21dd6af7ea818a2afcd4dfc1ea5a17b39d7854bcd0c06e3'],
+ 'swig' : ['3.0.10',
+ '2939aae39dec06095462f1b95ce1c958ac80d07b926e48871046d17c0094f44c'],
+ },
+ '1.10' : {
+ 'autoconf' : ['2.69',
+ '954bd69b391edc12d6a4a51a2dd1476543da5c6bbf05a95b59dc0dd6fd4c2969'],
+ 'libtool' : ['2.4.6',
+ 'e3bd4d5d3d025a36c21dd6af7ea818a2afcd4dfc1ea5a17b39d7854bcd0c06e3'],
+ 'swig' : ['3.0.10',
+ '2939aae39dec06095462f1b95ce1c958ac80d07b926e48871046d17c0094f44c'],
+ },
+ '1.9' : {
+ 'autoconf' : ['2.69',
+ '954bd69b391edc12d6a4a51a2dd1476543da5c6bbf05a95b59dc0dd6fd4c2969'],
+ 'libtool' : ['2.4.6',
+ 'e3bd4d5d3d025a36c21dd6af7ea818a2afcd4dfc1ea5a17b39d7854bcd0c06e3'],
+ 'swig' : ['2.0.12',
+ '65e13f22a60cecd7279c59882ff8ebe1ffe34078e85c602821a541817a4317f7'],
+ },
+ '1.8' : {
+ 'autoconf' : ['2.69',
+ '954bd69b391edc12d6a4a51a2dd1476543da5c6bbf05a95b59dc0dd6fd4c2969'],
+ 'libtool' : ['2.4.3',
+ '36b4881c1843d7585de9c66c4c3d9a067ed3a3f792bc670beba21f5a4960acdf'],
+ 'swig' : ['2.0.9',
+ '586954000d297fafd7e91d1ad31089cc7e249f658889d11a44605d3662569539'],
+ },
+}
+
+# The version that is our current recommended release
+# ### TODO: derive this from svn_version.h; see ../../build/getversion.py
+recommended_release = '1.9'
+
+# Some constants
+repos = 'https://svn.apache.org/repos/asf/subversion'
+secure_repos = 'https://svn.apache.org/repos/asf/subversion'
+dist_repos = 'https://dist.apache.org/repos/dist'
+dist_dev_url = dist_repos + '/dev/subversion'
+dist_release_url = dist_repos + '/release/subversion'
+KEYS = 'https://people.apache.org/keys/group/subversion.asc'
+extns = ['zip', 'tar.gz', 'tar.bz2']
+
+
+#----------------------------------------------------------------------
+# Utility functions
+
+class Version(object):
+    # Dots escaped: previously '.' matched any char between components.
+    regex = re.compile(r'(\d+)\.(\d+)\.(\d+)(?:-(?:(rc|alpha|beta)(\d+)))?')
+
+    def __init__(self, ver_str):
+        # Special case the 'trunk-nightly' version
+        if ver_str == 'trunk-nightly':
+            self.major = None
+            self.minor = None
+            self.patch = None
+            self.pre = 'nightly'
+            self.pre_num = None
+            self.base = 'nightly'
+            self.branch = 'trunk'
+            return
+
+        match = self.regex.search(ver_str)
+
+        if not match:
+            raise RuntimeError("Bad version string '%s'" % ver_str)
+
+        self.major = int(match.group(1))
+        self.minor = int(match.group(2))
+        self.patch = int(match.group(3))
+
+        if match.group(4):
+            self.pre = match.group(4)
+            self.pre_num = int(match.group(5))
+        else:
+            self.pre = None
+            self.pre_num = None
+
+        self.base = '%d.%d.%d' % (self.major, self.minor, self.patch)
+        self.branch = '%d.%d' % (self.major, self.minor)
+
+    def is_prerelease(self):
+        return self.pre != None
+
+    def is_recommended(self):
+        return self.branch == recommended_release
+
+    def get_download_anchor(self):
+        if self.is_prerelease():
+            return 'pre-releases'
+        else:
+            if self.is_recommended():
+                return 'recommended-release'
+            else:
+                return 'supported-releases'
+
+    def get_ver_tags(self, revnum):
+        # These get substituted into svn_version.h
+        ver_tag = ''
+        ver_numtag = ''
+        if self.pre == 'alpha':
+            ver_tag = '" (Alpha %d)"' % self.pre_num
+            ver_numtag = '"-alpha%d"' % self.pre_num
+        elif self.pre == 'beta':
+            ver_tag = '" (Beta %d)"' % self.pre_num  # was args.version.pre_num: NameError
+            ver_numtag = '"-beta%d"' % self.pre_num
+        elif self.pre == 'rc':
+            ver_tag = '" (Release Candidate %d)"' % self.pre_num
+            ver_numtag = '"-rc%d"' % self.pre_num
+        elif self.pre == 'nightly':
+            ver_tag = '" (Nightly Build r%d)"' % revnum
+            ver_numtag = '"-nightly-r%d"' % revnum
+        else:
+            ver_tag = '" (r%d)"' % revnum
+            ver_numtag = '""'
+        return (ver_tag, ver_numtag)
+
+    def __serialize(self):
+        return (self.major, self.minor, self.patch, self.pre, self.pre_num)
+
+    def __eq__(self, that):
+        return self.__serialize() == that.__serialize()
+
+    def __ne__(self, that):
+        return self.__serialize() != that.__serialize()
+
+    def __hash__(self):
+        return hash(self.__serialize())
+
+    def __lt__(self, that):
+        if self.major < that.major: return True
+        if self.major > that.major: return False
+
+        if self.minor < that.minor: return True
+        if self.minor > that.minor: return False
+
+        if self.patch < that.patch: return True
+        if self.patch > that.patch: return False
+
+        if not self.pre and not that.pre: return False
+        if not self.pre and that.pre: return False
+        if self.pre and not that.pre: return True
+
+        # We are both pre-releases
+        if self.pre != that.pre:
+            return self.pre < that.pre
+        else:
+            return self.pre_num < that.pre_num
+
+    def __str__(self):
+        "Return an SVN_VER_NUMBER-formatted string, or 'nightly'."
+        if self.pre:
+            if self.pre == 'nightly':
+                return 'nightly'
+            else:
+                extra = '-%s%d' % (self.pre, self.pre_num)
+        else:
+            extra = ''
+
+        return self.base + extra
+
+    def __repr__(self):
+
+        return "Version(%s)" % repr(str(self))
+
+def get_prefix(base_dir):
+ return os.path.join(base_dir, 'prefix')
+
+def get_tempdir(base_dir):
+ return os.path.join(base_dir, 'tempdir')
+
+def get_workdir(base_dir):
+ return os.path.join(get_tempdir(base_dir), 'working')
+
+# The name of this directory is also used to name the tarball and for
+# the root of paths within the tarball, e.g. subversion-1.9.5 or
+# subversion-nightly-r1800000
+def get_exportdir(base_dir, version, revnum):
+ if version.pre != 'nightly':
+ return os.path.join(get_tempdir(base_dir), 'subversion-'+str(version))
+ return os.path.join(get_tempdir(base_dir),
+ 'subversion-%s-r%d' % (version, revnum))
+
+def get_deploydir(base_dir):
+ return os.path.join(base_dir, 'deploy')
+
+def get_target(args):
+ "Return the location of the artifacts"
+ if args.target:
+ return args.target
+ else:
+ return get_deploydir(args.base_dir)
+
+def get_tmpldir():
+ return os.path.join(os.path.abspath(sys.path[0]), 'templates')
+
+def get_tmplfile(filename):
+ try:
+ return open(os.path.join(get_tmpldir(), filename))
+ except IOError:
+ # Hmm, we had a problem with the local version, let's try the repo
+ return urllib2.urlopen(repos + '/trunk/tools/dist/templates/' + filename)
+
+def get_nullfile():
+ return open(os.path.devnull, 'w')
+
+def run_script(verbose, script, hide_stderr=False):
+ stderr = None
+ if verbose:
+ stdout = None
+ else:
+ stdout = get_nullfile()
+ if hide_stderr:
+ stderr = get_nullfile()
+
+ for l in script.split('\n'):
+ subprocess.check_call(l.split(), stdout=stdout, stderr=stderr)
+
+def download_file(url, target, checksum):
+    response = urllib2.urlopen(url)
+    target_file = open(target, 'w+')
+    target_file.write(response.read())
+    target_file.seek(0)
+    m = hashlib.sha256()
+    m.update(target_file.read())
+    target_file.close()
+    checksum2 = m.hexdigest()
+    if checksum != checksum2:
+        raise RuntimeError("Checksum mismatch for '%s': "\
+                           "downloaded: '%s'; expected: '%s'" % \
+                           (target, checksum2, checksum))
+
+#----------------------------------------------------------------------
+# ezt helpers
+
+# In ezt, «[if-any foo]» is true when «data['foo'] == False»,
+# hence, provide this constant for readability.
+ezt_False = ""
+
+# And this constant for symmetry.
+ezt_True = True
+
+# And this for convenience.
+def ezt_bool(boolean_value):
+ return ezt_True if boolean_value else ezt_False
+
+#----------------------------------------------------------------------
+# Cleaning up the environment
+
+def cleanup(args):
+ 'Remove generated files and folders.'
+ logging.info('Cleaning')
+
+ shutil.rmtree(get_prefix(args.base_dir), True)
+ shutil.rmtree(get_tempdir(args.base_dir), True)
+ shutil.rmtree(get_deploydir(args.base_dir), True)
+
+
+#----------------------------------------------------------------------
+# Creating an environment to roll the release
+
+class RollDep(object):
+    'The super class for each of the build dependencies.'
+    def __init__(self, base_dir, use_existing, verbose):
+        self._base_dir = base_dir
+        self._use_existing = use_existing
+        self._verbose = verbose
+
+    def _test_version(self, cmd):
+        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
+                                stderr=subprocess.STDOUT)
+        (stdout, stderr) = proc.communicate()
+        rc = proc.wait()
+        if rc: return ''
+
+        return stdout.split('\n')
+
+    def build(self):
+        if not hasattr(self, '_extra_configure_flags'):
+            self._extra_configure_flags = ''
+        cwd = os.getcwd()
+        tempdir = get_tempdir(self._base_dir)
+        tarball = os.path.join(tempdir, self._filebase + '.tar.gz')
+
+        if os.path.exists(tarball):
+            if not self._use_existing:
+                raise RuntimeError('%s tarball "%s" already exists'
+                                   % (self.label, tarball))
+            logging.info('Using existing %s.tar.gz' % self._filebase)
+        else:
+            logging.info('Fetching %s' % self._filebase)
+            download_file(self._url, tarball, self._checksum)
+
+        # Extract tarball
+        tarfile.open(tarball).extractall(tempdir)
+
+        logging.info('Building ' + self.label)
+        os.chdir(os.path.join(tempdir, self._filebase))
+        run_script(self._verbose,
+                   '''./configure --prefix=%s %s
+                      make
+                      make install''' % (get_prefix(self._base_dir),
+                                         self._extra_configure_flags))
+
+        os.chdir(cwd)
+
+
+class AutoconfDep(RollDep):
+ def __init__(self, base_dir, use_existing, verbose, autoconf_ver, checksum):
+ RollDep.__init__(self, base_dir, use_existing, verbose)
+ self.label = 'autoconf'
+ self._filebase = 'autoconf-' + autoconf_ver
+ self._autoconf_ver = autoconf_ver
+ self._url = 'https://ftp.gnu.org/gnu/autoconf/%s.tar.gz' % self._filebase
+ self._checksum = checksum
+
+ def have_usable(self):
+ output = self._test_version(['autoconf', '-V'])
+ if not output: return False
+
+ version = output[0].split()[-1:][0]
+ return version == self._autoconf_ver
+
+ def use_system(self):
+ if not self._use_existing: return False
+ return self.have_usable()
+
+
+class LibtoolDep(RollDep):
+ def __init__(self, base_dir, use_existing, verbose, libtool_ver, checksum):
+ RollDep.__init__(self, base_dir, use_existing, verbose)
+ self.label = 'libtool'
+ self._filebase = 'libtool-' + libtool_ver
+ self._libtool_ver = libtool_ver
+ self._url = 'https://ftp.gnu.org/gnu/libtool/%s.tar.gz' % self._filebase
+ self._checksum = checksum
+
+ def have_usable(self):
+ output = self._test_version(['libtool', '--version'])
+ if not output: return False
+
+ return self._libtool_ver in output[0]
+
+ def use_system(self):
+ # We unconditionally return False here, to avoid using a borked
+ # system libtool (I'm looking at you, Debian).
+ return False
+
+ def build(self):
+ RollDep.build(self)
+ # autogen.sh looks for glibtoolize before libtoolize
+ bin_dir = os.path.join(get_prefix(self._base_dir), "bin")
+ os.symlink("libtoolize", os.path.join(bin_dir, "glibtoolize"))
+ os.symlink("libtool", os.path.join(bin_dir, "glibtool"))
+
+
+class SwigDep(RollDep):
+ def __init__(self, base_dir, use_existing, verbose, swig_ver, checksum,
+ sf_mirror):
+ RollDep.__init__(self, base_dir, use_existing, verbose)
+ self.label = 'swig'
+ self._filebase = 'swig-' + swig_ver
+ self._swig_ver = swig_ver
+ self._url = 'https://sourceforge.net/projects/swig/files/swig/%(swig)s/%(swig)s.tar.gz/download?use_mirror=%(sf_mirror)s' % \
+ { 'swig' : self._filebase,
+ 'sf_mirror' : sf_mirror }
+ self._checksum = checksum
+ self._extra_configure_flags = '--without-pcre'
+
+ def have_usable(self):
+ output = self._test_version(['swig', '-version'])
+ if not output: return False
+
+ version = output[1].split()[-1:][0]
+ return version == self._swig_ver
+
+ def use_system(self):
+ if not self._use_existing: return False
+ return self.have_usable()
+
+
+def build_env(args):
+ 'Download prerequisites for a release and prepare the environment.'
+ logging.info('Creating release environment')
+
+ try:
+ os.mkdir(get_prefix(args.base_dir))
+ os.mkdir(get_tempdir(args.base_dir))
+ except OSError:
+ if not args.use_existing:
+ raise
+
+ autoconf = AutoconfDep(args.base_dir, args.use_existing, args.verbose,
+ tool_versions[args.version.branch]['autoconf'][0],
+ tool_versions[args.version.branch]['autoconf'][1])
+ libtool = LibtoolDep(args.base_dir, args.use_existing, args.verbose,
+ tool_versions[args.version.branch]['libtool'][0],
+ tool_versions[args.version.branch]['libtool'][1])
+ swig = SwigDep(args.base_dir, args.use_existing, args.verbose,
+ tool_versions[args.version.branch]['swig'][0],
+ tool_versions[args.version.branch]['swig'][1],
+ args.sf_mirror)
+
+ # iterate over our rolling deps, and build them if needed
+ for dep in [autoconf, libtool, swig]:
+ if dep.use_system():
+ logging.info('Using system %s' % dep.label)
+ else:
+ dep.build()
+
+
+#----------------------------------------------------------------------
+# Create release artifacts
+
+def compare_changes(repos, branch, revision):
+ mergeinfo_cmd = ['svn', 'mergeinfo', '--show-revs=eligible',
+ repos + '/trunk/CHANGES',
+ repos + '/' + branch + '/' + 'CHANGES']
+ stdout = subprocess.check_output(mergeinfo_cmd)
+ if stdout:
+ # Treat this as a warning since we are now putting entries for future
+ # minor releases in CHANGES on trunk.
+ logging.warning('CHANGES has unmerged revisions: %s' %
+ stdout.replace("\n", " "))
+
+
+_current_year = str(datetime.datetime.now().year)
+_copyright_re = re.compile(r'Copyright (?:\(C\) )?(?P<year>[0-9]+)'
+ r' The Apache Software Foundation',
+ re.MULTILINE)
+
+def check_copyright_year(repos, branch, revision):
+ def check_file(branch_relpath):
+ file_url = (repos + '/' + branch + '/'
+ + branch_relpath + '@' + str(revision))
+ cat_cmd = ['svn', 'cat', file_url]
+ stdout = subprocess.check_output(cat_cmd)
+ m = _copyright_re.search(stdout)
+ if m:
+ year = m.group('year')
+ else:
+ year = None
+ if year != _current_year:
+ logging.warning('Copyright year in ' + branch_relpath
+ + ' is not the current year')
+ check_file('NOTICE')
+ check_file('subversion/libsvn_subr/version.c')
+
+def replace_lines(path, actions):
+ with open(path, 'r') as old_content:
+ lines = old_content.readlines()
+ with open(path, 'w') as new_content:
+ for line in lines:
+ for start, pattern, repl in actions:
+ if line.startswith(start):
+ line = re.sub(pattern, repl, line)
+ new_content.write(line)
+
+def roll_tarballs(args):
+ 'Create the release artifacts.'
+
+ if not args.branch:
+ args.branch = 'branches/%d.%d.x' % (args.version.major, args.version.minor)
+
+ branch = args.branch # shorthand
+ branch = branch.rstrip('/') # canonicalize for later comparisons
+
+ logging.info('Rolling release %s from branch %s@%d' % (args.version,
+ branch, args.revnum))
+
+ check_copyright_year(repos, args.branch, args.revnum)
+
+ # Ensure we've got the appropriate rolling dependencies available
+ autoconf = AutoconfDep(args.base_dir, False, args.verbose,
+ tool_versions[args.version.branch]['autoconf'][0],
+ tool_versions[args.version.branch]['autoconf'][1])
+ libtool = LibtoolDep(args.base_dir, False, args.verbose,
+ tool_versions[args.version.branch]['libtool'][0],
+ tool_versions[args.version.branch]['libtool'][1])
+ swig = SwigDep(args.base_dir, False, args.verbose,
+ tool_versions[args.version.branch]['swig'][0],
+ tool_versions[args.version.branch]['swig'][1], None)
+
+ for dep in [autoconf, libtool, swig]:
+ if not dep.have_usable():
+ raise RuntimeError('Cannot find usable %s' % dep.label)
+
+ if branch != 'trunk':
+ # Make sure CHANGES is sync'd.
+ compare_changes(repos, branch, args.revnum)
+
+ # Ensure the output directory doesn't already exist
+ if os.path.exists(get_deploydir(args.base_dir)):
+ raise RuntimeError('output directory \'%s\' already exists'
+ % get_deploydir(args.base_dir))
+
+ os.mkdir(get_deploydir(args.base_dir))
+
+ logging.info('Preparing working copy source')
+ shutil.rmtree(get_workdir(args.base_dir), True)
+ run_script(args.verbose, 'svn checkout %s %s'
+ % (repos + '/' + branch + '@' + str(args.revnum),
+ get_workdir(args.base_dir)))
+
+ # Exclude stuff we don't want in the tarball, it will not be present
+ # in the exported tree.
+ exclude = ['contrib', 'notes']
+ if branch != 'trunk':
+ exclude += ['STATUS']
+ if args.version.minor < 7:
+ exclude += ['packages', 'www']
+ cwd = os.getcwd()
+ os.chdir(get_workdir(args.base_dir))
+ run_script(args.verbose,
+ 'svn update --set-depth exclude %s' % " ".join(exclude))
+ os.chdir(cwd)
+
+ if args.patches:
+ # Assume patches are independent and can be applied in any
+ # order, no need to sort.
+ majmin = '%d.%d' % (args.version.major, args.version.minor)
+ for name in os.listdir(args.patches):
+ if name.find(majmin) != -1 and name.endswith('patch'):
+ logging.info('Applying patch %s' % name)
+ run_script(args.verbose,
+ '''svn patch %s %s'''
+ % (os.path.join(args.patches, name),
+ get_workdir(args.base_dir)))
+
+ # Massage the new version number into svn_version.h.
+ ver_tag, ver_numtag = args.version.get_ver_tags(args.revnum)
+ replacements = [('#define SVN_VER_TAG',
+ '".*"', ver_tag),
+ ('#define SVN_VER_NUMTAG',
+ '".*"', ver_numtag),
+ ('#define SVN_VER_REVISION',
+ '[0-9][0-9]*', str(args.revnum))]
+ if args.version.pre != 'nightly':
+ # SVN_VER_PATCH might change for security releases, e.g., when
+ # releasing 1.9.7 from the magic revision of 1.9.6.
+ #
+ # ### Would SVN_VER_MAJOR / SVN_VER_MINOR ever change?
+ # ### Note that SVN_VER_MINOR is duplicated in some places, see
+ # ### <https://subversion.apache.org/docs/community-guide/releasing.html#release-branches>
+ replacements += [('#define SVN_VER_MAJOR',
+ '[0-9][0-9]*', str(args.version.major)),
+ ('#define SVN_VER_MINOR',
+ '[0-9][0-9]*', str(args.version.minor)),
+ ('#define SVN_VER_PATCH',
+ '[0-9][0-9]*', str(args.version.patch))]
+ replace_lines(os.path.join(get_workdir(args.base_dir),
+ 'subversion', 'include', 'svn_version.h'),
+ replacements)
+
+ # Basename for export and tarballs, e.g. subversion-1.9.5 or
+ # subversion-nightly-r1800000
+ exportdir = get_exportdir(args.base_dir, args.version, args.revnum)
+ basename = os.path.basename(exportdir)
+
+ def export(windows):
+ shutil.rmtree(exportdir, True)
+ if windows:
+ eol_style = "--native-eol CRLF"
+ else:
+ eol_style = "--native-eol LF"
+ run_script(args.verbose, "svn export %s %s %s"
+ % (eol_style, get_workdir(args.base_dir), exportdir))
+
+ def transform_sql():
+ for root, dirs, files in os.walk(exportdir):
+ for fname in files:
+ if fname.endswith('.sql'):
+ run_script(args.verbose,
+ 'python build/transform_sql.py %s/%s %s/%s'
+ % (root, fname, root, fname[:-4] + '.h'))
+
+ def clean_autom4te():
+ for root, dirs, files in os.walk(get_workdir(args.base_dir)):
+ for dname in dirs:
+ if dname.startswith('autom4te') and dname.endswith('.cache'):
+ shutil.rmtree(os.path.join(root, dname))
+
+ logging.info('Building Windows tarballs')
+ export(windows=True)
+ os.chdir(exportdir)
+ transform_sql()
+ # Can't use the po-update.sh in the Windows export since it has CRLF
+ # line endings and won't run, so use the one in the working copy.
+ run_script(args.verbose,
+ '%s/tools/po/po-update.sh pot' % get_workdir(args.base_dir))
+ os.chdir(cwd)
+ clean_autom4te() # dist.sh does it but pointless on Windows?
+ os.chdir(get_tempdir(args.base_dir))
+ run_script(args.verbose,
+ 'zip -q -r %s %s' % (basename + '.zip', basename))
+ os.chdir(cwd)
+
+ logging.info('Building Unix tarballs')
+ export(windows=False)
+ os.chdir(exportdir)
+ transform_sql()
+ run_script(args.verbose,
+ '''tools/po/po-update.sh pot
+ ./autogen.sh --release''',
+ hide_stderr=True) # SWIG is noisy
+ os.chdir(cwd)
+ clean_autom4te() # dist.sh does it but probably pointless
+
+ # Do not use tar, it's probably GNU tar which produces tar files
+ # that are not compliant with POSIX.1 when including filenames
+ # longer than 100 chars. Platforms without a tar that understands
+ # the GNU tar extension will not be able to extract the resulting
+ # tar file. Use pax to produce POSIX.1 tar files.
+ #
+ # Use the gzip -n flag - this prevents it from storing the
+ # original name of the .tar file, and far more importantly, the
+ # mtime of the .tar file, in the produced .tar.gz file. This is
+# important, because it makes the gzip encoding reproducible by
+# anyone else who has a similar version of gzip, and also uses
+ # "gzip -9n". This means that committers who want to GPG-sign both
+ # the .tar.gz and the .tar.bz2 can download the .tar.bz2 (which is
+ # smaller), and locally generate an exact duplicate of the
+ # official .tar.gz file. This metadata is data on the temporary
+ # uncompressed tarball itself, not any of its contents, so there
+ # will be no effect on end-users.
+ os.chdir(get_tempdir(args.base_dir))
+ run_script(args.verbose,
+ '''pax -x ustar -w -f %s %s
+ bzip2 -9fk %s
+ gzip -9nf %s'''
+ % (basename + '.tar', basename,
+ basename + '.tar',
+ basename + '.tar'))
+ os.chdir(cwd)
+
+ # Move the results to the deploy directory
+ logging.info('Moving artifacts and calculating checksums')
+ for e in extns:
+ filename = basename + '.' + e
+ filepath = os.path.join(get_tempdir(args.base_dir), filename)
+ shutil.move(filepath, get_deploydir(args.base_dir))
+ filepath = os.path.join(get_deploydir(args.base_dir), filename)
+ m = hashlib.sha1()
+ m.update(open(filepath, 'r').read())
+ open(filepath + '.sha1', 'w').write(m.hexdigest())
+ m = hashlib.sha512()
+ m.update(open(filepath, 'r').read())
+ open(filepath + '.sha512', 'w').write(m.hexdigest())
+
+ # Nightlies do not get tagged so do not need the header
+ if args.version.pre != 'nightly':
+ shutil.copy(os.path.join(get_workdir(args.base_dir),
+ 'subversion', 'include', 'svn_version.h'),
+ os.path.join(get_deploydir(args.base_dir),
+ 'svn_version.h.dist-%s' % str(args.version)))
+
+ # And we're done!
+
+#----------------------------------------------------------------------
+# Sign the candidate release artifacts
+
+def sign_candidates(args):
+ 'Sign candidate artifacts in the dist development directory.'
+
+ def sign_file(filename):
+ asc_file = open(filename + '.asc', 'a')
+ logging.info("Signing %s" % filename)
+ proc = subprocess.check_call(['gpg', '-ba', '-o', '-', filename],
+ stdout=asc_file)
+ asc_file.close()
+
+ target = get_target(args)
+
+ for e in extns:
+ filename = os.path.join(target, 'subversion-%s.%s' % (args.version, e))
+ sign_file(filename)
+ if args.version.major >= 1 and args.version.minor <= 6:
+ filename = os.path.join(target,
+ 'subversion-deps-%s.%s' % (args.version, e))
+ sign_file(filename)
+
+
+#----------------------------------------------------------------------
+# Post the candidate release artifacts
+
+def post_candidates(args):
+ 'Post candidate artifacts to the dist development directory.'
+
+ target = get_target(args)
+
+ logging.info('Importing tarballs to %s' % dist_dev_url)
+ ver = str(args.version)
+ svn_cmd = ['svn', 'import', '-m',
+ 'Add Subversion %s candidate release artifacts' % ver,
+ '--auto-props', '--config-option',
+ 'config:auto-props:*.asc=svn:eol-style=native;svn:mime-type=text/plain',
+ target, dist_dev_url]
+ if (args.username):
+ svn_cmd += ['--username', args.username]
+ subprocess.check_call(svn_cmd)
+
+#----------------------------------------------------------------------
+# Create tag
+
+def create_tag(args):
+ 'Create tag in the repository'
+
+ target = get_target(args)
+
+ logging.info('Creating tag for %s' % str(args.version))
+
+ if not args.branch:
+ args.branch = 'branches/%d.%d.x' % (args.version.major, args.version.minor)
+
+ branch = secure_repos + '/' + args.branch.rstrip('/')
+
+ tag = secure_repos + '/tags/' + str(args.version)
+
+ svnmucc_cmd = ['svnmucc', '-m',
+ 'Tagging release ' + str(args.version)]
+ if (args.username):
+ svnmucc_cmd += ['--username', args.username]
+ svnmucc_cmd += ['cp', str(args.revnum), branch, tag]
+ svnmucc_cmd += ['put', os.path.join(target, 'svn_version.h.dist' + '-' +
+ str(args.version)),
+ tag + '/subversion/include/svn_version.h']
+
+ # don't redirect stdout/stderr since svnmucc might ask for a password
+ try:
+ subprocess.check_call(svnmucc_cmd)
+ except subprocess.CalledProcessError:
+ if args.version.is_prerelease():
+ logging.error("Do you need to pass --branch=trunk?")
+ raise
+
+ if not args.version.is_prerelease():
+ logging.info('Bumping revisions on the branch')
+ def replace_in_place(fd, startofline, flat, spare):
+ """In file object FD, replace FLAT with SPARE in the first line
+ starting with STARTOFLINE."""
+
+ fd.seek(0, os.SEEK_SET)
+ lines = fd.readlines()
+ for i, line in enumerate(lines):
+ if line.startswith(startofline):
+ lines[i] = line.replace(flat, spare)
+ break
+ else:
+ raise RuntimeError('Definition of %r not found' % startofline)
+
+ fd.seek(0, os.SEEK_SET)
+ fd.writelines(lines)
+ fd.truncate() # for current callers, new value is never shorter.
+
+ new_version = Version('%d.%d.%d' %
+ (args.version.major, args.version.minor,
+ args.version.patch + 1))
+
+ def file_object_for(relpath):
+ fd = tempfile.NamedTemporaryFile()
+ url = branch + '/' + relpath
+ fd.url = url
+ subprocess.check_call(['svn', 'cat', '%s@%d' % (url, args.revnum)],
+ stdout=fd)
+ return fd
+
+ svn_version_h = file_object_for('subversion/include/svn_version.h')
+ replace_in_place(svn_version_h, '#define SVN_VER_PATCH ',
+ str(args.version.patch), str(new_version.patch))
+
+ STATUS = file_object_for('STATUS')
+ replace_in_place(STATUS, 'Status of ',
+ str(args.version), str(new_version))
+
+ svn_version_h.seek(0, os.SEEK_SET)
+ STATUS.seek(0, os.SEEK_SET)
+ subprocess.check_call(['svnmucc', '-r', str(args.revnum),
+ '-m', 'Post-release housekeeping: '
+ 'bump the %s branch to %s.'
+ % (branch.split('/')[-1], str(new_version)),
+ 'put', svn_version_h.name, svn_version_h.url,
+ 'put', STATUS.name, STATUS.url,
+ ])
+ del svn_version_h
+ del STATUS
+
+#----------------------------------------------------------------------
+# Clean dist
+
+def clean_dist(args):
+ 'Clean the distribution directory of all but the most recent artifacts.'
+
+ stdout = subprocess.check_output(['svn', 'list', dist_release_url])
+
+ def minor(version):
+ """Return the minor release line of the parameter, which must be
+ a Version object."""
+ return (version.major, version.minor)
+
+ filenames = stdout.split('\n')
+ filenames = filter(lambda x: x.startswith('subversion-'), filenames)
+ versions = set(map(Version, filenames))
+ minor_lines = set(map(minor, versions))
+ to_keep = set()
+ # Keep 3 minor lines: 1.10.0-alpha3, 1.9.7, 1.8.19.
+ # TODO: When we release 1.A.0 GA we'll have to manually remove 1.(A-2).* artifacts.
+ for recent_line in sorted(minor_lines, reverse=True)[:3]:
+ to_keep.add(max(
+ x for x in versions
+ if minor(x) == recent_line
+ ))
+ for i in sorted(to_keep):
+ logging.info("Saving release '%s'", i)
+
+ svnmucc_cmd = ['svnmucc', '-m', 'Remove old Subversion releases.\n' +
+ 'They are still available at ' +
+ 'https://archive.apache.org/dist/subversion/']
+ if (args.username):
+ svnmucc_cmd += ['--username', args.username]
+ for filename in filenames:
+ if Version(filename) not in to_keep:
+ logging.info("Removing %r", filename)
+ svnmucc_cmd += ['rm', dist_release_url + '/' + filename]
+
+ # don't redirect stdout/stderr since svnmucc might ask for a password
+ if 'rm' in svnmucc_cmd:
+ subprocess.check_call(svnmucc_cmd)
+ else:
+ logging.info("Nothing to remove")
+
+#----------------------------------------------------------------------
+# Move to dist
+
+def move_to_dist(args):
+ 'Move candidate artifacts to the distribution directory.'
+
+ stdout = subprocess.check_output(['svn', 'list', dist_dev_url])
+
+ filenames = []
+ for entry in stdout.split('\n'):
+ if fnmatch.fnmatch(entry, 'subversion-%s.*' % str(args.version)):
+ filenames.append(entry)
+ svnmucc_cmd = ['svnmucc', '-m',
+ 'Publish Subversion-%s.' % str(args.version)]
+ if (args.username):
+ svnmucc_cmd += ['--username', args.username]
+ svnmucc_cmd += ['rm', dist_dev_url + '/' + 'svn_version.h.dist'
+ + '-' + str(args.version)]
+ for filename in filenames:
+ svnmucc_cmd += ['mv', dist_dev_url + '/' + filename,
+ dist_release_url + '/' + filename]
+
+ # don't redirect stdout/stderr since svnmucc might ask for a password
+ logging.info('Moving release artifacts to %s' % dist_release_url)
+ subprocess.check_call(svnmucc_cmd)
+
+#----------------------------------------------------------------------
+# Write announcements
+
+def write_news(args):
+ 'Write text for the Subversion website.'
+ data = { 'date' : datetime.date.today().strftime('%Y%m%d'),
+ 'date_pres' : datetime.date.today().strftime('%Y-%m-%d'),
+ 'major-minor' : args.version.branch,
+ 'version' : str(args.version),
+ 'version_base' : args.version.base,
+ 'anchor': args.version.get_download_anchor(),
+ 'is_recommended': ezt_bool(args.version.is_recommended()),
+ }
+
+ if args.version.is_prerelease():
+ template_filename = 'rc-news.ezt'
+ else:
+ template_filename = 'stable-news.ezt'
+
+ template = ezt.Template()
+ template.parse(get_tmplfile(template_filename).read())
+ template.generate(sys.stdout, data)
+
+
+def get_sha1info(args):
+ 'Return a list of sha1 info for the release'
+
+ target = get_target(args)
+
+ sha1s = glob.glob(os.path.join(target, 'subversion*-%s*.sha1' % args.version))
+
+ class info(object):
+ pass
+
+ sha1info = []
+ for s in sha1s:
+ i = info()
+ # strip ".sha1"
+ i.filename = os.path.basename(s)[:-5]
+ i.sha1 = open(s, 'r').read()
+ sha1info.append(i)
+
+ return sha1info
+
+
+def write_announcement(args):
+    'Write the release announcement.'
+    sha1info = get_sha1info(args)
+    siginfo = "\n".join(get_siginfo(args, True)) + "\n"
+
+    data = { 'version' : str(args.version),
+             'sha1info' : sha1info,
+             'siginfo' : siginfo,
+             'major-minor' : args.version.branch,
+             'major-minor-patch' : args.version.base,
+             'anchor' : args.version.get_download_anchor(),
+           }
+
+    if args.version.is_prerelease():
+        template_filename = 'rc-release-ann.ezt'
+    else:
+        data['dot-zero'] = ezt_bool(args.version.patch == 0)
+        # TODO: instead of requiring the RM to remember to pass --security,
+        # read the private repository where CVE announcements are staged,
+        # parse the json file that identifies which versions are affected,
+        # and accordingly automagically set data['security'].
+        data['security'] = ezt_bool(args.security)
+        template_filename = 'stable-release-ann.ezt'
+
+    # The template text assumes these two are mutually exclusive.
+    # If you ever find a reason to make a x.y.0 release with a security
+    # bug, just comment this out and update the template before sending.
+    assert not (data.get('dot-zero') and data.get('security'))
+
+    template = ezt.Template(compress_whitespace = False)
+    template.parse(get_tmplfile(template_filename).read())
+    template.generate(sys.stdout, data)
+
+
def write_downloads(args):
    '''Render the download table section of the website to stdout,
    listing each artifact together with its sha1 checksum.'''
    data = {
        'version'  : str(args.version),
        'fileinfo' : get_sha1info(args),
    }

    template = ezt.Template(compress_whitespace=False)
    template.parse(get_tmplfile('download.ezt').read())
    template.generate(sys.stdout, data)
+
+
+#----------------------------------------------------------------------
+# Validate the signatures for a release
+
# ASCII-armor delimiters used to split and syntax-check the concatenated
# signatures collected in the per-artifact .asc files.
key_start = '-----BEGIN PGP SIGNATURE-----'
key_end = '-----END PGP SIGNATURE-----'

PUBLIC_KEY_ALGORITHMS = {
    # These values are taken from the RFC's registry at:
    # https://www.iana.org/assignments/pgp-parameters/pgp-parameters.xhtml#pgp-parameters-12
    #
    # The values are callables that produce gpg1-like key length and type
    # indications, e.g., "4096R" for a 4096-bit RSA key.
    1: (lambda keylen: str(keylen) + 'R'), # RSA
}
+
+def _make_human_readable_fingerprint(fingerprint):
+ return re.compile(r'(....)' * 10).sub(r'\1 \2 \3 \4 \5 \6 \7 \8 \9 \10',
+ fingerprint)
+
def get_siginfo(args, quiet=False):
    '''Returns a list of signatures for the release.

    Verifies every signature in the per-artifact .asc files under the
    target directory, exits (status 1) on any syntax error or bad
    signature, and returns human-readable lines naming each good signer
    with the long key id and fingerprint of their key.
    '''

    # Prefer a system-installed gnupg module; fall back to the vendored copy.
    try:
        import gnupg
    except ImportError:
        import security._gnupg as gnupg
    gpg = gnupg.GPG()

    target = get_target(args)

    good_sigs = {}       # fingerprint -> True for each verified signature
    fingerprints = {}    # NOTE(review): never populated or read below
    output = []

    glob_pattern = os.path.join(target, 'subversion*-%s*.asc' % args.version)
    for filename in glob.glob(glob_pattern):
        # NOTE(review): file handle is not explicitly closed (Python 2 era).
        text = open(filename).read()
        keys = text.split(key_start)

        # Check the keys file syntax. We've been bitten in the past
        # with syntax errors in the key delimiters that GPG didn't
        # catch for us, but the ASF key checker tool did.
        if keys[0]:
            sys.stderr.write("SYNTAX ERROR: %s does not start with '%s'\n"
                             % (filename, key_start))
            sys.exit(1)
        keys = keys[1:]

        if not quiet:
            logging.info("Checking %d sig(s) in %s" % (len(keys), filename))

        n = 0
        for key in keys:
            n += 1
            if not key.rstrip().endswith(key_end):
                sys.stderr.write("SYNTAX ERROR: Key %d in %s"
                                 " does not end with '%s'\n"
                                 % (n, filename, key_end))
                sys.exit(1)

            # Write this one signature to a temp file so gpg can verify it
            # against the artifact (filename minus the ".asc" extension).
            # NOTE(review): os.write() with a str argument assumes Python 2;
            # on Python 3 this would need bytes — confirm before porting.
            fd, fn = tempfile.mkstemp()
            os.write(fd, key_start + key)
            os.close(fd)
            verified = gpg.verify_file(open(fn, 'rb'), filename[:-4])
            os.unlink(fn)

            if verified.valid:
                good_sigs[verified.fingerprint] = True
            else:
                sys.stderr.write("BAD SIGNATURE: Key %d in %s\n"
                                 % (n, filename))
                if verified.key_id:
                    sys.stderr.write("  key id: %s\n" % verified.key_id)
                sys.exit(1)

    for id in good_sigs.keys():
        # Most potential signers have public short keyid (32-bit) collisions in
        # the https://evil32.com/ set, which has been uploaded to the
        # keyservers, so generate the long keyid (see use of LONG_KEY_ID below).
        #
        # TODO: in the future it'd be nice to use the 'gnupg' module here.
        gpg_output = subprocess.check_output(
            ['gpg', '--fixed-list-mode', '--with-colons', '--fingerprint', id],
            stderr=subprocess.STDOUT,
        )
        gpg_output = gpg_output.splitlines()

        # This code was added in r934990, but there was no comment (nor log
        # message text) explaining its purpose.  I've commented it out since
        # ignoring arbitrary warnings in a verification codepath is Bad.  If
        # you run into warnings on your machine, feel free to uncomment it,
        # but when you do so please make it match specific warnings only.
        #
        #gpg_output = "\n".join([ l for l in gpg_output.splitlines()
        #                         if l[0:7] != 'Warning' ])

        # Parse gpg's output.  This happens to work for both gpg1 and gpg2,
        # even though their outputs are slightly different.
        #
        # See http://git.gnupg.org/cgi-bin/gitweb.cgi?p=gnupg.git;a=blob_plain;f=doc/DETAILS

        # 'pub' record: extract key length/type and the long (64-bit) key id.
        for line in gpg_output:
            parts = line.split(':')
            if parts[0] == 'pub':
                keylen = int(parts[2])
                keytype = int(parts[3])
                formatter = PUBLIC_KEY_ALGORITHMS[keytype]
                long_key_id = parts[4]
                length_and_type = formatter(keylen) + '/' + long_key_id
                del keylen, keytype, formatter, long_key_id
                break
        else:
            raise RuntimeError("Failed to determine LONG_KEY_ID")
        # 'fpr' record: the full fingerprint is in field 10.
        for line in gpg_output:
            parts = line.split(':')
            if parts[0] == 'fpr':
                fingerprint = parts[9]
                break
        else:
            raise RuntimeError("Failed to determine FINGERPRINT")
        # 'uid' record: the signer's name, up to the "<email>" part.
        for line in gpg_output:
            parts = line.split(':')
            if parts[0] == 'uid':
                name = parts[9].split(' <')[0]
                break
        else:
            raise RuntimeError("Failed to determine NAME")

        format_expandos = dict(
            name=name,
            length_and_type=length_and_type,
            fingerprint=_make_human_readable_fingerprint(fingerprint),
        )
        del name, length_and_type, fingerprint
        line = "   {name} [{length_and_type}] with fingerprint:"
        output.append( line.format(**format_expandos) )
        line = "    {fingerprint}"
        output.append( line.format(**format_expandos) )

    return output
+
def check_sigs(args):
    '''Verify the collected release signatures and print one line per
    good signer to stdout (get_siginfo exits on any bad signature).'''
    for info_line in get_siginfo(args):
        print(info_line)
+
def get_keys(args):
    'Import the LDAP-based KEYS file to gpg'
    # urlopen() handles lack a .fileno(), so spool the download through a
    # temporary file that subprocess can hand to gpg as stdin.
    with tempfile.SpooledTemporaryFile() as keys_file:
        keys_file.write(urllib2.urlopen(KEYS).read())
        keys_file.flush()
        keys_file.seek(0)
        subprocess.check_call(['gpg', '--import'], stdin=keys_file)
+
+#----------------------------------------------------------------------
+# Main entry point for argument parsing and handling
+
def main():
    '''Parse arguments, and drive the appropriate subcommand.

    Each subcommand registers its handler function via set_defaults(func=...);
    after the global options are processed, the handler is invoked with the
    parsed argument namespace.'''

    # Setup our main parser
    parser = argparse.ArgumentParser(
                            description='Create an Apache Subversion release.')
    parser.add_argument('--clean', action='store_true', default=False,
                   help='Remove any directories previously created by %(prog)s')
    parser.add_argument('--verbose', action='store_true', default=False,
                   help='Increase output verbosity')
    parser.add_argument('--base-dir', default=os.getcwd(),
                   help='''The directory in which to create needed files and
                           folders.  The default is the current working
                           directory.''')
    subparsers = parser.add_subparsers(title='subcommands')

    # Setup the parser for the build-env subcommand
    subparser = subparsers.add_parser('build-env',
                    help='''Download release prerequisites, including autoconf,
                            libtool, and swig.''')
    subparser.set_defaults(func=build_env)
    subparser.add_argument('version', type=Version,
                    help='''The release label, such as '1.7.0-alpha1'.''')
    subparser.add_argument('--sf-mirror', default='softlayer',
                    help='''The mirror to use for downloading files from
                            SourceForge.  If in the EU, you may want to use
                            'kent' for this value.''')
    subparser.add_argument('--use-existing', action='store_true', default=False,
                    help='''Attempt to use existing build dependencies before
                            downloading and building a private set.''')

    # Setup the parser for the roll subcommand
    subparser = subparsers.add_parser('roll',
                    help='''Create the release artifacts.''')
    subparser.set_defaults(func=roll_tarballs)
    subparser.add_argument('version', type=Version,
                    help='''The release label, such as '1.7.0-alpha1'.''')
    subparser.add_argument('revnum', type=lambda arg: int(arg.lstrip('r')),
                    help='''The revision number to base the release on.''')
    subparser.add_argument('--branch',
                    help='''The branch to base the release on,
                            relative to ^/subversion/.''')
    subparser.add_argument('--patches',
                    help='''The path to the directory containing patches.''')

    # Setup the parser for the sign-candidates subcommand
    subparser = subparsers.add_parser('sign-candidates',
                    help='''Sign the release artifacts.''')
    subparser.set_defaults(func=sign_candidates)
    subparser.add_argument('version', type=Version,
                    help='''The release label, such as '1.7.0-alpha1'.''')
    subparser.add_argument('--target',
                    help='''The full path to the directory containing
                            release artifacts.''')

    # Setup the parser for the post-candidates subcommand
    subparser = subparsers.add_parser('post-candidates',
                    help='''Commit candidates to the release development area
                            of the dist.apache.org repository.''')
    subparser.set_defaults(func=post_candidates)
    subparser.add_argument('version', type=Version,
                    help='''The release label, such as '1.7.0-alpha1'.''')
    subparser.add_argument('--username',
                    help='''Username for ''' + dist_repos + '''.''')
    subparser.add_argument('--target',
                    help='''The full path to the directory containing
                            release artifacts.''')

    # Setup the parser for the create-tag subcommand
    subparser = subparsers.add_parser('create-tag',
                    help='''Create the release tag.''')
    subparser.set_defaults(func=create_tag)
    subparser.add_argument('version', type=Version,
                    help='''The release label, such as '1.7.0-alpha1'.''')
    subparser.add_argument('revnum', type=lambda arg: int(arg.lstrip('r')),
                    help='''The revision number to base the release on.''')
    subparser.add_argument('--branch',
                    help='''The branch to base the release on,
                            relative to ^/subversion/.''')
    subparser.add_argument('--username',
                    help='''Username for ''' + secure_repos + '''.''')
    subparser.add_argument('--target',
                    help='''The full path to the directory containing
                            release artifacts.''')

    # The clean-dist subcommand
    subparser = subparsers.add_parser('clean-dist',
                    help='''Clean the distribution directory (and mirrors) of
                            all but the most recent MAJOR.MINOR release.''')
    subparser.set_defaults(func=clean_dist)
    subparser.add_argument('--dist-dir',
                    help='''The directory to clean.''')
    subparser.add_argument('--username',
                    help='''Username for ''' + dist_repos + '''.''')

    # The move-to-dist subcommand
    subparser = subparsers.add_parser('move-to-dist',
                    help='''Move candidates and signatures from the temporary
                            release dev location to the permanent distribution
                            directory.''')
    subparser.set_defaults(func=move_to_dist)
    subparser.add_argument('version', type=Version,
                    help='''The release label, such as '1.7.0-alpha1'.''')
    subparser.add_argument('--username',
                    help='''Username for ''' + dist_repos + '''.''')

    # The write-news subcommand
    subparser = subparsers.add_parser('write-news',
                    help='''Output to stdout template text for use in the news
                            section of the Subversion website.''')
    subparser.set_defaults(func=write_news)
    subparser.add_argument('version', type=Version,
                    help='''The release label, such as '1.7.0-alpha1'.''')

    # write-announcement
    subparser = subparsers.add_parser('write-announcement',
                    help='''Output to stdout template text for the emailed
                            release announcement.''')
    subparser.set_defaults(func=write_announcement)
    subparser.add_argument('--security', action='store_true', default=False,
                    help='''The release being announced includes security
                            fixes.''')
    subparser.add_argument('--target',
                    help='''The full path to the directory containing
                            release artifacts.''')
    subparser.add_argument('version', type=Version,
                    help='''The release label, such as '1.7.0-alpha1'.''')

    # write-downloads
    subparser = subparsers.add_parser('write-downloads',
                    help='''Output to stdout template text for the download
                            table for subversion.apache.org''')
    subparser.set_defaults(func=write_downloads)
    subparser.add_argument('--target',
                    help='''The full path to the directory containing
                            release artifacts.''')
    subparser.add_argument('version', type=Version,
                    help='''The release label, such as '1.7.0-alpha1'.''')

    # check-sigs
    subparser = subparsers.add_parser('check-sigs',
                    help='''Output to stdout the signatures collected for this
                            release''')
    subparser.set_defaults(func=check_sigs)
    subparser.add_argument('version', type=Version,
                    help='''The release label, such as '1.7.0-alpha1'.''')
    subparser.add_argument('--target',
                    help='''The full path to the directory containing
                            release artifacts.''')

    # get-keys
    subparser = subparsers.add_parser('get-keys',
                    help='''Import committers' public keys to ~/.gpg/''')
    subparser.set_defaults(func=get_keys)

    # A meta-target
    subparser = subparsers.add_parser('clean',
                    help='''The same as the '--clean' switch, but as a
                            separate subcommand.''')
    subparser.set_defaults(func=cleanup)

    # Parse the arguments
    args = parser.parse_args()

    # first, process any global operations
    if args.clean:
        cleanup(args)

    # Set up logging
    logger = logging.getLogger()
    if args.verbose:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.INFO)

    # Fix up our path so we can use our installed versions
    os.environ['PATH'] = os.path.join(get_prefix(args.base_dir), 'bin') + ':' \
                         + os.environ['PATH']

    # Make timestamps in tarballs independent of local timezone
    os.environ['TZ'] = 'UTC'

    # finally, run the subcommand, and give it the parsed arguments
    args.func(args)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/dist/security/__init__.py b/tools/dist/security/__init__.py
new file mode 100644
index 0000000..fe95886
--- /dev/null
+++ b/tools/dist/security/__init__.py
@@ -0,0 +1,18 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
diff --git a/tools/dist/security/_gnupg.py b/tools/dist/security/_gnupg.py
new file mode 100644
index 0000000..037dbf3
--- /dev/null
+++ b/tools/dist/security/_gnupg.py
@@ -0,0 +1,1486 @@
+# Copyright (c) 2008-2014 by Vinay Sajip.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+# * The name(s) of the copyright holder(s) may not be used to endorse or
+# promote products derived from this software without specific prior
+# written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) "AS IS" AND ANY EXPRESS OR
+# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+# EVENT SHALL THE COPYRIGHT HOLDER(S) BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+""" A wrapper for the 'gpg' command.
+
+Portions of this module are derived from A.M. Kuchling's well-designed
+GPG.py, using Richard Jones' updated version 1.3, which can be found
+in the pycrypto CVS repository on Sourceforge:
+
+http://pycrypto.cvs.sourceforge.net/viewvc/pycrypto/gpg/GPG.py
+
+This module is *not* forward-compatible with amk's; some of the
+old interface has changed. For instance, since I've added decrypt
+functionality, I elected to initialize with a 'gnupghome' argument
+instead of 'keyring', so that gpg can find both the public and secret
+keyrings. I've also altered some of the returned objects in order for
+the caller to not have to know as much about the internals of the
+result classes.
+
+While the rest of ISconf is released under the GPL, I am releasing
+this single file under the same terms that A.M. Kuchling used for
+pycrypto.
+
+Steve Traugott, stevegt@terraluna.org
+Thu Jun 23 21:27:20 PDT 2005
+
+This version of the module has been modified from Steve Traugott's version
+(see http://trac.t7a.org/isconf/browser/trunk/lib/python/isconf/GPG.py) by
+Vinay Sajip to make use of the subprocess module (Steve's version uses os.fork()
+and so does not work on Windows). Renamed to gnupg.py to avoid confusion with
+the previous versions.
+
+Modifications Copyright (C) 2008-2017 Vinay Sajip. All rights reserved.
+
+A unittest harness (test_gnupg.py) has also been added.
+"""
+
+__version__ = "0.4.1"
+__author__ = "Vinay Sajip"
+__date__ = "$07-Jul-2017 15:09:20$"
+
+try:
+ from io import StringIO
+except ImportError: # pragma: no cover
+ from cStringIO import StringIO
+
+import codecs
+import locale
+import logging
+import os
+import re
+import socket
+from subprocess import Popen
+from subprocess import PIPE
+import sys
+import threading
+
+STARTUPINFO = None
+if os.name == 'nt': # pragma: no cover
+ try:
+ from subprocess import STARTUPINFO, STARTF_USESHOWWINDOW, SW_HIDE
+ except ImportError:
+ STARTUPINFO = None
+
+try:
+ import logging.NullHandler as NullHandler
+except ImportError:
+ class NullHandler(logging.Handler):
+ def handle(self, record):
+ pass
+try:
+ unicode
+ _py3k = False
+ string_types = basestring
+ text_type = unicode
+except NameError:
+ _py3k = True
+ string_types = str
+ text_type = str
+
+logger = logging.getLogger(__name__)
+if not logger.handlers:
+ logger.addHandler(NullHandler())
+
+# We use the test below because it works for Jython as well as CPython
+if os.path.__name__ == 'ntpath': # pragma: no cover
+ # On Windows, we don't need shell quoting, other than worrying about
+ # paths with spaces in them.
+ def shell_quote(s):
+ return '"%s"' % s
+else:
+ # Section copied from sarge
+
+ # This regex determines which shell input needs quoting
+ # because it may be unsafe
+ UNSAFE = re.compile(r'[^\w%+,./:=@-]')
+
+ def shell_quote(s):
+ """
+ Quote text so that it is safe for Posix command shells.
+
+ For example, "*.py" would be converted to "'*.py'". If the text is
+ considered safe it is returned unquoted.
+
+ :param s: The value to quote
+ :type s: str (or unicode on 2.x)
+ :return: A safe version of the input, from the point of view of Posix
+ command shells
+ :rtype: The passed-in type
+ """
+ if not isinstance(s, string_types): # pragma: no cover
+ raise TypeError('Expected string type, got %s' % type(s))
+ if not s:
+ result = "''"
+ elif not UNSAFE.search(s):
+ result = s
+ else:
+ result = "'%s'" % s.replace("'", r"'\''")
+ return result
+
+ # end of sarge code
+
+# Now that we use shell=False, we shouldn't need to quote arguments.
+# Use no_quote instead of shell_quote to remind us of where quoting
+# was needed. However, note that we still need, on 2.x, to encode any
+# Unicode argument with the file system encoding - see Issue #41 and
+# Python issue #1759845 ("subprocess.call fails with unicode strings in
+# command line").
+
+# Allows the encoding used to be overridden in special cases by setting
+# this module attribute appropriately.
+fsencoding = sys.getfilesystemencoding()
+
def no_quote(s):
    '''Pass s through unchanged, except that on Python 2 a unicode value
    is encoded with the filesystem encoding first (see Issue #41).'''
    if _py3k or not isinstance(s, text_type):
        return s
    return s.encode(fsencoding)
+
def _copy_data(instream, outstream):
    '''Copy everything from instream to outstream in 1024-byte chunks,
    then close outstream.  Runs on a worker thread (_threaded_copy_data);
    read/write failures are logged and end the copy early.'''
    sent = 0  # total bytes copied, reported in the closing debug message
    if hasattr(sys.stdin, 'encoding'):
        enc = sys.stdin.encoding
    else: # pragma: no cover
        enc = 'ascii'
    while True:
        # See issue #39: read can fail when e.g. a text stream is provided
        # for what is actually a binary file
        try:
            data = instream.read(1024)
        except UnicodeError:
            logger.warning('Exception occurred while reading', exc_info=1)
            break
        if not data:
            break
        sent += len(data)
        # logger.debug("sending chunk (%d): %r", sent, data[:256])
        try:
            outstream.write(data)
        except UnicodeError: # pragma: no cover
            outstream.write(data.encode(enc))
        except Exception:
            # Can sometimes get 'broken pipe' errors even when the data has
            # all been sent.  Catch Exception rather than using a bare
            # 'except:' so KeyboardInterrupt/SystemExit still propagate.
            logger.exception('Error sending data')
            break
    try:
        outstream.close()
    except IOError: # pragma: no cover
        logger.warning('Exception occurred while closing: ignored', exc_info=1)
    logger.debug("closed output, %d bytes sent", sent)
+
def _threaded_copy_data(instream, outstream):
    '''Start a daemon thread copying instream to outstream via _copy_data;
    return the started Thread object so callers can join() it.'''
    wr = threading.Thread(target=_copy_data, args=(instream, outstream))
    # Assign the daemon attribute directly: setDaemon() is deprecated (and
    # removed in recent Python 3); the behavior is identical.
    wr.daemon = True
    logger.debug('data copier: %r, %r, %r', wr, instream, outstream)
    wr.start()
    return wr
+
def _write_passphrase(stream, passphrase, encoding):
    '''Send the passphrase, newline-terminated and encoded, to the stream.'''
    encoded = ('%s\n' % passphrase).encode(encoding)
    stream.write(encoded)
    logger.debug('Wrote passphrase')
+
+def _is_sequence(instance):
+ return isinstance(instance, (list, tuple, set, frozenset))
+
+def _make_memory_stream(s):
+ try:
+ from io import BytesIO
+ rv = BytesIO(s)
+ except ImportError: # pragma: no cover
+ rv = StringIO(s)
+ return rv
+
def _make_binary_stream(s, encoding):
    '''Encode s with the given encoding if it is a text string (on either
    Python 2 or 3), then wrap the bytes in an in-memory stream.'''
    is_text = isinstance(s, str) if _py3k else (type(s) is not str)
    if is_text:
        s = s.encode(encoding)
    return _make_memory_stream(s)
+
class Verify(object):
    "Handle status messages for --verify"

    # Trust ordinals, from least to most trusted.
    TRUST_UNDEFINED = 0
    TRUST_NEVER = 1
    TRUST_MARGINAL = 2
    TRUST_FULLY = 3
    TRUST_ULTIMATE = 4

    # Maps gpg's TRUST_* status tokens to the ordinals above.
    TRUST_LEVELS = {
        "TRUST_UNDEFINED" : TRUST_UNDEFINED,
        "TRUST_NEVER" : TRUST_NEVER,
        "TRUST_MARGINAL" : TRUST_MARGINAL,
        "TRUST_FULLY" : TRUST_FULLY,
        "TRUST_ULTIMATE" : TRUST_ULTIMATE,
    }

    def __init__(self, gpg):
        # Owning GPG instance.
        self.gpg = gpg
        # Set True on GOODSIG; cleared again by the error statuses below.
        self.valid = False
        self.fingerprint = self.creation_date = self.timestamp = None
        self.signature_id = self.key_id = None
        self.username = None
        self.key_status = None
        # Human-readable description of the most recent relevant status.
        self.status = None
        self.pubkey_fingerprint = None
        self.expire_timestamp = None
        self.sig_timestamp = None
        self.trust_text = None
        self.trust_level = None

    def __nonzero__(self):
        # Truthiness mirrors signature validity (Python 2 protocol).
        return self.valid

    __bool__ = __nonzero__  # Python 3 protocol

    def handle_status(self, key, value):
        '''Process one STATUS-FD line from gpg: key is the status token
        (e.g. "GOODSIG"), value is the rest of the line.  Updates the
        validity flag, key/signature identifiers and status text.'''
        if key in self.TRUST_LEVELS:
            self.trust_text = key
            self.trust_level = self.TRUST_LEVELS[key]
        elif key in ("WARNING", "ERROR"):
            logger.warning('potential problem: %s: %s', key, value)
        elif key == "BADSIG": # pragma: no cover
            self.valid = False
            self.status = 'signature bad'
            self.key_id, self.username = value.split(None, 1)
        elif key == "ERRSIG": # pragma: no cover
            self.valid = False
            (self.key_id,
             algo, hash_algo,
             cls,
             self.timestamp) = value.split()[:5]
            self.status = 'signature error'
        elif key == "EXPSIG": # pragma: no cover
            self.valid = False
            self.status = 'signature expired'
            self.key_id, self.username = value.split(None, 1)
        elif key == "GOODSIG":
            self.valid = True
            self.status = 'signature good'
            self.key_id, self.username = value.split(None, 1)
        elif key == "VALIDSIG":
            (self.fingerprint,
             self.creation_date,
             self.sig_timestamp,
             self.expire_timestamp) = value.split()[:4]
            # may be different if signature is made with a subkey
            self.pubkey_fingerprint = value.split()[-1]
            self.status = 'signature valid'
        elif key == "SIG_ID":
            (self.signature_id,
             self.creation_date, self.timestamp) = value.split()
        elif key == "DECRYPTION_FAILED": # pragma: no cover
            self.valid = False
            self.key_id = value
            self.status = 'decryption failed'
        elif key == "NO_PUBKEY": # pragma: no cover
            self.valid = False
            self.key_id = value
            self.status = 'no public key'
        elif key in ("EXPKEYSIG", "REVKEYSIG"): # pragma: no cover
            # signed with expired or revoked key
            self.valid = False
            self.key_id = value.split()[0]
            if key == "EXPKEYSIG":
                self.key_status = 'signing key has expired'
            else:
                self.key_status = 'signing key was revoked'
            self.status = self.key_status
        elif key in ("UNEXPECTED", "FAILURE"): # pragma: no cover
            self.valid = False
            self.key_id = value
            if key == "UNEXPECTED":
                self.status = 'unexpected data'
            else:
                # N.B. there might be other reasons
                if not self.status:
                    self.status = 'incorrect passphrase'
        elif key in ("DECRYPTION_INFO", "PLAINTEXT", "PLAINTEXT_LENGTH",
                     "NO_SECKEY", "BEGIN_SIGNING"):
            # Informational tokens that carry nothing we need to record.
            pass
        else: # pragma: no cover
            logger.debug('message ignored: %s, %s', key, value)
+
class ImportResult(object):
    "Handle status messages for --import"

    # Names of the numeric columns of gpg's IMPORT_RES status line, in order;
    # each becomes an attribute on this object (None until IMPORT_RES is seen).
    counts = '''count no_user_id imported imported_rsa unchanged
            n_uids n_subk n_sigs n_revoc sec_read sec_imported
            sec_dups not_imported'''.split()

    def __init__(self, gpg):
        self.gpg = gpg
        # NOTE(review): initialized as a list, but the 'imported' attribute
        # is overwritten with an int by the IMPORT_RES handler below.
        self.imported = []
        self.results = []       # one dict per processed key or problem
        self.fingerprints = []  # fingerprints of successfully imported keys
        for result in self.counts:
            setattr(self, result, None)

    def __nonzero__(self):
        # Truthy only if something was imported and nothing failed.
        if self.not_imported: return False
        if not self.fingerprints: return False
        return True

    __bool__ = __nonzero__  # Python 3 protocol

    # Bit values of the IMPORT_OK reason bitmask.
    ok_reason = {
        '0': 'Not actually changed',
        '1': 'Entirely new key',
        '2': 'New user IDs',
        '4': 'New signatures',
        '8': 'New subkeys',
        '16': 'Contains private key',
    }

    # IMPORT_PROBLEM reason codes.
    problem_reason = {
        '0': 'No specific reason given',
        '1': 'Invalid Certificate',
        '2': 'Issuer Certificate missing',
        '3': 'Certificate Chain too long',
        '4': 'Error storing certificate',
    }

    def handle_status(self, key, value):
        '''Process one STATUS-FD line from a key import.'''
        if key in ("WARNING", "ERROR"):
            logger.warning('potential problem: %s: %s', key, value)
        elif key in ("IMPORTED", "KEY_CONSIDERED"):
            # this duplicates info we already see in import_ok & import_problem
            pass
        elif key == "NODATA": # pragma: no cover
            self.results.append({'fingerprint': None,
                'problem': '0', 'text': 'No valid data found'})
        elif key == "IMPORT_OK":
            reason, fingerprint = value.split()
            reasons = []
            # 'reason' is a bitmask; collect the text for every set bit.
            for code, text in list(self.ok_reason.items()):
                if int(reason) | int(code) == int(reason):
                    reasons.append(text)
            reasontext = '\n'.join(reasons) + "\n"
            self.results.append({'fingerprint': fingerprint,
                'ok': reason, 'text': reasontext})
            self.fingerprints.append(fingerprint)
        elif key == "IMPORT_PROBLEM": # pragma: no cover
            try:
                reason, fingerprint = value.split()
            except:
                reason = value
                fingerprint = '<unknown>'
            self.results.append({'fingerprint': fingerprint,
                'problem': reason, 'text': self.problem_reason[reason]})
        elif key == "IMPORT_RES":
            import_res = value.split()
            # Copy the summary columns onto same-named attributes as ints.
            for i, count in enumerate(self.counts):
                setattr(self, count, int(import_res[i]))
        elif key == "KEYEXPIRED": # pragma: no cover
            self.results.append({'fingerprint': None,
                'problem': '0', 'text': 'Key expired'})
        elif key == "SIGEXPIRED": # pragma: no cover
            self.results.append({'fingerprint': None,
                'problem': '0', 'text': 'Signature expired'})
        elif key == "FAILURE": # pragma: no cover
            self.results.append({'fingerprint': None,
                'problem': '0', 'text': 'Other failure'})
        else: # pragma: no cover
            logger.debug('message ignored: %s, %s', key, value)

    def summary(self):
        '''Return a short human-readable summary, e.g. "2 imported".'''
        l = []
        l.append('%d imported' % self.imported)
        if self.not_imported: # pragma: no cover
            l.append('%d not imported' % self.not_imported)
        return ', '.join(l)
+
# Matches \xNN hexadecimal escapes (case-insensitive) in gpg's colon-format
# output; used to decode escaped bytes in user-id strings.
ESCAPE_PATTERN = re.compile(r'\\x([0-9a-f][0-9a-f])', re.I)
# Literal C-style escape sequences that may also appear in user-id strings,
# mapped to the characters they denote.
BASIC_ESCAPES = {
    r'\n': '\n',
    r'\r': '\r',
    r'\f': '\f',
    r'\v': '\v',
    r'\b': '\b',
    r'\0': '\0',
}
+
class SendResult(object):
    # NOTE(review): presumably the result object for key-send operations;
    # it records nothing and merely logs every status line it receives.
    def __init__(self, gpg):
        # Owning GPG instance.
        self.gpg = gpg

    def handle_status(self, key, value):
        logger.debug('SendResult: %s: %s', key, value)
+
class SearchKeys(list):
    ''' Handle status messages for --search-keys.

    Handle pub and uid (relating the latter to the former).

    Don't care about the rest
    '''

    # Index of the uid string within a colon-split 'uid' record.
    UID_INDEX = 1
    # Names for the leading columns of a colon-split 'pub' record.
    FIELDS = 'type keyid algo length date expires'.split()

    def __init__(self, gpg):
        self.gpg = gpg
        self.curkey = None      # the key record currently being filled in
        self.fingerprints = []
        self.uids = []

    def get_fields(self, args):
        '''Build a key record dict from the given column values, padding
        missing trailing columns with 'unavailable' and adding empty
        'uids'/'sigs' lists.'''
        result = {}
        for i, var in enumerate(self.FIELDS):
            if i < len(args):
                result[var] = args[i]
            else:
                result[var] = 'unavailable'
        result['uids'] = []
        result['sigs'] = []
        return result

    def pub(self, args):
        # Start a new key record and append it to this list (self is a list).
        self.curkey = curkey = self.get_fields(args)
        self.append(curkey)

    def uid(self, args):
        # Decode \xNN and C-style escapes, then attach the uid to the
        # current key and the flat uid list.
        uid = args[self.UID_INDEX]
        uid = ESCAPE_PATTERN.sub(lambda m: chr(int(m.group(1), 16)), uid)
        for k, v in BASIC_ESCAPES.items():
            uid = uid.replace(k, v)
        self.curkey['uids'].append(uid)
        self.uids.append(uid)

    def handle_status(self, key, value): # pragma: no cover
        # Status lines carry nothing useful for a key search.
        pass
+
class ListKeys(SearchKeys):
    ''' Handle status messages for --list-keys, --list-sigs.

    Handle pub and uid (relating the latter to the former).

    Don't care about (info from src/DETAILS):

    crt = X.509 certificate
    crs = X.509 certificate and private key available
    uat = user attribute (same as user id except for field 10).
    sig = signature
    rev = revocation signature
    pkd = public key data (special field format, see below)
    grp = reserved for gpgsm
    rvk = revocation key
    '''

    # In full key listings the uid text lives in column 9 (vs. 1 for search).
    UID_INDEX = 9
    FIELDS = 'type trust length algo keyid date expires dummy ownertrust uid sig'.split()

    def __init__(self, gpg):
        super(ListKeys, self).__init__(gpg)
        self.in_subkey = False  # True while parsing a subkey's records
        self.key_map = {}       # fingerprint -> key record

    def key(self, args):
        '''Start a new key record from a 'pub'/'sec' line, folding any
        inline uid into the 'uids' list.'''
        self.curkey = curkey = self.get_fields(args)
        if curkey['uid']:
            curkey['uids'].append(curkey['uid'])
        del curkey['uid']
        curkey['subkeys'] = []
        self.append(curkey)
        self.in_subkey = False

    pub = sec = key  # 'pub' and 'sec' records are handled identically

    def fpr(self, args):
        # A fingerprint record applies to the current key, or to its most
        # recent subkey if one is being parsed.
        fp = args[9]
        if fp in self.key_map: # pragma: no cover
            raise ValueError('Unexpected fingerprint collision: %s' % fp)
        if not self.in_subkey:
            self.curkey['fingerprint'] = fp
            self.fingerprints.append(fp)
            self.key_map[fp] = self.curkey
        else:
            self.curkey['subkeys'][-1].append(fp)
            self.key_map[fp] = self.curkey

    def sub(self, args):
        subkey = [args[4], args[11]] # keyid, type
        self.curkey['subkeys'].append(subkey)
        self.in_subkey = True

    def ssb(self, args):
        subkey = [args[4], None] # keyid, type
        self.curkey['subkeys'].append(subkey)
        self.in_subkey = True

    def sig(self, args):
        # keyid, uid, sigclass
        self.curkey['sigs'].append((args[4], args[9], args[10]))
+
class ScanKeys(ListKeys):
    ''' Handle status messages for --with-fingerprint.'''

    def sub(self, args):
        # --with-fingerprint --with-colons somehow outputs fewer colons,
        # use the last value args[-1] instead of args[11]
        keyid, keytype = args[4], args[-1]
        self.curkey['subkeys'].append([keyid, keytype])
        self.in_subkey = True
+
class TextHandler(object):
    '''Mixin providing text conversion of the raw result in self.data.

    On Python 3, str() decodes self.data using the owning GPG instance's
    encoding and error policy; on Python 2, unicode() decodes while str()
    returns the data unchanged.'''
    def _as_text(self):
        return self.data.decode(self.gpg.encoding, self.gpg.decode_errors)

    if _py3k:
        __str__ = _as_text
    else:
        __unicode__ = _as_text

        def __str__(self):
            # This definition must remain inside the 'else' (Python 2)
            # branch, as in upstream python-gnupg: defined at class level
            # it would override the '__str__ = _as_text' binding above and
            # make str() return raw, undecoded data on Python 3.
            return self.data
+
+
class Crypt(Verify, TextHandler):
    "Handle status messages for --encrypt and --decrypt"
    def __init__(self, gpg):
        Verify.__init__(self, gpg)
        self.data = ''      # accumulated output payload (see TextHandler)
        self.ok = False     # True once gpg reports the operation completed
        self.status = ''

    def __nonzero__(self):
        # Truthy only when encryption/decryption completed successfully.
        if self.ok: return True
        return False

    __bool__ = __nonzero__  # Python 3 protocol

    def handle_status(self, key, value):
        '''Process one STATUS-FD line for an encrypt/decrypt operation,
        delegating any unrecognized token to Verify.handle_status().'''
        if key in ("WARNING", "ERROR"):
            logger.warning('potential problem: %s: %s', key, value)
        elif key == "NODATA":
            self.status = "no data was provided"
        elif key in ("NEED_PASSPHRASE", "BAD_PASSPHRASE", "GOOD_PASSPHRASE",
                     "MISSING_PASSPHRASE", "DECRYPTION_FAILED",
                     "KEY_NOT_CREATED", "NEED_PASSPHRASE_PIN"):
            # Status text is the token itself, lowercased and despaced.
            self.status = key.replace("_", " ").lower()
        elif key == "NEED_PASSPHRASE_SYM":
            self.status = 'need symmetric passphrase'
        elif key == "BEGIN_DECRYPTION":
            self.status = 'decryption incomplete'
        elif key == "BEGIN_ENCRYPTION":
            self.status = 'encryption incomplete'
        elif key == "DECRYPTION_OKAY":
            self.status = 'decryption ok'
            self.ok = True
        elif key == "END_ENCRYPTION":
            self.status = 'encryption ok'
            self.ok = True
        elif key == "INV_RECP": # pragma: no cover
            self.status = 'invalid recipient'
        elif key == "KEYEXPIRED": # pragma: no cover
            self.status = 'key expired'
        elif key == "SIG_CREATED": # pragma: no cover
            self.status = 'sig created'
        elif key == "SIGEXPIRED": # pragma: no cover
            self.status = 'sig expired'
        elif key in ("ENC_TO", "USERID_HINT", "GOODMDC",
                     "END_DECRYPTION", "CARDCTRL", "BADMDC",
                     "SC_OP_FAILURE", "SC_OP_SUCCESS",
                     "PINENTRY_LAUNCHED", "KEY_CONSIDERED"):
            # Informational tokens that carry nothing we need to record.
            pass
        else:
            Verify.handle_status(self, key, value)
+
class GenKey(object):
    """Collect status for --gen-key; truthy once a key was created."""

    def __init__(self, gpg):
        self.gpg = gpg
        self.type = None          # key type reported by KEY_CREATED
        self.fingerprint = None   # fingerprint of the generated key

    def __nonzero__(self):
        return bool(self.fingerprint)

    __bool__ = __nonzero__

    def __str__(self):
        # The fingerprint, or '' when no key was generated.
        return self.fingerprint or ''

    def handle_status(self, key, value):
        """Interpret one [GNUPG:] status line for key generation."""
        if key in ("WARNING", "ERROR"): # pragma: no cover
            logger.warning('potential problem: %s: %s', key, value)
        elif key == "KEY_CREATED":
            # Value is "<type> <fingerprint>".
            self.type, self.fingerprint = value.split()
        elif key in ("PROGRESS", "GOOD_PASSPHRASE", "KEY_NOT_CREATED"):
            pass
        else: # pragma: no cover
            logger.debug('message ignored: %s, %s', key, value)
+
class ExportResult(GenKey):
    """Handle status messages for --export[-secret-key].

    For now this just reuses GenKey's machinery; EXPORTED/EXPORT_RES
    lines carry nothing this result object needs to record. Override
    handle_status further if more specific handling becomes necessary.
    """

    def handle_status(self, key, value):
        if key not in ("EXPORTED", "EXPORT_RES"):
            super(ExportResult, self).handle_status(key, value)
+
class DeleteResult(object):
    """Collect status for --delete-key and --delete-secret-key."""

    # DELETE_PROBLEM reason codes, per gpg's doc/DETAILS.
    problem_reason = {
        '1': 'No such key',
        '2': 'Must delete secret key first',
        '3': 'Ambiguous specification',
    }

    def __init__(self, gpg):
        self.gpg = gpg
        self.status = 'ok'

    def __str__(self):
        return self.status

    def handle_status(self, key, value):
        """Record a deletion problem; log and ignore anything else."""
        if key == "DELETE_PROBLEM": # pragma: no cover
            self.status = self.problem_reason.get(
                value, "Unknown error: %r" % value)
        else: # pragma: no cover
            logger.debug('message ignored: %s, %s', key, value)

    def __nonzero__(self):
        return self.status == 'ok'

    __bool__ = __nonzero__
+
+
class Sign(TextHandler):
    """Collect status for --sign; truthy once a signature was created."""

    def __init__(self, gpg):
        self.gpg = gpg
        self.type = None
        self.hash_algo = None
        self.fingerprint = None
        self.status = None

    def __nonzero__(self):
        return self.fingerprint is not None

    __bool__ = __nonzero__

    def handle_status(self, key, value):
        """Interpret one [GNUPG:] status line for a signing run."""
        if key in ("WARNING", "ERROR", "FAILURE"): # pragma: no cover
            logger.warning('potential problem: %s: %s', key, value)
        elif key in ("KEYEXPIRED", "SIGEXPIRED"): # pragma: no cover
            self.status = 'key expired'
        elif key == "KEYREVOKED": # pragma: no cover
            self.status = 'key revoked'
        elif key == "SIG_CREATED":
            # Value fields: type algo hash-algo class timestamp fingerprint
            (self.type, algo, self.hash_algo, cls,
             self.timestamp, self.fingerprint) = value.split()
            self.status = 'signature created'
        elif key in ("USERID_HINT", "NEED_PASSPHRASE", "GOOD_PASSPHRASE",
                     "BAD_PASSPHRASE", "BEGIN_SIGNING"):
            pass
        else: # pragma: no cover
            logger.debug('message ignored: %s, %s', key, value)
+
# Parses the first line of `gpg --version` output, capturing the dotted
# version number. A bytes pattern, since gpg's output is collected as bytes.
VERSION_RE = re.compile(r'gpg \(GnuPG\) (\d+(\.\d+)*)'.encode('ascii'), re.I)
# A search query consisting solely of hex digits is treated as a key ID.
HEX_DIGITS_RE = re.compile(r'[0-9a-f]+$', re.I)
+
class GPG(object):
    """Encapsulate access to the gpg executable."""

    # Error policy used when decoding gpg's byte output into text.
    decode_errors = 'strict'

    # Maps an operation name to the result class that parses that
    # operation's status output; each gpg call instantiates one of these.
    result_map = {
        'crypt': Crypt,
        'delete': DeleteResult,
        'generate': GenKey,
        'import': ImportResult,
        'send': SendResult,
        'list': ListKeys,
        'scan': ScanKeys,
        'search': SearchKeys,
        'sign': Sign,
        'verify': Verify,
        'export': ExportResult,
    }
+
+ "Encapsulate access to the gpg executable"
+ def __init__(self, gpgbinary='gpg', gnupghome=None, verbose=False,
+ use_agent=False, keyring=None, options=None,
+ secret_keyring=None):
+ """Initialize a GPG process wrapper. Options are:
+
+ gpgbinary -- full pathname for GPG binary.
+
+ gnupghome -- full pathname to where we can find the public and
+ private keyrings. Default is whatever gpg defaults to.
+ keyring -- name of alternative keyring file to use, or list of such
+ keyrings. If specified, the default keyring is not used.
+ options =-- a list of additional options to pass to the GPG binary.
+ secret_keyring -- name of alternative secret keyring file to use, or
+ list of such keyrings.
+ """
+ self.gpgbinary = gpgbinary
+ self.gnupghome = gnupghome
+ if keyring:
+ # Allow passing a string or another iterable. Make it uniformly
+ # a list of keyring filenames
+ if isinstance(keyring, string_types):
+ keyring = [keyring]
+ self.keyring = keyring
+ if secret_keyring:
+ # Allow passing a string or another iterable. Make it uniformly
+ # a list of keyring filenames
+ if isinstance(secret_keyring, string_types):
+ secret_keyring = [secret_keyring]
+ self.secret_keyring = secret_keyring
+ self.verbose = verbose
+ self.use_agent = use_agent
+ if isinstance(options, str): # pragma: no cover
+ options = [options]
+ self.options = options
+ self.on_data = None # or a callable - will be called with data chunks
+ # Changed in 0.3.7 to use Latin-1 encoding rather than
+ # locale.getpreferredencoding falling back to sys.stdin.encoding
+ # falling back to utf-8, because gpg itself uses latin-1 as the default
+ # encoding.
+ self.encoding = 'latin-1'
+ if gnupghome and not os.path.isdir(self.gnupghome):
+ os.makedirs(self.gnupghome,0x1C0)
+ try:
+ p = self._open_subprocess(["--version"])
+ except OSError:
+ msg = 'Unable to run gpg - it may not be available.'
+ logger.exception(msg)
+ raise OSError(msg)
+ result = self.result_map['verify'](self) # any result will do for this
+ self._collect_output(p, result, stdin=p.stdin)
+ if p.returncode != 0: # pragma: no cover
+ raise ValueError("Error invoking gpg: %s: %s" % (p.returncode,
+ result.stderr))
+ m = VERSION_RE.match(result.data)
+ if not m: # pragma: no cover
+ self.version = None
+ else:
+ dot = '.'.encode('ascii')
+ self.version = tuple([int(s) for s in m.groups()[0].split(dot)])
+
+ def make_args(self, args, passphrase):
+ """
+ Make a list of command line elements for GPG. The value of ``args``
+ will be appended. The ``passphrase`` argument needs to be True if
+ a passphrase will be sent to GPG, else False.
+ """
+ cmd = [self.gpgbinary, '--status-fd', '2', '--no-tty']
+ cmd.extend(['--debug', 'ipc'])
+ if passphrase and hasattr(self, 'version'):
+ if self.version >= (2, 1):
+ cmd[1:1] = ['--pinentry-mode', 'loopback']
+ cmd.extend(['--fixed-list-mode', '--batch', '--with-colons'])
+ if self.gnupghome:
+ cmd.extend(['--homedir', no_quote(self.gnupghome)])
+ if self.keyring:
+ cmd.append('--no-default-keyring')
+ for fn in self.keyring:
+ cmd.extend(['--keyring', no_quote(fn)])
+ if self.secret_keyring:
+ for fn in self.secret_keyring:
+ cmd.extend(['--secret-keyring', no_quote(fn)])
+ if passphrase:
+ cmd.extend(['--passphrase-fd', '0'])
+ if self.use_agent: # pragma: no cover
+ cmd.append('--use-agent')
+ if self.options:
+ cmd.extend(self.options)
+ cmd.extend(args)
+ return cmd
+
+ def _open_subprocess(self, args, passphrase=False):
+ # Internal method: open a pipe to a GPG subprocess and return
+ # the file objects for communicating with it.
+
+ # def debug_print(cmd):
+ # result = []
+ # for c in cmd:
+ # if ' ' not in c:
+ # result.append(c)
+ # else:
+ # if '"' not in c:
+ # result.append('"%s"' % c)
+ # elif "'" not in c:
+ # result.append("'%s'" % c)
+ # else:
+ # result.append(c) # give up
+ # return ' '.join(cmd)
+ from subprocess import list2cmdline as debug_print
+
+ cmd = self.make_args(args, passphrase)
+ if self.verbose: # pragma: no cover
+ print(debug_print(cmd))
+ if not STARTUPINFO:
+ si = None
+ else: # pragma: no cover
+ si = STARTUPINFO()
+ si.dwFlags = STARTF_USESHOWWINDOW
+ si.wShowWindow = SW_HIDE
+ result = Popen(cmd, shell=False, stdin=PIPE, stdout=PIPE, stderr=PIPE,
+ startupinfo=si)
+ logger.debug("%s: %s", result.pid, debug_print(cmd))
+ return result
+
    def _read_response(self, stream, result):
        # Internal method: reads all the stderr output from GPG, taking notice
        # only of lines that begin with the magic [GNUPG:] prefix.
        #
        # Calls methods on the response object for each valid token found,
        # with the arg being the remainder of the status line.
        lines = []
        while True:
            line = stream.readline()
            if len(line) == 0:
                # EOF: gpg closed its stderr.
                break
            lines.append(line)
            line = line.rstrip()
            if self.verbose: # pragma: no cover
                print(line)
            logger.debug("%s", line)
            if line[0:9] == '[GNUPG:] ':
                # Chop off the prefix
                line = line[9:]
                L = line.split(None, 1)
                keyword = L[0]
                if len(L) > 1:
                    value = L[1]
                else:
                    value = ""
                # Let the result object interpret the status keyword.
                result.handle_status(keyword, value)
        # Keep the complete stderr text for diagnostics.
        result.stderr = ''.join(lines)
+
+ def _read_data(self, stream, result, on_data=None):
+ # Read the contents of the file from GPG's stdout
+ chunks = []
+ while True:
+ data = stream.read(1024)
+ if len(data) == 0:
+ break
+ logger.debug("chunk: %r" % data[:256])
+ chunks.append(data)
+ if on_data:
+ on_data(data)
+ if _py3k:
+ # Join using b'' or '', as appropriate
+ result.data = type(data)().join(chunks)
+ else:
+ result.data = ''.join(chunks)
+
+ def _collect_output(self, process, result, writer=None, stdin=None):
+ """
+ Drain the subprocesses output streams, writing the collected output
+ to the result. If a writer thread (writing to the subprocess) is given,
+ make sure it's joined before returning. If a stdin stream is given,
+ close it before returning.
+ """
+ stderr = codecs.getreader(self.encoding)(process.stderr)
+ rr = threading.Thread(target=self._read_response, args=(stderr, result))
+ rr.setDaemon(True)
+ logger.debug('stderr reader: %r', rr)
+ rr.start()
+
+ stdout = process.stdout
+ dr = threading.Thread(target=self._read_data, args=(stdout, result, self.on_data))
+ dr.setDaemon(True)
+ logger.debug('stdout reader: %r', dr)
+ dr.start()
+
+ dr.join()
+ rr.join()
+ if writer is not None:
+ writer.join()
+ process.wait()
+ if stdin is not None:
+ try:
+ stdin.close()
+ except IOError: # pragma: no cover
+ pass
+ stderr.close()
+ stdout.close()
+
    def _handle_io(self, args, fileobj, result, passphrase=None, binary=False):
        "Handle a call to GPG - pass input data, collect output data"
        # Handle a basic data call - pass data to GPG, handle the output
        # including status information. Garbage In, Garbage Out :)
        p = self._open_subprocess(args, passphrase is not None)
        if not binary: # pragma: no cover
            # Wrap stdin so text payloads are encoded on the way in.
            stdin = codecs.getwriter(self.encoding)(p.stdin)
        else:
            stdin = p.stdin
        if passphrase:
            # The passphrase must go first on the passphrase fd (stdin).
            _write_passphrase(stdin, passphrase, self.encoding)
        # Copy the payload to gpg on a background thread.
        writer = _threaded_copy_data(fileobj, stdin)
        self._collect_output(p, result, writer, stdin)
        return result
+
+ #
+ # SIGNATURE METHODS
+ #
+ def sign(self, message, **kwargs):
+ """sign message"""
+ f = _make_binary_stream(message, self.encoding)
+ result = self.sign_file(f, **kwargs)
+ f.close()
+ return result
+
+ def set_output_without_confirmation(self, args, output):
+ "If writing to a file which exists, avoid a confirmation message."
+ if os.path.exists(output):
+ # We need to avoid an overwrite confirmation message
+ args.extend(['--yes'])
+ args.extend(['--output', no_quote(output)])
+
    def sign_file(self, file, keyid=None, passphrase=None, clearsign=True,
                  detach=False, binary=False, output=None, extra_args=None):
        """Sign the contents of the file-like object 'file'.

        keyid -- key to sign with (gpg's default key when None).
        passphrase -- passphrase for the signing key, if required.
        clearsign/detach/binary -- select the signature format.
        output -- optional path to write the signature to.
        """
        logger.debug("sign_file: %s", file)
        if binary: # pragma: no cover
            args = ['-s']
        else:
            args = ['-sa']
        # You can't specify detach-sign and clearsign together: gpg ignores
        # the detach-sign in that case.
        if detach:
            args.append("--detach-sign")
        elif clearsign:
            args.append("--clearsign")
        if keyid:
            args.extend(['--default-key', no_quote(keyid)])
        if output: # write the output to a file with the specified name
            self.set_output_without_confirmation(args, output)

        if extra_args:
            args.extend(extra_args)
        result = self.result_map['sign'](self)
        #We could use _handle_io here except for the fact that if the
        #passphrase is bad, gpg bails and you can't write the message.
        p = self._open_subprocess(args, passphrase is not None)
        try:
            stdin = p.stdin
            if passphrase:
                _write_passphrase(stdin, passphrase, self.encoding)
            writer = _threaded_copy_data(file, stdin)
        except IOError: # pragma: no cover
            logging.exception("error writing message")
            writer = None
        self._collect_output(p, result, writer, stdin)
        return result
+
    def verify(self, data, **kwargs):
        """Verify the signature on the contents of the string 'data'

        >>> GPGBINARY = os.environ.get('GPGBINARY', 'gpg')
        >>> gpg = GPG(gpgbinary=GPGBINARY, gnupghome="keys")
        >>> input = gpg.gen_key_input(passphrase='foo')
        >>> key = gpg.gen_key(input)
        >>> assert key
        >>> sig = gpg.sign('hello',keyid=key.fingerprint,passphrase='bar')
        >>> assert not sig
        >>> sig = gpg.sign('hello',keyid=key.fingerprint,passphrase='foo')
        >>> assert sig
        >>> verify = gpg.verify(sig.data)
        >>> assert verify

        """
        # Wrap the string in a binary stream and delegate to verify_file.
        f = _make_binary_stream(data, self.encoding)
        result = self.verify_file(f, **kwargs)
        f.close()
        return result
+
    def verify_file(self, file, data_filename=None, close_file=True, extra_args=None):
        "Verify the signature on the contents of the file-like object 'file'"
        logger.debug('verify_file: %r, %r', file, data_filename)
        result = self.result_map['verify'](self)
        args = ['--verify']
        if extra_args:
            args.extend(extra_args)
        if data_filename is None:
            # Inline signature: feed the whole file to gpg on stdin.
            self._handle_io(args, file, result, binary=True)
        else:
            # Detached signature: write it to a temp file and name both the
            # signature file and the data file on the command line.
            logger.debug('Handling detached verification')
            import tempfile
            fd, fn = tempfile.mkstemp(prefix='pygpg')
            s = file.read()
            if close_file:
                file.close()
            logger.debug('Wrote to temp file: %r', s)
            os.write(fd, s)
            os.close(fd)
            args.append(no_quote(fn))
            args.append(no_quote(data_filename))
            try:
                p = self._open_subprocess(args)
                self._collect_output(p, result, stdin=p.stdin)
            finally:
                # Always remove the temporary signature file.
                os.unlink(fn)
        return result
+
+ def verify_data(self, sig_filename, data, extra_args=None):
+ "Verify the signature in sig_filename against data in memory"
+ logger.debug('verify_data: %r, %r ...', sig_filename, data[:16])
+ result = self.result_map['verify'](self)
+ args = ['--verify']
+ if extra_args:
+ args.extend(extra_args)
+ args.extend([no_quote(sig_filename), '-'])
+ stream = _make_memory_stream(data)
+ self._handle_io(args, stream, result, binary=True)
+ return result
+
+ #
+ # KEY MANAGEMENT
+ #
+
+ def import_keys(self, key_data):
+ """
+ Import the key_data into our keyring.
+ """
+ result = self.result_map['import'](self)
+ logger.debug('import_keys: %r', key_data[:256])
+ data = _make_binary_stream(key_data, self.encoding)
+ self._handle_io(['--import'], data, result, binary=True)
+ logger.debug('import_keys result: %r', result.__dict__)
+ data.close()
+ return result
+
    def recv_keys(self, keyserver, *keyids):
        """Import a key from a keyserver

        >>> import shutil
        >>> shutil.rmtree("keys", ignore_errors=True)
        >>> GPGBINARY = os.environ.get('GPGBINARY', 'gpg')
        >>> gpg = GPG(gpgbinary=GPGBINARY, gnupghome="keys")
        >>> os.chmod('keys', 0x1C0)
        >>> result = gpg.recv_keys('pgp.mit.edu', '92905378')
        >>> assert result

        """
        result = self.result_map['import'](self)
        logger.debug('recv_keys: %r', keyids)
        # gpg reads nothing on stdin here; an empty stream keeps the
        # plumbing in _handle_io uniform.
        data = _make_binary_stream("", self.encoding)
        #data = ""
        args = ['--keyserver', no_quote(keyserver), '--recv-keys']
        args.extend([no_quote(k) for k in keyids])
        self._handle_io(args, data, result, binary=True)
        logger.debug('recv_keys result: %r', result.__dict__)
        data.close()
        return result
+
+ def send_keys(self, keyserver, *keyids):
+ """Send a key to a keyserver.
+
+ Note: it's not practical to test this function without sending
+ arbitrary data to live keyservers.
+ """
+ result = self.result_map['send'](self)
+ logger.debug('send_keys: %r', keyids)
+ data = _make_binary_stream('', self.encoding)
+ #data = ""
+ args = ['--keyserver', no_quote(keyserver), '--send-keys']
+ args.extend([no_quote(k) for k in keyids])
+ self._handle_io(args, data, result, binary=True)
+ logger.debug('send_keys result: %r', result.__dict__)
+ data.close()
+ return result
+
    def delete_keys(self, fingerprints, secret=False, passphrase=None):
        """Delete the indicated key(s) from the keyring.

        secret -- when True, delete secret keys instead of public ones;
        GnuPG >= 2.1 then requires a passphrase.
        """
        which='key'
        if secret: # pragma: no cover
            if self.version >= (2, 1) and passphrase is None:
                raise ValueError('For GnuPG >= 2.1, deleting secret keys '
                                 'needs a passphrase to be provided')
            which='secret-key'
        if _is_sequence(fingerprints): # pragma: no cover
            fingerprints = [no_quote(s) for s in fingerprints]
        else:
            fingerprints = [no_quote(fingerprints)]
        args = ['--delete-%s' % which]
        args.extend(fingerprints)
        result = self.result_map['delete'](self)
        if not secret or self.version < (2, 1):
            p = self._open_subprocess(args)
            self._collect_output(p, result, stdin=p.stdin)
        else:
            # Need to send in a passphrase.
            f = _make_binary_stream('', self.encoding)
            try:
                self._handle_io(args, f, result, passphrase=passphrase,
                                binary=True)
            finally:
                f.close()
        return result
+
    def export_keys(self, keyids, secret=False, armor=True, minimal=False,
                    passphrase=None):
        """
        Export the indicated keys. A 'keyid' is anything gpg accepts.

        Since GnuPG 2.1, you can't export secret keys without providing a
        passphrase.

        Returns text when armor is True, raw bytes otherwise.
        """

        which=''
        if secret:
            which='-secret-key'
            if self.version >= (2, 1) and passphrase is None:
                raise ValueError('For GnuPG >= 2.1, exporting secret keys '
                                 'needs a passphrase to be provided')
        if _is_sequence(keyids):
            keyids = [no_quote(k) for k in keyids]
        else:
            keyids = [no_quote(keyids)]
        args = ['--export%s' % which]
        if armor:
            args.append('--armor')
        if minimal: # pragma: no cover
            args.extend(['--export-options','export-minimal'])
        args.extend(keyids)
        # gpg --export produces no status-fd output; stdout will be
        # empty in case of failure
        #stdout, stderr = p.communicate()
        result = self.result_map['export'](self)
        if not secret or self.version < (2, 1):
            p = self._open_subprocess(args)
            self._collect_output(p, result, stdin=p.stdin)
        else:
            # Need to send in a passphrase.
            f = _make_binary_stream('', self.encoding)
            try:
                self._handle_io(args, f, result, passphrase=passphrase,
                                binary=True)
            finally:
                f.close()
        logger.debug('export_keys result: %r', result.data)
        # Issue #49: Return bytes if armor not specified, else text
        result = result.data
        if armor:
            result = result.decode(self.encoding, self.decode_errors)
        return result
+
    def _get_list_output(self, p, kind):
        # Internal: parse colon-delimited key-listing output from process P
        # using the result class registered under KIND ('list' or 'scan').
        # Get the response information
        result = self.result_map[kind](self)
        self._collect_output(p, result, stdin=p.stdin)
        lines = result.data.decode(self.encoding,
                                   self.decode_errors).splitlines()
        valid_keywords = 'pub uid sec fpr sub ssb sig'.split()
        for line in lines:
            if self.verbose: # pragma: no cover
                print(line)
            logger.debug("line: %r", line.rstrip())
            if not line: # pragma: no cover
                break
            L = line.strip().split(':')
            if not L: # pragma: no cover
                continue
            keyword = L[0]
            if keyword in valid_keywords:
                # Dispatch to the result's handler method of the same name.
                getattr(result, keyword)(L)
        return result
+
    def list_keys(self, secret=False, keys=None, sigs=False):
        """ list the keys currently in the keyring

        >>> import shutil
        >>> shutil.rmtree("keys", ignore_errors=True)
        >>> GPGBINARY = os.environ.get('GPGBINARY', 'gpg')
        >>> gpg = GPG(gpgbinary=GPGBINARY, gnupghome="keys")
        >>> input = gpg.gen_key_input(passphrase='foo')
        >>> result = gpg.gen_key(input)
        >>> fp1 = result.fingerprint
        >>> result = gpg.gen_key(input)
        >>> fp2 = result.fingerprint
        >>> pubkeys = gpg.list_keys()
        >>> assert fp1 in pubkeys.fingerprints
        >>> assert fp2 in pubkeys.fingerprints

        """

        # Choose the gpg listing mode; 'secret' takes precedence over 'sigs'.
        if sigs:
            which = 'sigs'
        else: which='keys'
        if secret:
            which='secret-keys'
        args = ['--list-%s' % which,
                '--fingerprint', '--fingerprint'] # get subkey FPs, too
        if keys:
            if isinstance(keys, string_types):
                keys = [keys]
            args.extend(keys)
        p = self._open_subprocess(args)
        return self._get_list_output(p, 'list')
+
+ def scan_keys(self, filename):
+ """
+ List details of an ascii armored or binary key file
+ without first importing it to the local keyring.
+
+ The function achieves this on modern GnuPG by running:
+
+ $ gpg --dry-run --import-options import-show --import
+
+ On older versions, it does the *much* riskier:
+
+ $ gpg --with-fingerprint --with-colons filename
+ """
+ if self.version >= (2, 1):
+ args = ['--dry-run', '--import-options', 'import-show', '--import']
+ else:
+ logger.warning('Trying to list packets, but if the file is not a '
+ 'keyring, might accidentally decrypt')
+ args = ['--with-fingerprint', '--with-colons', '--fixed-list-mode']
+ args.append(no_quote(filename))
+ p = self._open_subprocess(args)
+ return self._get_list_output(p, 'scan')
+
    def search_keys(self, query, keyserver='pgp.mit.edu'):
        """ search keyserver by query (using --search-keys option)

        >>> import shutil
        >>> shutil.rmtree('keys', ignore_errors=True)
        >>> GPGBINARY = os.environ.get('GPGBINARY', 'gpg')
        >>> gpg = GPG(gpgbinary=GPGBINARY, gnupghome='keys')
        >>> os.chmod('keys', 0x1C0)
        >>> result = gpg.search_keys('<vinay_sajip@hotmail.com>')
        >>> assert result, 'Failed using default keyserver'
        >>> #keyserver = 'keyserver.ubuntu.com'
        >>> #result = gpg.search_keys('<vinay_sajip@hotmail.com>', keyserver)
        >>> #assert result, 'Failed using keyserver.ubuntu.com'

        """
        query = query.strip()
        if HEX_DIGITS_RE.match(query):
            # A bare hex string is a key ID; gpg wants the 0x prefix.
            query = '0x' + query
        args = ['--fingerprint',
                '--keyserver', no_quote(keyserver), '--search-keys',
                no_quote(query)]
        p = self._open_subprocess(args)

        # Get the response information
        result = self.result_map['search'](self)
        self._collect_output(p, result, stdin=p.stdin)
        lines = result.data.decode(self.encoding,
                                   self.decode_errors).splitlines()
        valid_keywords = ['pub', 'uid']
        for line in lines:
            if self.verbose: # pragma: no cover
                print(line)
            logger.debug('line: %r', line.rstrip())
            if not line: # sometimes get blank lines on Windows
                continue
            L = line.strip().split(':')
            if not L: # pragma: no cover
                continue
            keyword = L[0]
            if keyword in valid_keywords:
                # Dispatch to the result's handler method of the same name.
                getattr(result, keyword)(L)
        return result
+
    def gen_key(self, input):
        """Generate a key; you might use gen_key_input() to create the
        control input.

        >>> GPGBINARY = os.environ.get('GPGBINARY', 'gpg')
        >>> gpg = GPG(gpgbinary=GPGBINARY, gnupghome="keys")
        >>> input = gpg.gen_key_input(passphrase='foo')
        >>> result = gpg.gen_key(input)
        >>> assert result
        >>> result = gpg.gen_key('foo')
        >>> assert not result

        """
        args = ["--gen-key"]
        result = self.result_map['generate'](self)
        # The control input is fed to gpg on stdin.
        f = _make_binary_stream(input, self.encoding)
        self._handle_io(args, f, result, binary=True)
        f.close()
        return result
+
+ def gen_key_input(self, **kwargs):
+ """
+ Generate --gen-key input per gpg doc/DETAILS
+ """
+ parms = {}
+ for key, val in list(kwargs.items()):
+ key = key.replace('_','-').title()
+ if str(val).strip(): # skip empty strings
+ parms[key] = val
+ parms.setdefault('Key-Type','RSA')
+ parms.setdefault('Key-Length',2048)
+ parms.setdefault('Name-Real', "Autogenerated Key")
+ logname = (os.environ.get('LOGNAME') or os.environ.get('USERNAME') or
+ 'unspecified')
+ hostname = socket.gethostname()
+ parms.setdefault('Name-Email', "%s@%s" % (logname.replace(' ', '_'),
+ hostname))
+ out = "Key-Type: %s\n" % parms.pop('Key-Type')
+ for key, val in list(parms.items()):
+ out += "%s: %s\n" % (key, val)
+ out += "%commit\n"
+ return out
+
+ # Key-Type: RSA
+ # Key-Length: 1024
+ # Name-Real: ISdlink Server on %s
+ # Name-Comment: Created by %s
+ # Name-Email: isdlink@%s
+ # Expire-Date: 0
+ # %commit
+ #
+ #
+ # Key-Type: DSA
+ # Key-Length: 1024
+ # Subkey-Type: ELG-E
+ # Subkey-Length: 1024
+ # Name-Real: Joe Tester
+ # Name-Comment: with stupid passphrase
+ # Name-Email: joe@foo.bar
+ # Expire-Date: 0
+ # Passphrase: abc
+ # %pubring foo.pub
+ # %secring foo.sec
+ # %commit
+
+ #
+ # ENCRYPTION
+ #
    def encrypt_file(self, file, recipients, sign=None,
                     always_trust=False, passphrase=None,
                     armor=True, output=None, symmetric=False, extra_args=None):
        "Encrypt the message read from the file-like object 'file'"
        args = ['--encrypt']
        if symmetric:
            # can't be False or None - could be True or a cipher algo value
            # such as AES256
            args = ['--symmetric']
            if symmetric is not True:
                args.extend(['--cipher-algo', no_quote(symmetric)])
            # else use the default, currently CAST5
        else:
            if not recipients:
                raise ValueError('No recipients specified with asymmetric '
                                 'encryption')
            if not _is_sequence(recipients):
                recipients = (recipients,)
            for recipient in recipients:
                args.extend(['--recipient', no_quote(recipient)])
        if armor: # create ascii-armored output - False for binary output
            args.append('--armor')
        if output: # write the output to a file with the specified name
            self.set_output_without_confirmation(args, output)
        if sign is True: # pragma: no cover
            # Sign with gpg's default key.
            args.append('--sign')
        elif sign: # pragma: no cover
            # Sign with the given key id.
            args.extend(['--sign', '--default-key', no_quote(sign)])
        if always_trust: # pragma: no cover
            args.append('--always-trust')
        if extra_args:
            args.extend(extra_args)
        result = self.result_map['crypt'](self)
        self._handle_io(args, file, result, passphrase=passphrase, binary=True)
        logger.debug('encrypt result: %r', result.data)
        return result
+
    def encrypt(self, data, recipients, **kwargs):
        """Encrypt the message contained in the string 'data'

        >>> import shutil
        >>> if os.path.exists("keys"):
        ...     shutil.rmtree("keys", ignore_errors=True)
        >>> GPGBINARY = os.environ.get('GPGBINARY', 'gpg')
        >>> gpg = GPG(gpgbinary=GPGBINARY, gnupghome="keys")
        >>> input = gpg.gen_key_input(name_email='user1@test', passphrase='pp1')
        >>> result = gpg.gen_key(input)
        >>> fp1 = result.fingerprint
        >>> input = gpg.gen_key_input(name_email='user2@test', passphrase='pp2')
        >>> result = gpg.gen_key(input)
        >>> fp2 = result.fingerprint
        >>> result = gpg.encrypt("hello",fp2)
        >>> message = str(result)
        >>> assert message != 'hello'
        >>> result = gpg.decrypt(message, passphrase='pp2')
        >>> assert result
        >>> str(result)
        'hello'
        >>> result = gpg.encrypt("hello again", fp1)
        >>> message = str(result)
        >>> result = gpg.decrypt(message, passphrase='bar')
        >>> result.status in ('decryption failed', 'bad passphrase')
        True
        >>> assert not result
        >>> result = gpg.decrypt(message, passphrase='pp1')
        >>> result.status == 'decryption ok'
        True
        >>> str(result)
        'hello again'
        >>> result = gpg.encrypt("signed hello", fp2, sign=fp1, passphrase='pp1')
        >>> result.status == 'encryption ok'
        True
        >>> message = str(result)
        >>> result = gpg.decrypt(message, passphrase='pp2')
        >>> result.status == 'decryption ok'
        True
        >>> assert result.fingerprint == fp1

        """
        # Wrap the string in a binary stream and delegate to encrypt_file.
        data = _make_binary_stream(data, self.encoding)
        result = self.encrypt_file(data, recipients, **kwargs)
        data.close()
        return result
+
+ def decrypt(self, message, **kwargs):
+ data = _make_binary_stream(message, self.encoding)
+ result = self.decrypt_file(data, **kwargs)
+ data.close()
+ return result
+
+ def decrypt_file(self, file, always_trust=False, passphrase=None,
+ output=None, extra_args=None):
+ args = ["--decrypt"]
+ if output: # write the output to a file with the specified name
+ self.set_output_without_confirmation(args, output)
+ if always_trust: # pragma: no cover
+ args.append("--always-trust")
+ if extra_args:
+ args.extend(extra_args)
+ result = self.result_map['crypt'](self)
+ self._handle_io(args, file, result, passphrase, binary=True)
+ logger.debug('decrypt result: %r', result.data)
+ return result
diff --git a/tools/dist/security/adviser.py b/tools/dist/security/adviser.py
new file mode 100644
index 0000000..e7c87d3
--- /dev/null
+++ b/tools/dist/security/adviser.py
@@ -0,0 +1,62 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+"""
+Generator of textual advisories for subversion.a.o/security
+"""
+
+from __future__ import absolute_import
+
+import os
+import sys
+
+
def __write_advisory(metadata, fd):
    """
    Create a textual representation of the advisory described
    by METADATA and write it to the file descriptor FD.
    """

    fd.write(metadata.advisory.text)
    if not metadata.patches:
        return

    fd.write('\nPatches:'
             '\n========\n')
    for patch in metadata.patches:
        fd.write('\n  Patch for Subversion ' + patch.base_version + ':\n'
                 '[[[\n')
        fd.write(patch.text)
        fd.write(']]]\n')

def generate(notification, target_dir):
    """
    Generate all advisories in NOTIFICATION as text files
    in TARGET_DIR. If TARGET_DIR is None, the advisory texts
    will be written to the standard output.
    """

    for metadata in notification:
        if target_dir is None:
            __write_advisory(metadata, sys.stdout)
        else:
            filename = metadata.tracking_id + '-advisory.txt'
            with open(os.path.join(target_dir, filename), 'wt') as fd:
                __write_advisory(metadata, fd)
diff --git a/tools/dist/security/mailer.py b/tools/dist/security/mailer.py
new file mode 100644
index 0000000..3d622f8
--- /dev/null
+++ b/tools/dist/security/mailer.py
@@ -0,0 +1,322 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+"""
+Generator of signed advisory mails
+"""
+
+from __future__ import absolute_import
+
+import re
+import uuid
+import hashlib
+import smtplib
+import textwrap
+import email.utils
+
+from email.mime.multipart import MIMEMultipart
+from email.mime.text import MIMEText
+
+try:
+ import gnupg
+except ImportError:
+ import security._gnupg as gnupg
+
+import security.parser
+
+
class Mailer(object):
    """
    Constructs signed PGP/MIME advisory mails.
    """

    def __init__(self, notification, sender, message_template,
                 release_date, dist_revision, *release_versions):
        """
        Prepare a mailer for NOTIFICATION, sent by SENDER.  The mail
        body is rendered from MESSAGE_TEMPLATE with RELEASE_DATE,
        DIST_REVISION and RELEASE_VERSIONS interpolated into it.
        """

        assert len(notification) > 0
        self.__sender = sender
        self.__notification = notification
        # This assignment replaces the bound __message_content method
        # with its result on this instance; the method is not needed
        # again after construction.
        self.__message_content = self.__message_content(
            message_template, release_date, dist_revision, release_versions)

    def __subject(self):
        """
        Construct a subject line for the notification mail.
        """

        template = ('Confidential pre-notification of'
                    ' {multiple}Subversion {culprit}{vulnerability}')

        # Construct the {culprit} replacement value. If all advisories
        # are either about the server or the client, use the
        # appropriate value; for mixed server/client advisories, use
        # an empty string.
        culprit = set()
        for metadata in self.__notification:
            culprit |= metadata.culprit
        assert len(culprit) > 0
        if len(culprit) > 1:
            culprit = ''
        elif self.__notification.Metadata.CULPRIT_CLIENT in culprit:
            culprit = 'client '
        elif self.__notification.Metadata.CULPRIT_SERVER in culprit:
            culprit = 'server '
        else:
            raise ValueError('Unknown culprit ' + repr(culprit))

        # Construct the format parameters
        if len(self.__notification) > 1:
            kwargs = dict(multiple='multiple ', culprit=culprit,
                          vulnerability='vulnerabilities')
        else:
            kwargs = dict(multiple='a ', culprit=culprit,
                          vulnerability='vulnerability')

        return template.format(**kwargs)

    def __message_content(self, message_template,
                          release_date, dist_revision, release_versions):
        """
        Construct the message from the notification mail template.
        """

        # Construct the replacement arguments for the notification template
        culprits = set()
        advisories = []
        base_version_keys = self.__notification.base_version_keys()
        for metadata in self.__notification:
            culprits |= metadata.culprit
            advisories.append(
                ' * {}\n {}'.format(metadata.tracking_id, metadata.title))
        release_version_keys = set(security.parser.Patch.split_version(n)
                                   for n in release_versions)

        multi = (len(self.__notification) > 1)
        kwargs = dict(multiple=(multi and 'multiple ' or 'a '),
                      alert=(multi and 'alerts' or 'alert'),
                      culprits=self.__culprits(culprits),
                      advisories='\n'.join(advisories),
                      release_date=release_date.strftime('%d %B %Y'),
                      release_day=release_date.strftime('%d %B'),
                      base_versions = self.__versions(base_version_keys),
                      release_versions = self.__versions(release_version_keys),
                      dist_revision=str(dist_revision))

        # Parse, interpolate and rewrap the notification template.
        # Only unindented lines are rewrapped; indented lines (lists,
        # URLs) are preserved verbatim.
        wrapped = []
        content = security.parser.Text(message_template)
        for line in content.text.format(**kwargs).split('\n'):
            if len(line) > 0 and not line[0].isspace():
                for part in textwrap.wrap(line,
                                          break_long_words=False,
                                          break_on_hyphens=False):
                    wrapped.append(part)
            else:
                wrapped.append(line)
        return security.parser.Text(None, '\n'.join(wrapped).encode('utf-8'))

    def __versions(self, versions):
        """
        Return a textual representation of the set of VERSIONS
        suitable for inclusion in a notification mail.
        """

        text = tuple(security.parser.Patch.join_version(n)
                     for n in sorted(versions))
        assert len(text) > 0
        if len(text) == 1:
            return text[0]
        elif len(text) == 2:
            return ' and '.join(text)
        else:
            # Oxford-comma-free list: "a, b and c"
            return ', '.join(text[:-1]) + ' and ' + text[-1]

    def __culprits(self, culprits):
        """
        Return a textual representation of the set of CULPRITS
        suitable for inclusion in a notification mail.
        """

        if self.__notification.Metadata.CULPRIT_CLIENT in culprits:
            if self.__notification.Metadata.CULPRIT_SERVER in culprits:
                return 'clients and servers'
            else:
                return 'clients'
        elif self.__notification.Metadata.CULPRIT_SERVER in culprits:
            return 'servers'
        else:
            raise ValueError('Unknown culprit ' + repr(culprits))

    def __attachments(self):
        """
        Generate one MIME attachment per advisory text
        (quoted-printable) and per patch (base64), rejecting
        duplicate attachment file names.
        """

        filenames = set()

        def attachment(filename, description, encoding, content):
            # Build a single text attachment with explicit
            # transfer-encoding and disposition headers.
            if filename in filenames:
                raise ValueError('Named attachment already exists: '
                                 + filename)
            filenames.add(filename)

            att = MIMEText('', 'plain', 'utf-8')
            att.set_param('name', filename)
            att.replace_header('Content-Transfer-Encoding', encoding)
            att.add_header('Content-Description', description)
            att.add_header('Content-Disposition',
                           'attachment', filename=filename)
            att.set_payload(content)
            return att

        for metadata in self.__notification:
            filename = metadata.tracking_id + '-advisory.txt'
            description = metadata.tracking_id + ' Advisory'
            yield attachment(filename, description, 'quoted-printable',
                             metadata.advisory.quoted_printable)

            for patch in metadata.patches:
                filename = (metadata.tracking_id +
                            '-' + patch.base_version + '.patch')
                description = (metadata.tracking_id
                               + ' Patch for Subversion ' + patch.base_version)
                yield attachment(filename, description, 'base64', patch.base64)

    def generate_message(self):
        """
        Build and return the signed notification message with all
        headers set.  The To: header is a placeholder that
        send_mail() replaces per recipient.
        """

        message = SignedMessage(
            self.__message_content,
            self.__attachments())
        message['From'] = self.__sender
        message['Reply-To'] = self.__sender
        message['To'] = self.__sender     # Will be replaced later
        message['Subject'] = self.__subject()
        message['Date'] = email.utils.formatdate()

        # Try to make the message-id refer to the sender's domain
        address = email.utils.parseaddr(self.__sender)[1]
        if not address:
            domain = None
        else:
            # NOTE(review): assumes ADDRESS contains an '@'; an address
            # without one would raise IndexError here — confirm callers
            # always pass a full e-mail address.
            domain = address.split('@')[1]
            if not domain:
                domain = None

        idstring = uuid.uuid1().hex
        try:
            msgid = email.utils.make_msgid(idstring, domain=domain)
        except TypeError:
            # The domain keyword was added in Python 3.2
            msgid = email.utils.make_msgid(idstring)
        message["Message-ID"] = msgid
        return message

    def send_mail(self, message, username, password, recipients=None,
                  host='mail-relay.apache.org', starttls=True, port=None):
        """
        Send MESSAGE through HOST, logging in with USERNAME and
        PASSWORD when both are given.  With RECIPIENTS set to None,
        send the message back to the originator as a self-test;
        otherwise send one copy per recipient.
        """

        # Default to the mail-submission port when using STARTTLS.
        if not port and starttls:
            port = 587
        server = smtplib.SMTP(host, port)
        if starttls:
            server.starttls()
        if username and password:
            server.login(username, password)

        def send(message):
            # XXX: The from,to arguments should be bare addresses with no "foo:"
            #      prefix. It works this way in practice, but that appears to
            #      be an accident of implementation of smtplib.
            server.sendmail("From: " + message['From'],
                            "To: " + message['To'],
                            message.as_string())

        if recipients is None:
            # Test mode, send message back to originator to check
            # that contents and signature are OK.
            message.replace_header('To', message['From'])
            send(message)
        else:
            for recipient in recipients:
                message.replace_header('To', recipient)
                send(message)
        server.quit()
+
+
class SignedMessage(MIMEMultipart):
    """
    The signed PGP/MIME message.
    """

    def __init__(self, message, attachments,
                 gpgbinary='gpg', gnupghome=None, use_agent=True,
                 keyring=None, keyid=None):
        """
        Construct a multipart/signed message with MESSAGE as the
        text part and the pre-cooked ATTACHMENTS, then sign it.
        The remaining arguments are passed through to GnuPG.
        """

        # Hack around the fact that the Python 2.x MIMEMultipart is not
        # a new-style class.
        try:
            unicode            # Doesn't exist in Python 3
            MIMEMultipart.__init__(self, 'signed')
        except NameError:
            super(SignedMessage, self).__init__('signed')

        payload = self.__payload(message, attachments)
        signature = self.__signature(
            payload, gpgbinary, gnupghome, use_agent, keyring, keyid)

        self.set_param('protocol', 'application/pgp-signature')
        # TODO: derive the micalg value from the signing key instead of
        # hard-coding SHA-512.
        self.set_param('micalg', 'pgp-sha512')  ####!!! GET THIS FROM KEY!
        self.preamble = 'This is an OpenPGP/MIME signed message.'
        self.attach(payload)
        self.attach(signature)

    def __payload(self, message, attachments):
        """
        Create the payload from the given MESSAGE and a
        set of pre-cooked ATTACHMENTS.
        """

        payload = MIMEMultipart()
        payload.preamble = 'This is a multi-part message in MIME format.'

        msg = MIMEText('', 'plain', 'utf-8')
        msg.replace_header('Content-Transfer-Encoding', 'quoted-printable')
        msg.set_payload(message.quoted_printable)
        payload.attach(msg)

        for att in attachments:
            payload.attach(att)
        return payload

    def __signature(self, payload,
                    gpgbinary, gnupghome, use_agent, keyring, keyid):
        """
        Sign the PAYLOAD and return the detached signature as
        a MIME attachment.
        """

        # RFC3156 section 5 says line endings in the signed message
        # must be canonical <CR><LF>.
        cleartext = re.sub(r'\r?\n', '\r\n', payload.as_string())

        gpg = gnupg.GPG(gpgbinary=gpgbinary, gnupghome=gnupghome,
                        use_agent=use_agent, keyring=keyring)
        signature = gpg.sign(cleartext,
                             keyid=keyid, detach=True, clearsign=False)
        # set_charset(None) keeps the ASCII-armored signature payload
        # exactly as GnuPG produced it.
        sig = MIMEText('')
        sig.set_type('application/pgp-signature')
        sig.set_charset(None)
        sig.set_param('name', 'signature.asc')
        sig.add_header('Content-Description', 'OpenPGP digital signature')
        sig.add_header('Content-Disposition',
                       'attachment', filename='signature.asc')
        sig.set_payload(str(signature))
        return sig
diff --git a/tools/dist/security/mailinglist.py b/tools/dist/security/mailinglist.py
new file mode 100644
index 0000000..df473f2
--- /dev/null
+++ b/tools/dist/security/mailinglist.py
@@ -0,0 +1,56 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+"""
+Parser for advisory e-mail distribution addresses
+"""
+
+from __future__ import absolute_import
+
+
+import os
+import re
+
+
class MailingList(object):
    """
    A list of e-mail addresses for security advisory pre-notifications.
    Parses ^/pmc/subversion/security/pre-notifications.txt
    """

    # A recipient line: six leading whitespace characters, an optional
    # real-name part, then exactly one <address> and nothing else.
    __ADDRESS_LINE = re.compile(r'^\s{6}(?:[^<]+)?<[^<>]+>\s*$')

    def __init__(self, mailing_list):
        """Load all recipient addresses from the file MAILING_LIST."""
        self.__addresses = self.__read_addresses(mailing_list)

    def __iter__(self):
        """Iterate over the parsed addresses."""
        return iter(self.__addresses)

    def __len__(self):
        """Return the number of parsed addresses."""
        return len(self.__addresses)

    @classmethod
    def __read_addresses(cls, mailing_list):
        """Return the stripped matching lines found in MAILING_LIST."""
        with open(mailing_list, 'rt') as source:
            return [entry.strip() for entry in source
                    if cls.__ADDRESS_LINE.match(entry)]
diff --git a/tools/dist/security/parser.py b/tools/dist/security/parser.py
new file mode 100644
index 0000000..2f1b883
--- /dev/null
+++ b/tools/dist/security/parser.py
@@ -0,0 +1,280 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+"""
+Parser for CVE/CAN advisories and patches.
+"""
+
+from __future__ import absolute_import
+
+
+import os
+import re
+import ast
+import base64
+import quopri
+
+
class Notification(object):
    """
    The complete security notification, containing multiple advisories.
    """

    class Metadata(object):
        """
        The metadata for one advisory, with the following properties:
          tracking_id - the CVE/CAN number
          title       - a short description of the issue
          culprit     - server, client or both
          advisory    - a Text object with the text of the advisory
          patches     - a list of Patch objects, sorted in descending
                        order by the base version
        """

        CULPRIT_SERVER = 'server'
        CULPRIT_CLIENT = 'client'

        # Accepted values of the 'culprit' metadata field: one of the
        # single culprits, or a tuple naming both in either order.
        __CULPRITS = ((CULPRIT_SERVER, CULPRIT_CLIENT,
                       (CULPRIT_SERVER, CULPRIT_CLIENT),
                       (CULPRIT_CLIENT, CULPRIT_SERVER)))

        def __init__(self, basedir, tracking_id,
                     title, culprit, advisory, patches):
            """
            Load the advisory and patches for TRACKING_ID from BASEDIR.
            CULPRIT must be one of the __CULPRITS values; ADVISORY is
            the advisory file name and PATCHES maps base versions to
            patch file names, both relative to BASEDIR.
            """

            if culprit not in self.__CULPRITS:
                raise ValueError('Culprit should be one of: '
                                 + ', '.join(repr(x) for x in self.__CULPRITS))
            # Normalize a single culprit to a 1-tuple.
            if not isinstance(culprit, tuple):
                culprit = (culprit,)

            self.__tracking_id = tracking_id
            self.__title = title
            self.__culprit = frozenset(culprit)
            self.__advisory = Text(os.path.join(basedir, advisory))
            self.__patches = []
            for base_version, patchfile in patches.items():
                patch = Patch(base_version, os.path.join(basedir, patchfile))
                self.__patches.append(patch)
            # Newest base version first.
            self.__patches.sort(reverse=True, key=lambda x: x.base_version_key)

        @property
        def tracking_id(self):
            # The CVE/CAN identifier.
            return self.__tracking_id

        @property
        def title(self):
            # Short description of the issue.
            return self.__title

        @property
        def culprit(self):
            # frozenset of CULPRIT_* values.
            return self.__culprit

        @property
        def advisory(self):
            # Text object with the advisory body.
            return self.__advisory

        @property
        def patches(self):
            # Immutable view of the sorted Patch objects.
            return tuple(self.__patches)


    def __init__(self, rootdir, *tracking_ids):
        """
        Create the security notification for all TRACKING_IDS.
        The advisories and patches for each tracking ID must be
        in the appropriately named subdirectory of ROOTDIR.
        """

        assert(len(tracking_ids) > 0)
        self.__advisories = []
        for tid in tracking_ids:
            self.__advisories.append(self.__parse_advisory(rootdir, tid))

    def __iter__(self):
        """Iterate over the advisories' Metadata objects."""
        return self.__advisories.__iter__()

    def __len__(self):
        """Return the number of advisories in the notification."""
        return len(self.__advisories)

    def __parse_advisory(self, rootdir, tracking_id):
        """
        Parse a single advisory named TRACKING_ID in ROOTDIR.
        """

        basedir = os.path.join(rootdir, tracking_id)
        # The metadata file holds a Python dict literal; literal_eval
        # only evaluates constant expressions, never arbitrary code.
        with open(os.path.join(basedir, 'metadata'), 'rt') as md:
            metadata = ast.literal_eval(md.read())

        return self.Metadata(basedir, tracking_id,
                             metadata['title'],
                             metadata['culprit'],
                             metadata['advisory'],
                             metadata['patches'])

    def base_version_keys(self):
        """
        Return the set of base-version keys of all the patches.
        """

        base_version_keys = set()
        for metadata in self:
            for patch in metadata.patches:
                base_version_keys.add(patch.base_version_key)
        return base_version_keys
+
+
class __Part(object):
    """
    Base class for advisory-part containers: holds the raw bytes of
    one part of the notification and exposes them in the encodings
    needed for text and mail generation.  Concrete subclasses must
    define the class attribute TEXTMODE.
    """

    def __init__(self, path, text=None):
        """
        Create a text object with contents from the file at PATH.
        If self.TEXTMODE is True, strip whitespace from the end of
        all lines and strip empty lines from the end of the file.

        Alternatively, if PATH is None, set the contents to TEXT,
        which must be convertible to bytes.
        """

        # Exactly one of PATH and TEXT may be provided.
        assert (path is None) is not (text is None)
        if path:
            self.__text = self.__load_file(path)
        else:
            self.__text = bytes(text)

    def __load_file(self, path):
        """
        Return the contents of the file at PATH, canonicalized
        according to self.TEXTMODE.
        """

        with open(path, 'rb') as src:
            if not self.TEXTMODE:
                return src.read()

            text = []
            for line in src:
                text.append(line.rstrip() + b'\n')

            # Strip trailing empty lines in text mode.  Every entry in
            # TEXT ends with the b'\n' appended above, so a blank line
            # is stored as b'\n' (which is truthy); the previous check,
            # 'not text[-1]', could therefore never fire.  Compare with
            # b'\n' so trailing blank lines really are removed.
            while len(text) and text[-1] == b'\n':
                del text[-1]
            return b''.join(text)

    @property
    def text(self):
        """
        Return the contents decoded as UTF-8 text.
        """

        return self.__text.decode('UTF-8')

    @property
    def quoted_printable(self):
        """
        Return contents encoded as quoted-printable.
        """

        return quopri.encodestring(self.__text).decode('ascii')

    # Maximum length of one line of Base64-encoded output.
    BASE64_LINE_LENGTH = 64

    @property
    def base64(self):
        """
        Return multi-line Base64-encoded contents with the length
        of the lines limited to BASE64_LINE_LENGTH.
        """

        text = []
        data = base64.standard_b64encode(self.__text)
        start = 0
        end = self.BASE64_LINE_LENGTH
        while end < len(data):
            text.append(data[start:end] + b'\n')
            start += self.BASE64_LINE_LENGTH
            end += self.BASE64_LINE_LENGTH
        if start < len(data):
            text.append(data[start:] + b'\n')
        return b''.join(text).decode('ascii')
+
+
class Text(__Part):
    """
    In-memory container for the text of the advisory.
    """

    # Canonicalize trailing whitespace and trailing blank lines when
    # loading from a file (see __Part.__load_file).
    TEXTMODE = True
+
+
class Patch(__Part):
    """
    In-memory container for patches.
    """

    # Patch contents are kept verbatim; no whitespace canonicalization.
    TEXTMODE = False

    def __init__(self, base_version, path):
        """Load the patch for BASE_VERSION from the file at PATH."""
        super(Patch, self).__init__(path)
        self.__base_version = base_version
        self.__base_version_key = self.split_version(base_version)

    @property
    def base_version(self):
        """The version string this patch applies to."""
        return self.__base_version

    @property
    def base_version_key(self):
        """Sortable tuple form of base_version."""
        return self.__base_version_key

    @property
    def quoted_printable(self):
        raise NotImplementedError('Quoted-printable patches? Really?')


    __SPLIT_VERSION_RX = re.compile(r'^(\d+)(?:\.(\d+))?(?:\.(\d+))?(.+)?$')

    @classmethod
    def split_version(cls, version):
        """
        Splits a version number in the form n.n.n-tag into a tuple
        of its components.
        """

        components = []
        for piece in cls.__SPLIT_VERSION_RX.match(version).groups():
            if piece is None:
                continue
            try:
                components.append(int(piece))
            except ValueError:
                components.append(piece)
        return tuple(components)

    @classmethod
    def join_version(cls, version_tuple):
        """
        Joins a version number tuple returned by Patch.split_version
        into a string.
        """

        chunks = []
        previous = None
        for component in version_tuple:
            # A dot goes before every numeric component except the first.
            if previous is not None and isinstance(component, int):
                chunks.append('.')
            previous = component
            chunks.append(str(component))
        return ''.join(chunks)
diff --git a/tools/dist/templates/download.ezt b/tools/dist/templates/download.ezt
new file mode 100644
index 0000000..4c6fda8
--- /dev/null
+++ b/tools/dist/templates/download.ezt
@@ -0,0 +1,17 @@
+<p style="font-size: 150%; text-align: center;">Apache Subversion [version]</p>
+<table class="centered">
+<tr>
+ <th>File</th>
+ <th>Checksum (SHA1)</th>
+ <th>Checksum (SHA512)</th>
+ <th>Signatures</th>
+</tr>
+[for fileinfo]<tr>
+ <td><a href="[[]preferred]subversion/[fileinfo.filename]">[fileinfo.filename]</a></td>
+ <td class="checksum">[fileinfo.sha1]</td>
+ <!-- The sha512 line does not have a class="checksum" since the link needn't
+ be rendered in monospace. -->
+ <td>[<a href="http://www.apache.org/dist/subversion/[fileinfo.filename].sha512">SHA-512</a>]</td>
+ <td>[<a href="http://www.apache.org/dist/subversion/[fileinfo.filename].asc">PGP</a>]</td>
+</tr>[end]
+</table>
diff --git a/tools/dist/templates/rc-news.ezt b/tools/dist/templates/rc-news.ezt
new file mode 100644
index 0000000..a645ffa
--- /dev/null
+++ b/tools/dist/templates/rc-news.ezt
@@ -0,0 +1,22 @@
+<div class="h3" id="news-[date]">
+<h3>[date_pres] &mdash; Apache Subversion [version] Released
+ <a class="sectionlink" href="#news-[date]"
+ title="Link to this section">&para;</a>
+</h3>
+
+<p>We are pleased to announce the release of Apache Subversion [version]. This
+ release is not intended for production use, but is provided as a milestone
+ to encourage wider testing and feedback from intrepid users and maintainers.
+ Please see the
+ <a href="">release
+ announcement</a> for more information about this release, and the
+ <a href="/docs/release-notes/[major-minor].html">release notes</a> and
+ <a href="http://svn.apache.org/repos/asf/subversion/tags/[version]/CHANGES">
+ change log</a> for information about what will eventually be
+ in the [version_base] release.</p>
+
+<p>To get this release from the nearest mirror, please visit our
+ <a href="/download.cgi#[anchor]">download page</a>.</p>
+
+</div> <!-- #news-[date] -->
+
diff --git a/tools/dist/templates/rc-release-ann.ezt b/tools/dist/templates/rc-release-ann.ezt
new file mode 100644
index 0000000..ca5f4d0
--- /dev/null
+++ b/tools/dist/templates/rc-release-ann.ezt
@@ -0,0 +1,69 @@
+From: ...@apache.org
+To: announce@subversion.apache.org, users@subversion.apache.org, dev@subversion.apache.org, announce@apache.org
+Subject: [[]ANNOUNCE] Apache Subversion [version] released
+
+I'm happy to announce the release of Apache Subversion [version].
+Please choose the mirror closest to you by visiting:
+
+ http://subversion.apache.org/download.cgi#[anchor]
+
+The SHA1 checksums are:
+
+[for sha1info] [sha1info.sha1] [sha1info.filename]
+[end]
+SHA-512 checksums are available at:
+
+ https://www.apache.org/dist/subversion/subversion-[version].tar.bz2.sha512
+ https://www.apache.org/dist/subversion/subversion-[version].tar.gz.sha512
+ https://www.apache.org/dist/subversion/subversion-[version].zip.sha512
+
+PGP Signatures are available at:
+
+ http://www.apache.org/dist/subversion/subversion-[version].tar.bz2.asc
+ http://www.apache.org/dist/subversion/subversion-[version].tar.gz.asc
+ http://www.apache.org/dist/subversion/subversion-[version].zip.asc
+
+For this release, the following people have provided PGP signatures:
+
+[siginfo]
+This is a pre-release for what will eventually become version [major-minor-patch] of the
+Apache Subversion open source version control system. It may contain known
+issues; a complete list of [major-minor-patch]-blocking issues can be found
+here:
+
+ https://issues.apache.org/jira/issues/?jql=project%20%3D%20SVN%20AND%20resolution%20%3D%20Unresolved%20AND%20fixVersion%20%3D%20[major-minor-patch]%20ORDER%20BY%20priority%20DESC%2C%20updated%20DESC
+
+A pre-release means the Subversion developers feel that this release
+is ready for widespread testing by the community. There are known issues
+(and unknown ones!), so please use it at your own risk, though we do
+encourage people to test this release thoroughly. Of particular note, please
+remember that persistent data, such as the working copy or repository
+formats may change before the final release, and there may not be an
+upgrade path from the pre-releases to the final.
+
+As a note to operating system distro packagers: while we wish to have this
+release candidate widely tested, we do not feel that it is ready for packaging
+and providing to end-users through a distro package system. Packaging a
+release candidate poses many problems, the biggest being that our policy lets
+us break compatibility between the release candidate and the final release, if
+we find something serious enough. Having many users depending on a release
+candidate through their distro would cause no end of pain and frustration that
+we do not want to have to deal with. However, if your distro has a branch that
+is clearly labeled as containing experimental and often broken software, and
+explicitly destined to consenting developers and integrators only, then we're
+okay with packaging the release candidate there. Just don't let it near the
+end users please.
+
+
+Release notes for the [major-minor].x release series may be found at:
+
+ http://subversion.apache.org/docs/release-notes/[major-minor].html
+
+You can find the list of changes between [version] and earlier versions at:
+
+ http://svn.apache.org/repos/asf/subversion/tags/[version]/CHANGES
+
+Questions, comments, and bug reports to users@subversion.apache.org.
+
+Thanks,
+- The Subversion Team
diff --git a/tools/dist/templates/stable-news.ezt b/tools/dist/templates/stable-news.ezt
new file mode 100644
index 0000000..8fcaae9
--- /dev/null
+++ b/tools/dist/templates/stable-news.ezt
@@ -0,0 +1,22 @@
+<div class="h3" id="news-[date]">
+<h3>[date_pres] &mdash; Apache Subversion [version] Released
+ <a class="sectionlink" href="#news-[date]"
+ title="Link to this section">&para;</a>
+</h3>
+
+<p>We are pleased to announce the release of Apache Subversion [version].
+[if-any is_recommended] This is the most complete Subversion release to date, and we encourage
+ users of Subversion to upgrade as soon as reasonable.
+[else] This is the most complete release of the [major-minor].x line to date,
+ and we encourage all users to upgrade as soon as reasonable.
+[end] Please see the
+ <a href=""
+ >release announcement</a> and the
+ <a href="http://svn.apache.org/repos/asf/subversion/tags/[version]/CHANGES"
+ >change log</a> for more information about this release.</p>
+
+<p>To get this release from the nearest mirror, please visit our
+ <a href="/download.cgi#[anchor]">download page</a>.</p>
+
+</div> <!-- #news-[date] -->
+
diff --git a/tools/dist/templates/stable-release-ann.ezt b/tools/dist/templates/stable-release-ann.ezt
new file mode 100644
index 0000000..2aec041
--- /dev/null
+++ b/tools/dist/templates/stable-release-ann.ezt
@@ -0,0 +1,51 @@
+From: ...@apache.org
+To: announce@subversion.apache.org, users@subversion.apache.org, dev@subversion.apache.org, announce@apache.org
+[if-any security]Cc: security@apache.org, oss-security@lists.openwall.com, bugtraq@securityfocus.com
+[end][if-any security]Subject: [[]SECURITY][[]ANNOUNCE] Apache Subversion [version] released
+[else]Subject: [[]ANNOUNCE] Apache Subversion [version] released
+[end]
+I'm happy to announce the release of Apache Subversion [version].
+Please choose the mirror closest to you by visiting:
+
+ http://subversion.apache.org/download.cgi#[anchor]
+[if-any dot-zero]
+This is a stable feature release of the Apache Subversion open source
+version control system.
+[else][if-any security]
+This is a stable bugfix and security release of the Apache Subversion
+open source version control system.
+[else]
+This is a stable bugfix release of the Apache Subversion open source
+version control system.
+[end][end]
+The SHA1 checksums are:
+
+[for sha1info] [sha1info.sha1] [sha1info.filename]
+[end]
+SHA-512 checksums are available at:
+
+ https://www.apache.org/dist/subversion/subversion-[version].tar.bz2.sha512
+ https://www.apache.org/dist/subversion/subversion-[version].tar.gz.sha512
+ https://www.apache.org/dist/subversion/subversion-[version].zip.sha512
+
+PGP Signatures are available at:
+
+ http://www.apache.org/dist/subversion/subversion-[version].tar.bz2.asc
+ http://www.apache.org/dist/subversion/subversion-[version].tar.gz.asc
+ http://www.apache.org/dist/subversion/subversion-[version].zip.asc
+
+For this release, the following people have provided PGP signatures:
+
+[siginfo]
+Release notes for the [major-minor].x release series may be found at:
+
+ http://subversion.apache.org/docs/release-notes/[major-minor].html
+
+You can find the list of changes between [version] and earlier versions at:
+
+ http://svn.apache.org/repos/asf/subversion/tags/[version]/CHANGES
+
+Questions, comments, and bug reports to users@subversion.apache.org.
+
+Thanks,
+- The Subversion Team
diff --git a/tools/dist/test.sh b/tools/dist/test.sh
new file mode 100755
index 0000000..7a1be8a
--- /dev/null
+++ b/tools/dist/test.sh
@@ -0,0 +1,62 @@
#!/bin/sh
#
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#

# Build a Subversion tree and run every test suite over each
# combination of filesystem backend (FSFS, BDB) and access method
# (local, svn://, DAV), leaving one log file per combination.
set -e

# Optional per-user overrides, e.g. TEST_CONFIGURE_OPTIONS.
# (Under 'set -e' a missing file here does not abort: the failing test
# is not the last command of the && list.)
[ -e $HOME/.svndistrc ] && . $HOME/.svndistrc

set -x

# Configure only a pristine tree, then build the core and all SWIG bindings.
[ ! -e Makefile ] && ./configure $TEST_CONFIGURE_OPTIONS
make
make swig-py
make swig-pl
make swig-rb

# Language-binding test suites, one log each.
make check-swig-py 2>&1 | tee tests-py.log
make check-swig-pl 2>&1 | tee tests-pl.log
make check-swig-rb SWIG_RB_TEST_VERBOSE=verbose 2>&1 | tee tests-rb.log

# Run the cmdline tests on a fresh tmpfs to cut disk I/O overhead.
TEST_DIR=`pwd`/subversion/tests/cmdline/svn-test-work
rm -rf "$TEST_DIR"
mkdir "$TEST_DIR"
sudo umount "$TEST_DIR" || true
sudo mount -t tmpfs tmpfs "$TEST_DIR" -o uid=`id -u`,mode=700,size=32M

# Local (file://) access, both backends.
time make check CLEANUP=1 FS_TYPE=fsfs
mv tests.log tests-local-fsfs.log
time make check CLEANUP=1 FS_TYPE=bdb
mv tests.log tests-local-bdb.log

# svn:// access via a private svnserve instance on port 33690.
./subversion/svnserve/svnserve -d -r `pwd`/subversion/tests/cmdline \
  --listen-host 127.0.0.1 --listen-port 33690
time make check CLEANUP=1 FS_TYPE=fsfs BASE_URL=svn://localhost:33690
mv tests.log tests-svn-fsfs.log
time make check CLEANUP=1 FS_TYPE=bdb BASE_URL=svn://localhost:33690
mv tests.log tests-svn-bdb.log
pkill lt-svnserve

# DAV access via the self-configuring Apache harness.
time CLEANUP=1 FS_TYPE=fsfs ./subversion/tests/cmdline/davautocheck.sh
mv tests.log tests-dav-fsfs.log
time CLEANUP=1 FS_TYPE=bdb ./subversion/tests/cmdline/davautocheck.sh
mv tests.log tests-dav-bdb.log

sudo umount "$TEST_DIR"
diff --git a/tools/examples/ExampleAuthn.java b/tools/examples/ExampleAuthn.java
new file mode 100644
index 0000000..8fb73b5
--- /dev/null
+++ b/tools/examples/ExampleAuthn.java
@@ -0,0 +1,108 @@
+/* Username/password prompt/save using 1.9 org.apache.subversion API.
+
+ Compile against non-installed Subversion JavaHL build:
+
+ javac -cp subversion/bindings/javahl/classes -d subversion/bindings/javahl/classes ExampleAuthn.java
+
+ Run:
+
+ LD_LIBRARY_PATH=subversion/libsvn_auth_gnome_keyring/.libs java -cp subversion/bindings/javahl/classes -Djava.library.path=subversion/bindings/javahl/native/.libs ExampleAuthn <URL> <config-dir>
+
+ */
+import org.apache.subversion.javahl.*;
+import org.apache.subversion.javahl.types.*;
+import org.apache.subversion.javahl.remote.*;
+import org.apache.subversion.javahl.callback.*;
+import java.io.Console;
+
+public class ExampleAuthn {
+
+ protected static class MyAuthn {
+
+ public static AuthnCallback getAuthn() {
+ return new MyAuthnCallback();
+ }
+
+ private static class MyAuthnCallback
+ implements AuthnCallback {
+
+ public UserPasswordResult
+ userPasswordPrompt(String realm, String username, boolean maySave) {
+ System.out.println("userPasswordPrompt");
+ System.out.println("Realm: " + realm);
+ String prompt;
+ if (username == null) {
+ System.out.print("Username: ");
+ username = System.console().readLine();
+ prompt = "Password: ";
+ } else {
+ prompt = "Password for " + username + ": ";
+ }
+ String password = new String(System.console().readPassword(prompt));
+ return new UserPasswordResult(username, password, maySave);
+ }
+
+ public boolean
+ allowStorePlaintextPassword(String realm) {
+ System.out.println("allowStorePlaintextPassword");
+ System.out.println("Realm: " + realm);
+ System.out.print("Store plaintext password on disk? (y/n): ");
+ String s = System.console().readLine();
+ return s.equals("y") ? true : false;
+ }
+
+ public UsernameResult
+ usernamePrompt(String realm, boolean maySave) {
+ System.out.println("usernamePrompt not implemented!");
+ return null;
+ }
+
+ public boolean
+ allowStorePlaintextPassphrase(String realm) {
+ System.out.println("allowStorePlaintextPassphrase not implemented!");
+ return false;
+ }
+
+ public SSLServerTrustResult
+ sslServerTrustPrompt(String realm,
+ SSLServerCertFailures failures,
+ SSLServerCertInfo info,
+ boolean maySave) {
+ System.out.println("sslServerTrustPrompt");
+ System.out.println("(r)eject or (t)emporary?");
+ String s = System.console().readLine();
+ return s.equals("t") ? SSLServerTrustResult.acceptTemporarily()
+ : SSLServerTrustResult.reject();
+ }
+
+ public SSLClientCertResult
+ sslClientCertPrompt(String realm, boolean maySave) {
+ System.out.println("sslClientCertPrompt not implemented!");
+ return null;
+ }
+
+ public SSLClientCertPassphraseResult
+ sslClientCertPassphrasePrompt(String realm, boolean maySave) {
+ System.out.println("sslClientCertPassphrasePrompt not implemented!");
+ return null;
+ }
+ }
+ }
+
+ public static void main(String argv[]) {
+
+ if (argv.length != 2) {
+ System.err.println("usage: ExampleAuthn <URL> <config-dir>");
+ return;
+ }
+ RemoteFactory factory = new RemoteFactory();
+ factory.setConfigDirectory(argv[1]);
+ factory.setPrompt(MyAuthn.getAuthn());
+ try {
+ ISVNRemote raSession = factory.openRemoteSession(argv[0]);
+ raSession.getReposUUID();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+}
diff --git a/tools/examples/ExampleAuthnOld.java b/tools/examples/ExampleAuthnOld.java
new file mode 100644
index 0000000..cbcd92d
--- /dev/null
+++ b/tools/examples/ExampleAuthnOld.java
@@ -0,0 +1,119 @@
+/* Username/password prompt/save using old org.apache.subversion API.
+
+ Compile against non-installed Subversion JavaHL build:
+
+ javac -cp subversion/bindings/javahl/classes -d subversion/bindings/javahl/classes ExampleAuthnOld.java
+
+ Run:
+
+ LD_LIBRARY_PATH=subversion/libsvn_auth_gnome_keyring/.libs java -cp subversion/bindings/javahl/classes -Djava.library.path=subversion/bindings/javahl/native/.libs ExampleAuthnOld <URL> <config-dir>
+
+ */
+import org.apache.subversion.javahl.*;
+import org.apache.subversion.javahl.types.*;
+import org.apache.subversion.javahl.callback.*;
+import java.io.Console;
+
+public class ExampleAuthnOld {
+
+ protected static class MyAuthn {
+
+ public static UserPasswordCallback getAuthn() {
+ return new MyUserPasswordCallback();
+ }
+
+ private static class MyUserPasswordCallback
+ implements UserPasswordCallback {
+
+ private String _username = null;
+
+ public String
+ getUsername() {
+ System.out.println("getUsername");
+ return _username;
+ }
+
+ private String _password = null;
+
+ public String
+ getPassword() {
+ System.out.println("getPassword");
+ return _password;
+ }
+
+ public boolean
+ userAllowedSave() {
+ System.out.println("userAllowedSave");
+ return true;
+ }
+
+ public boolean
+ askYesNo(String realm, String question, boolean yesIsDefault) {
+ System.out.println("askYesNo");
+ System.out.print(question + " (y/n): ");
+ String s = System.console().readLine();
+ return s.equals("y") ? true : s.equals("") ? yesIsDefault : false;
+ }
+
+ public boolean
+ prompt(String realm, String username, boolean maySave) {
+ System.out.println("prompt");
+ System.out.println("Realm: " + realm);
+ String prompt;
+ if (username == null) {
+ System.out.print("Username: ");
+ _username = System.console().readLine();
+ prompt = "Password: ";
+ } else {
+ _username = username;
+ prompt = "Password for " + username + ": ";
+ }
+ _password = new String(System.console().readPassword(prompt));
+ return maySave;
+ }
+
+ public boolean
+ prompt(String realm, String username) {
+ System.out.println("prompt not implemented!");
+ return true;
+ }
+
+ public String
+ askQuestion(String realm,
+ String question,
+ boolean showAnswer,
+ boolean maySave) {
+ System.out.println("askQuestion not implemented!");
+ return null;
+ }
+
+ public String
+ askQuestion(String realm, String question, boolean showAnswer) {
+ System.out.println("askQuestion not implemented!");
+ return null;
+ }
+
+ public int
+ askTrustSSLServer(String info, boolean allowPermanently) {
+ System.out.println("askTrustSSLServer not implemented!");
+ return UserPasswordCallback.AcceptTemporary;
+ }
+ }
+ }
+
+ public static void main(String argv[]) {
+
+ if (argv.length != 2) {
+ System.err.println("usage: ExampleAuthnOld <URL> <config-dir>");
+ return;
+ }
+ ISVNClient client = new SVNClient();
+ client.setPrompt(MyAuthn.getAuthn());
+ try {
+ client.setConfigDirectory(argv[1]);
+ client.revProperty(argv[0], "svn:log", Revision.getInstance(0));
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+}
diff --git a/tools/examples/ExampleAuthnVeryOld.java b/tools/examples/ExampleAuthnVeryOld.java
new file mode 100644
index 0000000..b2bd8d0
--- /dev/null
+++ b/tools/examples/ExampleAuthnVeryOld.java
@@ -0,0 +1,118 @@
+/* Username/password prompt/save using very old org.tigris.subversion API.
+
+ Compile against non-installed Subversion JavaHL build:
+
+ javac -cp subversion/bindings/javahl/classes -d subversion/bindings/javahl/classes ExampleAuthnVeryOld.java
+
+ Run:
+
+ LD_LIBRARY_PATH=subversion/libsvn_auth_gnome_keyring/.libs java -cp subversion/bindings/javahl/classes -Djava.library.path=subversion/bindings/javahl/native/.libs ExampleAuthnVeryOld <URL> <config-dir>
+
+ */
+import org.tigris.subversion.javahl.*;
+import java.io.Console;
+
+public class ExampleAuthnVeryOld {
+
+ protected static class MyAuthn {
+
+ public static PromptUserPassword3 getAuthn() {
+ return new MyUserPasswordCallback();
+ }
+
+ private static class MyUserPasswordCallback
+ implements PromptUserPassword3 {
+
+ private String _username = null;
+
+ public String
+ getUsername() {
+ System.out.println("getUsername");
+ return _username;
+ }
+
+ private String _password = null;
+
+ public String
+ getPassword() {
+ System.out.println("getPassword");
+ return _password;
+ }
+
+ public boolean
+ userAllowedSave() {
+ System.out.println("userAllowedSave");
+ return true;
+ }
+
+ public boolean
+ askYesNo(String realm, String question, boolean yesIsDefault) {
+ System.out.println("askYesNo");
+ System.out.print(question + " (y/n): ");
+ String s = System.console().readLine();
+ return s.equals("y") ? true : s.equals("") ? yesIsDefault : false;
+ }
+
+ public boolean
+ prompt(String realm, String username, boolean maySave) {
+ System.out.println("prompt");
+ System.out.println("Realm: " + realm);
+ String prompt;
+ if (username == null) {
+ System.out.print("Username: ");
+ _username = System.console().readLine();
+ prompt = "Password: ";
+ } else {
+ _username = username;
+ prompt = "Password for " + username + ": ";
+ }
+ _password = new String(System.console().readPassword(prompt));
+ return maySave;
+ }
+
+ public boolean
+ prompt(String realm, String username) {
+ System.out.println("prompt not implemented!");
+ return true;
+ }
+
+ public String
+ askQuestion(String realm,
+ String question,
+ boolean showAnswer,
+ boolean maySave) {
+ System.out.println("askQuestion not implemented!");
+ return null;
+ }
+
+ public String
+ askQuestion(String realm, String question, boolean showAnswer) {
+ System.out.println("askQuestion not implemented!");
+ return null;
+ }
+
+ public int
+ askTrustSSLServer(String info, boolean allowPermanently) {
+ System.out.println("askTrustSSLServer not implemented!");
+ return PromptUserPassword3.AcceptTemporary;
+ }
+ }
+ }
+
+ public static void main(String argv[]) {
+
+ if (argv.length != 2) {
+ System.err.println("usage: ExampleAuthnVeryOld <URL> <config-dir>");
+ return;
+ }
+ SVNClientInterface client = new SVNClient();
+ client.setPrompt(MyAuthn.getAuthn());
+ try {
+ client.setConfigDirectory(argv[1]);
+ client.logMessages(argv[0], Revision.getInstance(0),
+ Revision.getInstance(0), false, false, 0);
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+}
diff --git a/tools/examples/SvnCLBrowse b/tools/examples/SvnCLBrowse
new file mode 100755
index 0000000..fc4c765
--- /dev/null
+++ b/tools/examples/SvnCLBrowse
@@ -0,0 +1,489 @@
+#!/usr/bin/python
+#
+# SvnCLBrowse -- graphical Subversion changelist browser
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+
+# This script requires Python 2.5
+
+import sys
+import os
+import getopt
+
+# Try to import the wxWidgets modules.
+try:
+ import wx
+ import wx.xrc
+except ImportError:
+ sys.stderr.write("""
+ERROR: This program requires the wxWidgets Python bindings, which you
+ do not appear to have installed.
+
+""")
+ raise
+
+# Try to import the Subversion modules.
+try:
+ import svn.client, svn.wc, svn.core
+except ImportError:
+ sys.stderr.write("""
+ERROR: This program requires the Subversion Python bindings, which you
+ do not appear to have installed.
+
+""")
+ raise
+
# Map svn.wc status codes to the one-character column codes printed by
# 'svn status'.
status_code_map = {
  svn.wc.status_none : ' ',
  svn.wc.status_normal : ' ',
  svn.wc.status_added : 'A',
  svn.wc.status_missing : '!',
  svn.wc.status_incomplete : '!',
  svn.wc.status_deleted : 'D',
  svn.wc.status_replaced : 'R',
  svn.wc.status_modified : 'M',
  svn.wc.status_merged : 'G',
  svn.wc.status_conflicted : 'C',
  svn.wc.status_obstructed : '~',
  svn.wc.status_ignored : 'I',
  svn.wc.status_external : 'X',
  svn.wc.status_unversioned : '?',
  }
+
def output_info(path, info, window):
  """Append an 'svn info'-style report for PATH to WINDOW.

  INFO is the info object delivered by svn.client.info2(); WINDOW is a
  wx text control (anything with AppendText).  Sections are printed
  only when the corresponding field is present/meaningful.
  """
  window.AppendText("Path: %s\n" % os.path.normpath(path))
  if info.kind != svn.core.svn_node_dir:
    window.AppendText("Name: %s\n" % os.path.basename(path))
  if info.URL:
    window.AppendText("URL: %s\n" % info.URL)
  if info.repos_root_URL:
    window.AppendText("Repository Root: %s\n" % info.repos_root_URL)
  if info.repos_UUID:
    window.AppendText("Repository UUID: %s\n" % info.repos_UUID)
  if info.rev >= 0:
    window.AppendText("Revision: %ld\n" % info.rev)
  if info.kind == svn.core.svn_node_file:
    window.AppendText("Node Kind: file\n")
  elif info.kind == svn.core.svn_node_dir:
    window.AppendText("Node Kind: directory\n")
  elif info.kind == svn.core.svn_node_none:
    window.AppendText("Node Kind: none\n")
  else:
    window.AppendText("Node Kind: unknown\n")
  if info.has_wc_info:
    if info.schedule == svn.wc.schedule_normal:
      window.AppendText("Schedule: normal\n")
    elif info.schedule == svn.wc.schedule_add:
      window.AppendText("Schedule: add\n")
    elif info.schedule == svn.wc.schedule_delete:
      window.AppendText("Schedule: delete\n")
    elif info.schedule == svn.wc.schedule_replace:
      window.AppendText("Schedule: replace\n")
    # unknown and infinity depths are the uninteresting common cases;
    # print nothing for them.
    if info.depth == svn.core.svn_depth_unknown:
      pass
    elif info.depth == svn.core.svn_depth_empty:
      window.AppendText("Depth: empty\n")
    elif info.depth == svn.core.svn_depth_files:
      window.AppendText("Depth: files\n")
    elif info.depth == svn.core.svn_depth_immediates:
      window.AppendText("Depth: immediates\n")
    elif info.depth == svn.core.svn_depth_infinity:
      pass
    else:
      window.AppendText("Depth: INVALID\n")
    if info.copyfrom_url:
      window.AppendText("Copied From URL: %s\n" % info.copyfrom_url)
    if info.copyfrom_rev >= 0:
      window.AppendText("Copied From Rev: %ld\n" % info.copyfrom_rev)
  if info.last_changed_author:
    window.AppendText("Last Changed Author: %s\n" % info.last_changed_author)
  if info.last_changed_rev >= 0:
    window.AppendText("Last Changed Rev: %ld\n" % info.last_changed_rev)
  if info.last_changed_date:
    window.AppendText("Last Changed Date: %s\n" %
                      svn.core.svn_time_to_human_cstring(info.last_changed_date))
  if info.has_wc_info:
    if info.text_time:
      window.AppendText("Text Last Updated: %s\n" %
                        svn.core.svn_time_to_human_cstring(info.text_time))
    if info.prop_time:
      window.AppendText("Properties Last Updated: %s\n" %
                        svn.core.svn_time_to_human_cstring(info.prop_time))
    if info.checksum:
      window.AppendText("Checksum: %s\n" % info.checksum)
    if info.conflict_old:
      window.AppendText("Conflict Previous Base File: %s\n" % info.conflict_old)
    if info.conflict_wrk:
      window.AppendText("Conflict Previous Working File: %s\n" % info.conflict_wrk)
    if info.conflict_new:
      window.AppendText("Conflict Current Base File: %s\n" % info.conflict_new)
    if info.prejfile:
      window.AppendText("Conflict Properties File: %s\n" % info.prejfile)
  if info.lock:
    if info.lock.token:
      window.AppendText("Lock Token: %s\n" % info.lock.token)
    if info.lock.owner:
      window.AppendText("Lock Owner: %s\n" % info.lock.owner)
    if info.lock.creation_date:
      window.AppendText("Lock Created: %s\n" %
                        svn.core.svn_time_to_human_cstring(info.lock.creation_date))
    if info.lock.expiration_date:
      window.AppendText("Lock Expires: %s\n" %
                        svn.core.svn_time_to_human_cstring(info.lock.expiration_date))
    if info.lock.comment:
      num_lines = len(info.lock.comment.split("\n"))
      window.AppendText("Lock Comment (%d line%s): %s\n"
                        % (num_lines, num_lines > 1 and "s" or "", info.lock.comment))
  if info.changelist:
    window.AppendText("Changelist: %s\n" % info.changelist)
  window.AppendText("\n")
+
class _item:
  """Empty attribute container used to group the loaded XRC resources."""
  pass
+
class SvnCLBrowse(wx.App):
  """Main wx application: a graphical browser for Subversion changelists.

  Shows a directory tree plus the changelists found in the selected
  working copy; menu commands list or describe changelist members.
  """

  def __init__(self, wc_dir):
    # One svn client context is shared by all operations in the app.
    svn.core.svn_config_ensure(None)
    self.svn_ctx = svn.client.svn_client_create_context()
    self.svn_ctx.config = svn.core.svn_config_get_config(None)
    if wc_dir is not None:
      self.wc_dir = svn.core.svn_path_canonicalize(wc_dir)
    else:
      self.wc_dir = wc_dir
    # wx.App.__init__ triggers OnInit() below.
    wx.App.__init__(self)

  def OnInit(self):
    """wx entry point: build the GUI from the in-memory XRC resource."""
    self.SetAppName("SvnCLBrowse")

    # Serve the _XML_RESOURCE string through a memory filesystem so the
    # XRC loader can read it like a file.
    self.xrc = wx.xrc.EmptyXmlResource()
    wx.FileSystem.AddHandler(wx.MemoryFSHandler())
    wx.MemoryFSHandler.AddFile('XRC/SvnCLBrowse.xrc', _XML_RESOURCE)
    self.xrc.Load('memory:XRC/SvnCLBrowse.xrc')

    # XML Resource stuff.
    self.resources = _item()
    self.resources.CLBFrame = self.xrc.LoadFrame(None, 'CLBFrame')
    self.resources.CLBMenuBar = self.xrc.LoadMenuBar('CLBMenuBar')
    self.resources.CLBMenuFileQuit = self.xrc.GetXRCID('CLBMenuFileQuit')
    self.resources.CLBMenuOpsInfo = self.xrc.GetXRCID('CLBMenuOpsInfo')
    self.resources.CLBMenuOpsMembers = self.xrc.GetXRCID('CLBMenuOpsMembers')
    self.resources.CLBMenuHelpAbout = self.xrc.GetXRCID('CLBMenuHelpAbout')
    self.resources.CLBDirNav = self.resources.CLBFrame.FindWindowById(
      self.xrc.GetXRCID('CLBDirNav'))
    self.resources.CLBChangelists = self.resources.CLBFrame.FindWindowById(
      self.xrc.GetXRCID('CLBChangelists'))
    self.resources.CLBVertSplitter = self.resources.CLBFrame.FindWindowById(
      self.xrc.GetXRCID('CLBVertSplitter'))
    self.resources.CLBHorzSplitter = self.resources.CLBFrame.FindWindowById(
      self.xrc.GetXRCID('CLBHorzSplitter'))
    self.resources.CLBOutput = self.resources.CLBFrame.FindWindowById(
      self.xrc.GetXRCID('CLBOutput'))
    self.resources.CLBStatusBar = self.resources.CLBFrame.CreateStatusBar(2)

    # Glue some of our extra stuff onto the main frame.
    self.resources.CLBFrame.SetMenuBar(self.resources.CLBMenuBar)
    self.resources.CLBStatusBar.SetStatusWidths([-1, 100])

    # Event handlers.  They are the key to the world.
    wx.EVT_CLOSE(self.resources.CLBFrame, self._FrameClosure)
    wx.EVT_MENU(self, self.resources.CLBMenuFileQuit, self._FileQuitMenu)
    wx.EVT_MENU(self, self.resources.CLBMenuOpsInfo, self._OpsInfoMenu)
    wx.EVT_MENU(self, self.resources.CLBMenuOpsMembers, self._OpsMembersMenu)
    wx.EVT_MENU(self, self.resources.CLBMenuHelpAbout, self._HelpAboutMenu)
    wx.EVT_TREE_ITEM_ACTIVATED(self, self.resources.CLBDirNav.GetTreeCtrl().Id,
                               self._DirNavSelChanged)

    # Reset our working directory
    self._SetWorkingDirectory(self.wc_dir)

    # Resize and display our frame.
    self.resources.CLBFrame.SetSize(wx.Size(600, 400))
    self.resources.CLBFrame.Center()
    self.resources.CLBFrame.Show(True)
    self.resources.CLBVertSplitter.SetSashPosition(
      self.resources.CLBVertSplitter.GetSize()[0] / 2)
    self.resources.CLBHorzSplitter.SetSashPosition(
      self.resources.CLBHorzSplitter.GetSize()[1] / 2)

    # Tell wxWidgets that this is our main window
    self.SetTopWindow(self.resources.CLBFrame)

    # Return a success flag
    return True

  def _SetWorkingDirectory(self, wc_dir):
    """Re-root the browser at WC_DIR and repopulate the changelist box."""
    if wc_dir is None:
      return
    if not os.path.isdir(wc_dir):
      wc_dir = os.path.abspath('/')
    self.wc_dir = os.path.abspath(wc_dir)
    self.resources.CLBChangelists.Clear()
    self.resources.CLBDirNav.SetPath(self.wc_dir)
    self.resources.CLBFrame.SetTitle("SvnCLBrowse - %s" % (self.wc_dir))
    changelists = {}
    self.resources.CLBFrame.SetStatusText("Checking '%s' for status..." \
                                          % (self.wc_dir))
    wx.BeginBusyCursor()

    # Collect the changelist names seen during the status crawl (a dict
    # is used as a set here).
    def _status_callback(path, status, clists=changelists):
      if status.entry and status.entry.changelist:
        clists[status.entry.changelist] = None

    # Do the status crawl, using _status_callback() as our callback function.
    revision = svn.core.svn_opt_revision_t()
    revision.type = svn.core.svn_opt_revision_head
    try:
      svn.client.status2(self.wc_dir, revision, _status_callback,
                         svn.core.svn_depth_infinity,
                         False, False, False, True, self.svn_ctx)
    except svn.core.SubversionException:
      # NOTE(review): the status bar was created with 2 fields, so valid
      # field indices should be 0 and 1 -- index 2 here looks out of
      # range; confirm against wx.StatusBar.SetStatusText.
      self.resources.CLBStatusBar.SetStatusText("UNVERSIONED", 2)
    else:
      # Python 2: keys() returns a list which we can sort in place.
      changelist_names = changelists.keys()
      changelist_names.sort()
      for changelist in changelist_names:
        self.resources.CLBChangelists.Append(changelist)
    finally:
      wx.EndBusyCursor()
      self.resources.CLBFrame.SetStatusText("")

  def _Destroy(self):
    # Tearing down the main frame ends the application.
    self.resources.CLBFrame.Destroy()

  def _DirNavSelChanged(self, event):
    # Activation in the directory tree: re-root the browser there.
    self._SetWorkingDirectory(self.resources.CLBDirNav.GetPath())

  def _GetSelectedChangelists(self):
    """Return the names (as str) of the currently selected changelists."""
    changelists = []
    items = self.resources.CLBChangelists.GetSelections()
    for item in items:
      changelists.append(str(self.resources.CLBChangelists.GetString(item)))
    return changelists

  def _OpsMembersMenu(self, event):
    """Menu handler: list the member paths of each selected changelist."""
    self.resources.CLBOutput.Clear()
    changelists = self._GetSelectedChangelists()
    if not changelists:
      return

    def _info_receiver(path, info, pool):
      self.resources.CLBOutput.AppendText("  %s\n" % (path))

    for changelist in changelists:
      self.resources.CLBOutput.AppendText("Changelist: %s\n" % (changelist))
      revision = svn.core.svn_opt_revision_t()
      revision.type = svn.core.svn_opt_revision_working
      svn.client.info2(self.wc_dir, revision, revision,
                       _info_receiver, svn.core.svn_depth_infinity,
                       [changelist], self.svn_ctx)
      self.resources.CLBOutput.AppendText("\n")

  def _OpsInfoMenu(self, event):
    """Menu handler: show full info output for changelist members."""
    self.resources.CLBOutput.Clear()
    changelists = self._GetSelectedChangelists()
    if not changelists:
      return

    def _info_receiver(path, info, pool):
      output_info(path, info, self.resources.CLBOutput)

    revision = svn.core.svn_opt_revision_t()
    revision.type = svn.core.svn_opt_revision_working
    svn.client.info2(self.wc_dir, revision, revision,
                     _info_receiver, svn.core.svn_depth_infinity,
                     changelists, self.svn_ctx)

  def _FrameClosure(self, event):
    # Window-close event from the frame.
    self._Destroy()

  def _FileQuitMenu(self, event):
    # File > Quit.
    self._Destroy()

  def _HelpAboutMenu(self, event):
    # Help > About dialog.
    wx.MessageBox("SvnCLBrowse"
                  " -- graphical Subversion changelist browser.\n\n",
                  "About SvnCLBrowse",
                  wx.OK | wx.CENTER,
                  self.resources.CLBFrame)

  def OnExit(self):
    pass
+
+
+_XML_RESOURCE = """<?xml version="1.0" ?>
+<resource>
+ <object class="wxMenuBar" name="CLBMenuBar">
+ <object class="wxMenu">
+ <label>&amp;File</label>
+ <object class="wxMenuItem" name="CLBMenuFileQuit">
+ <label>&amp;Quit</label>
+ <accel>CTRL+Q</accel>
+ <help>Quit SvnCLBrowse.</help>
+ </object>
+ </object>
+ <object class="wxMenu">
+ <label>&amp;Subversion</label>
+ <object class="wxMenuItem" name="CLBMenuOpsInfo">
+ <label>&amp;Info</label>
+ <help>Show information about members of the selected changelist(s).</help>
+ </object>
+ <object class="wxMenuItem" name="CLBMenuOpsMembers">
+ <label>&amp;Members</label>
+ <help>List the members of the selected changelist(s).</help>
+ </object>
+ </object>
+ <object class="wxMenu">
+ <label>&amp;Help</label>
+ <object class="wxMenuItem" name="CLBMenuHelpAbout">
+ <label>&amp;About...</label>
+ <help>About SvnCLBrowse.</help>
+ </object>
+ </object>
+ </object>
+ <object class="wxFrame" name="CLBFrame">
+ <title>SvnCLBrowse -- graphical Subversion changelist browser</title>
+ <centered>1</centered>
+ <style>wxDEFAULT_FRAME_STYLE|wxCAPTION|wxSYSTEM_MENU|wxRESIZE_BORDER|wxRESIZE_BOX|wxMAXIMIZE_BOX|wxMINIMIZE_BOX|wxTAB_TRAVERSAL</style>
+ <object class="wxFlexGridSizer">
+ <cols>1</cols>
+ <rows>1</rows>
+ <object class="sizeritem">
+ <object class="wxSplitterWindow" name="CLBVertSplitter">
+ <object class="wxPanel">
+ <object class="wxFlexGridSizer">
+ <cols>1</cols>
+ <rows>3</rows>
+ <growablecols>0</growablecols>
+ <growablerows>0</growablerows>
+ <growablerows>1</growablerows>
+ <growablerows>2</growablerows>
+ <object class="sizeritem">
+ <object class="wxSplitterWindow" name="CLBHorzSplitter">
+ <orientation>horizontal</orientation>
+ <sashpos>200</sashpos>
+ <minsize>50</minsize>
+ <style>wxSP_NOBORDER|wxSP_LIVE_UPDATE</style>
+ <object class="wxPanel">
+ <object class="wxStaticBoxSizer">
+ <label>Local Modifications</label>
+ <orient>wxHORIZONTAL</orient>
+ <object class="sizeritem">
+ <object class="wxGenericDirCtrl" name="CLBDirNav">
+ <style>wxDIRCTRL_DIR_ONLY</style>
+ </object>
+ <flag>wxEXPAND</flag>
+ <option>1</option>
+ </object>
+ </object>
+ </object>
+ <object class="wxPanel">
+ <object class="wxStaticBoxSizer">
+ <label>Changelists</label>
+ <orient>wxHORIZONTAL</orient>
+ <object class="sizeritem">
+ <object class="wxListBox" name="CLBChangelists">
+ <content>
+ <item/></content>
+ <style>wxLB_MULTIPLE</style>
+ </object>
+ <option>1</option>
+ <flag>wxALL|wxEXPAND</flag>
+ </object>
+ </object>
+ </object>
+ </object>
+ <flag>wxEXPAND</flag>
+ <option>1</option>
+ </object>
+ </object>
+ </object>
+ <object class="wxPanel">
+ <object class="wxFlexGridSizer">
+ <cols>1</cols>
+ <object class="sizeritem">
+ <object class="wxStaticBoxSizer">
+ <label>Output</label>
+ <orient>wxVERTICAL</orient>
+ <object class="sizeritem">
+ <object class="wxTextCtrl" name="CLBOutput">
+ <style>wxTE_MULTILINE|wxTE_READONLY|wxTE_LEFT|wxTE_DONTWRAP</style>
+ </object>
+ <option>1</option>
+ <flag>wxEXPAND</flag>
+ </object>
+ </object>
+ <option>1</option>
+ <flag>wxALL|wxEXPAND</flag>
+ <border>5</border>
+ </object>
+ <rows>1</rows>
+ <growablecols>0</growablecols>
+ <growablerows>0</growablerows>
+ </object>
+ </object>
+ <orientation>vertical</orientation>
+ <sashpos>130</sashpos>
+ <minsize>50</minsize>
+ <style>wxSP_NOBORDER|wxSP_LIVE_UPDATE</style>
+ </object>
+ <option>1</option>
+ <flag>wxEXPAND</flag>
+ </object>
+ <growablecols>0</growablecols>
+ <growablerows>0</growablerows>
+ </object>
+ </object>
+</resource>
+"""
+
def usage_and_exit(errmsg=None):
  """Print the usage message and terminate.

  With ERRMSG, write to stderr and exit with status 1; otherwise write
  to stdout and exit with status 0.
  """
  if errmsg:
    stream = sys.stderr
  else:
    stream = sys.stdout
  progname = os.path.basename(sys.argv[0])
  stream.write("""%s -- graphical Subversion changelist browser

Usage: %s [DIRECTORY]

Launch the SvnCLBrowse graphical changelist browser, using DIRECTORY
(or the current working directory, if DIRECTORY is not provided) as
the initial browse location.

""" % (progname, progname))
  if errmsg:
    stream.write("ERROR: %s\n" % (errmsg))
    sys.exit(1)
  sys.exit(0)
+
def main():
  """Parse command-line options and launch the changelist browser."""
  opts, args = getopt.gnu_getopt(sys.argv[1:], 'h?', ['help'])
  for name, value in opts:
    if name == '-h' or name == '-?' or name == '--help':
      usage_and_exit()
  argc = len(args)
  if argc == 0:
    wc_dir = '.'
  elif argc == 1:
    # Use the parsed positional argument rather than sys.argv[1]: with
    # gnu_getopt the first argv entry may be an option or a '--'
    # separator, not the directory.
    wc_dir = args[0]
  else:
    usage_and_exit("Too many arguments")
  app = SvnCLBrowse(wc_dir)
  app.MainLoop()
  app.OnExit()

if __name__ == "__main__":
  main()
diff --git a/tools/examples/blame.py b/tools/examples/blame.py
new file mode 100755
index 0000000..14368e5
--- /dev/null
+++ b/tools/examples/blame.py
@@ -0,0 +1,113 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# USAGE: blame.py [-r REV] repos-path file
+#
+
+import sys
+import os
+import getopt
+try:
+ my_getopt = getopt.gnu_getopt
+except AttributeError:
+ my_getopt = getopt.getopt
+import difflib
+from svn import fs, core, repos
+
+CHUNK_SIZE = 100000
+
def blame(path, filename, rev=None):
  """Print a line-by-line annotation ("blame") of FILENAME to stdout.

  PATH is a local repository path; FILENAME is the file inside it.
  REV bounds the annotation at that revision (default: the youngest).
  Works by diffing each pair of consecutive revisions that changed the
  file and remembering which revision last touched each line.
  """
  # line number (0-based) -> (revision, line text) for the current state.
  annotresult = {}
  path = core.svn_path_canonicalize(path)

  repos_ptr = repos.open(path)
  fsob = repos.fs(repos_ptr)

  if rev is None:
    rev = fs.youngest_rev(fsob)
  filedata = ''
  # Find the first revision in which the file exists.
  # NOTE(review): if the file never appears at or before REV, 'first'
  # stays unbound and the print below raises NameError.
  for i in range(0, rev+1):
    root = fs.revision_root(fsob, i)
    if fs.check_path(root, filename) != core.svn_node_none:
      first = i
      break
  print("First revision is %d" % first)
  print("Last revision is %d" % rev)
  for i in range(first, rev+1):
    previousroot = root
    root = fs.revision_root(fsob, i)
    if i != first:
      # Skip revisions that did not change the file's contents.
      if not fs.contents_changed(root, filename, previousroot, filename):
        continue

    # Read the whole file for this revision in CHUNK_SIZE pieces.
    # (The name 'file' shadows the Python 2 builtin -- pre-existing.)
    file = fs.file_contents(root, filename)
    previousdata = filedata
    filedata = ''
    while True:
      data = core.svn_stream_read(file, CHUNK_SIZE)
      if not data:
        break
      filedata = filedata + data

    print("Current revision is %d" % i)
    # ndiff lines start with '  ' (common), '+ ' (added), '- ' (removed)
    # or '? ' (intraline hint); j[2:] strips that two-character tag.
    diffresult = difflib.ndiff(previousdata.splitlines(1),
                               filedata.splitlines(1))
    # print ''.join(diffresult)
    k = 0
    for j in diffresult:
      if j[0] == ' ':
        if k in annotresult:
          k = k + 1
          continue
        else:
          annotresult[k] = (i, j[2:])
          k = k + 1
          continue
      elif j[0] == '?':
        continue
      annotresult[k] = (i, j[2:])
      if j[0] != '-':
        k = k + 1
# print ''.join(diffresult)
# print annotresult
  for x in range(len(annotresult.keys())):
    sys.stdout.write("Line %d (r%d):%s" % (x,
                                           annotresult[x][0],
                                           annotresult[x][1]))
+
def usage():
  """Print the usage line and abort with exit status 1."""
  message = "USAGE: blame.py [-r REV] repos-path file"
  print(message)
  sys.exit(1)
+
def main():
  """Parse '[-r REV] repos-path file' and run the blame computation."""
  # Use the my_getopt wrapper chosen at import time (gnu_getopt when
  # available) instead of plain getopt.getopt, so options may follow
  # the positional arguments where supported.
  opts, args = my_getopt(sys.argv[1:], 'r:')
  if len(args) != 2:
    usage()
  rev = None
  for name, value in opts:
    if name == '-r':
      rev = int(value)
  blame(args[0], args[1], rev)

if __name__ == '__main__':
  main()
diff --git a/tools/examples/check-modified.py b/tools/examples/check-modified.py
new file mode 100755
index 0000000..dff3fa1
--- /dev/null
+++ b/tools/examples/check-modified.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# USAGE: check-modified.py FILE_OR_DIR1 FILE_OR_DIR2 ...
+#
+# prints out the URL associated with each item
+#
+
+import sys
+import os
+import os.path
+import svn.core
+import svn.client
+import svn.wc
+
+FORCE_COMPARISON = 0
+
def usage():
  """Print the usage message and exit successfully."""
  message = "Usage: " + sys.argv[0] + " FILE_OR_DIR1 FILE_OR_DIR2\n"
  print(message)
  sys.exit(0)
+
def run(files):
  """Print an 'svn status'-like text-modification flag for each path.

  For every path in FILES: 'M' = text modified, blank = unmodified,
  '?' = not versioned (or any other error while reading the entry).
  """
  for f in files:
    # For a plain file, the working-copy admin area lives in its parent
    # directory.
    dirpath = fullpath = os.path.abspath(f)
    if not os.path.isdir(dirpath):
      dirpath = os.path.dirname(dirpath)

    # NOTE(review): the positional flags are presumably
    # (write_lock=False, tree_lock/recurse=True) -- confirm against the
    # svn.wc.adm_open binding documentation.
    adm_baton = svn.wc.adm_open(None, dirpath, False, True)

    try:
      entry = svn.wc.entry(fullpath, adm_baton, 0)

      if svn.wc.text_modified_p(fullpath, FORCE_COMPARISON,
                                adm_baton):
        print("M %s" % f)
      else:
        print("  %s" % f)
    except:
      # Bare except is deliberate best-effort: any failure (no entry,
      # unversioned path, ...) is reported as '?'.  NOTE(review): it
      # also swallows KeyboardInterrupt/SystemExit.
      print("? %s" % f)

    svn.wc.adm_close(adm_baton)

if __name__ == '__main__':
  run(sys.argv[1:])
+
diff --git a/tools/examples/dumpprops.py b/tools/examples/dumpprops.py
new file mode 100755
index 0000000..09c5e6b
--- /dev/null
+++ b/tools/examples/dumpprops.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# USAGE: dumpprops.py [-r REV] repos-path [file]
+#
+# dump out the properties on a given path (recursively if given a dir)
+#
+
+import sys
+import os
+import getopt
+try:
+ my_getopt = getopt.gnu_getopt
+except AttributeError:
+ my_getopt = getopt.getopt
+import pprint
+
+from svn import fs, core, repos
+
+
+def dumpprops(path, filename='', rev=None):
+ path = core.svn_path_canonicalize(path)
+ repos_ptr = repos.open(path)
+ fsob = repos.fs(repos_ptr)
+
+ if rev is None:
+ rev = fs.youngest_rev(fsob)
+
+ root = fs.revision_root(fsob, rev)
+ print_props(root, filename)
+ if fs.is_dir(root, filename):
+ walk_tree(root, filename)
+
+def print_props(root, path):
+ raw_props = fs.node_proplist(root, path)
+ # need to massage some buffers into strings for printing
+ props = { }
+ for key, value in raw_props.items():
+ props[key] = str(value)
+
+ print('--- %s' % path)
+ pprint.pprint(props)
+
+def walk_tree(root, path):
+ for name in fs.dir_entries(root, path).keys():
+ full = path + '/' + name
+ print_props(root, full)
+ if fs.is_dir(root, full):
+ walk_tree(root, full)
+
+def usage():
+ print("USAGE: dumpprops.py [-r REV] repos-path [file]")
+ sys.exit(1)
+
+def main():
+ opts, args = my_getopt(sys.argv[1:], 'r:')
+ rev = None
+ for name, value in opts:
+ if name == '-r':
+ rev = int(value)
+ if len(args) == 2:
+ dumpprops(args[0], args[1], rev)
+ elif len(args) == 1:
+ dumpprops(args[0], "", rev)
+ else:
+ usage()
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/examples/get-location-segments.py b/tools/examples/get-location-segments.py
new file mode 100755
index 0000000..705ffc5
--- /dev/null
+++ b/tools/examples/get-location-segments.py
@@ -0,0 +1,159 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+import sys
+import os
+import getpass
+from svn import client, ra, core
+
+def printer(segment, pool):
+ path = segment.path is not None and segment.path or "(null)"
+ print("r%d-r%d: %s" % (segment.range_start, segment.range_end, path))
+
+
+def parse_args(args):
+ argc = len(sys.argv)
+
+ # parse the target URL and optional peg revision
+ path_pieces = args[0].split('@')
+ if len(path_pieces) > 1:
+ peg_revision = int(path_pieces[-1])
+ assert peg_revision >= 0
+ url = '@'.join(path_pieces[:-1])
+ else:
+ peg_revision = core.SVN_INVALID_REVNUM
+ url = path_pieces[0]
+ url = core.svn_path_canonicalize(url)
+
+ # parse the revision range, if any
+ if argc > 2:
+ rev_pieces = args[1].split(':')
+ num_revs = len(rev_pieces)
+ assert num_revs < 3
+ if num_revs == 2:
+ start_revision = int(rev_pieces[0])
+ end_revision = int(rev_pieces[1])
+ else:
+ start_revision = end_revision = int(rev_pieces[0])
+ assert(start_revision >= 0)
+ assert(end_revision >= 0)
+ else:
+ start_revision = peg_revision
+ end_revision = 0
+
+ # validate
+ if start_revision >= 0 \
+ and end_revision >= 0 \
+ and end_revision > start_revision:
+ raise Exception("End revision must not be younger than start revision")
+ if peg_revision >= 0 \
+ and start_revision >= 0 \
+ and start_revision > peg_revision:
+ raise Exception("Start revision must not be younger than peg revision")
+
+ return url, peg_revision, start_revision, end_revision
+
+
+def prompt_func_ssl_unknown_cert(realm, failures, cert_info, may_save, pool):
+ print( "The certficate details are as follows:")
+ print("--------------------------------------")
+ print("Issuer : " + str(cert_info.issuer_dname))
+ print("Hostname : " + str(cert_info.hostname))
+ print("ValidFrom : " + str(cert_info.valid_from))
+ print("ValidUpto : " + str(cert_info.valid_until))
+ print("Fingerprint: " + str(cert_info.fingerprint))
+ print("")
+ ssl_trust = core.svn_auth_cred_ssl_server_trust_t()
+ if may_save:
+ choice = raw_input( "accept (t)temporarily (p)permanently: ")
+ else:
+ choice = raw_input( "(r)Reject or accept (t)temporarily: ")
+ if choice[0] == "t" or choice[0] == "T":
+ ssl_trust.may_save = False
+ ssl_trust.accepted_failures = failures
+ elif choice[0] == "p" or choice[0] == "P":
+ ssl_trust.may_save = True
+ ssl_trust.accepted_failures = failures
+ else:
+ ssl_trust = None
+ return ssl_trust
+
+def prompt_func_simple_prompt(realm, username, may_save, pool):
+ username = raw_input("username: ")
+ password = getpass.getpass(prompt="password: ")
+ simple_cred = core.svn_auth_cred_simple_t()
+ simple_cred.username = username
+ simple_cred.password = password
+ simple_cred.may_save = False
+ return simple_cred
+
+def prompt_func_gnome_keyring_prompt(keyring, pool):
+ return getpass.getpass(prompt="Password for '%s' GNOME keyring: " % keyring)
+
+def main():
+ try:
+ url, peg_revision, start_revision, end_revision = parse_args(sys.argv[1:])
+ except Exception as e:
+ sys.stderr.write("""Usage: %s URL[@PEG-REV] [START-REV[:END-REV]]
+
+Trace the history of URL@PEG-REV, printing the location(s) of its
+existence between START-REV and END-REV. If START-REV is not
+provided, the entire history of URL@PEG-REV back to its origin will be
+displayed. If provided, START-REV must not be younger than PEG-REV.
+If END-REV is provided, it must not be younger than START-REV.
+
+(This is a wrapper around Subversion's svn_ra_get_location_segments() API.)
+
+ERROR: %s
+""" % (os.path.basename(sys.argv[0]), str(e)))
+ sys.exit(1)
+
+ core.svn_config_ensure(None)
+ ctx = client.svn_client_create_context()
+ ctx.config = core.svn_config_get_config(None)
+
+ # Make sure that these are at the start of the list, so passwords from
+ # gnome-keyring / kwallet are checked before asking for new passwords.
+ providers = core.svn_auth_get_platform_specific_client_providers(ctx.config['config'], None)
+ providers.extend([
+ client.get_simple_provider(),
+ core.svn_auth_get_ssl_server_trust_file_provider(),
+ core.svn_auth_get_simple_prompt_provider(prompt_func_simple_prompt, 2),
+ core.svn_auth_get_ssl_server_trust_prompt_provider(prompt_func_ssl_unknown_cert),
+ client.get_username_provider(),
+ client.get_ssl_server_trust_file_provider(),
+ client.get_ssl_client_cert_file_provider(),
+ client.get_ssl_client_cert_pw_file_provider(),
+ ])
+
+ ctx.auth_baton = core.svn_auth_open(providers)
+
+ if hasattr(core, 'svn_auth_set_gnome_keyring_unlock_prompt_func'):
+ core.svn_auth_set_gnome_keyring_unlock_prompt_func(ctx.auth_baton, prompt_func_gnome_keyring_prompt)
+
+ ra_callbacks = ra.callbacks_t()
+ ra_callbacks.auth_baton = ctx.auth_baton
+ ra_session = ra.open(url, ra_callbacks, None, ctx.config)
+ ra.get_location_segments(ra_session, "", peg_revision,
+ start_revision, end_revision, printer)
+
+if __name__ == "__main__":
+ main()
diff --git a/tools/examples/getfile.py b/tools/examples/getfile.py
new file mode 100755
index 0000000..b8db211
--- /dev/null
+++ b/tools/examples/getfile.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# USAGE: getfile.py [-r REV] repos-path file
+#
+# gets a file from an SVN repository, puts it to sys.stdout
+#
+
+import sys
+import os
+import getopt
+try:
+ my_getopt = getopt.gnu_getopt
+except AttributeError:
+ my_getopt = getopt.getopt
+
+from svn import fs, core, repos
+
+CHUNK_SIZE = 16384
+
+def getfile(path, filename, rev=None):
+ path = core.svn_path_canonicalize(path)
+ repos_ptr = repos.open(path)
+ fsob = repos.fs(repos_ptr)
+
+ if rev is None:
+ rev = fs.youngest_rev(fsob)
+ print("Using youngest revision %s" % rev)
+
+ root = fs.revision_root(fsob, rev)
+ file = fs.file_contents(root, filename)
+ while True:
+ data = core.svn_stream_read(file, CHUNK_SIZE)
+ if not data:
+ break
+ sys.stdout.write(data)
+
+def usage():
+ print("USAGE: getfile.py [-r REV] repos-path file")
+ sys.exit(1)
+
+def main():
+ opts, args = my_getopt(sys.argv[1:], 'r:')
+ if len(args) != 2:
+ usage()
+ rev = None
+ for name, value in opts:
+ if name == '-r':
+ rev = int(value)
+ getfile(args[0], args[1], rev)
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/examples/getlocks_test.c b/tools/examples/getlocks_test.c
new file mode 100644
index 0000000..1051063
--- /dev/null
+++ b/tools/examples/getlocks_test.c
@@ -0,0 +1,271 @@
+/*
+ * getlocks_test.c : show all repository locks living below a URL
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ *
+ * To compile on unix against Subversion and APR libraries, try
+ * something like:
+ *
+ * cc getlocks_test.c -o getlocks_test \
+ * -I/usr/local/include/subversion-1 -I/usr/local/apache2/include \
+ * -L/usr/local/apache2/lib -L/usr/local/lib \
+ * -lsvn_client-1 -lsvn_ra-1 -lsvn_subr-1 -lapr-0 -laprutil-0
+ *
+ */
+
+#include "svn_client.h"
+#include "svn_pools.h"
+#include "svn_config.h"
+#include "svn_cmdline.h"
+#include "svn_time.h"
+#include "svn_fs.h"
+#include "svn_path.h"
+
+/* Display a prompt and read a one-line response into the provided buffer,
+ removing a trailing newline if present. */
+static svn_error_t *
+prompt_and_read_line(const char *prompt,
+ char *buffer,
+ size_t max)
+{
+ int len;
+ printf("%s: ", prompt);
+ if (fgets(buffer, max, stdin) == NULL)
+ return svn_error_create(0, NULL, "error reading stdin");
+ len = strlen(buffer);
+ if (len > 0 && buffer[len-1] == '\n')
+ buffer[len-1] = 0;
+ return SVN_NO_ERROR;
+}
+
+/* A tiny callback function of type 'svn_auth_simple_prompt_func_t'. For
+ a much better example, see svn_cl__auth_simple_prompt in the official
+ svn cmdline client. */
+static svn_error_t *
+my_simple_prompt_callback (svn_auth_cred_simple_t **cred,
+ void *baton,
+ const char *realm,
+ const char *username,
+ svn_boolean_t may_save,
+ apr_pool_t *pool)
+{
+ svn_auth_cred_simple_t *ret = apr_pcalloc (pool, sizeof (*ret));
+ char answerbuf[100];
+
+ if (realm)
+ {
+ printf ("Authentication realm: %s\n", realm);
+ }
+
+ if (username)
+ ret->username = apr_pstrdup (pool, username);
+ else
+ {
+ SVN_ERR (prompt_and_read_line("Username", answerbuf, sizeof(answerbuf)));
+ ret->username = apr_pstrdup (pool, answerbuf);
+ }
+
+ SVN_ERR (prompt_and_read_line("Password", answerbuf, sizeof(answerbuf)));
+ ret->password = apr_pstrdup (pool, answerbuf);
+
+ *cred = ret;
+ return SVN_NO_ERROR;
+}
+
+
+/* A tiny callback function of type 'svn_auth_username_prompt_func_t'. For
+ a much better example, see svn_cl__auth_username_prompt in the official
+ svn cmdline client. */
+static svn_error_t *
+my_username_prompt_callback (svn_auth_cred_username_t **cred,
+ void *baton,
+ const char *realm,
+ svn_boolean_t may_save,
+ apr_pool_t *pool)
+{
+ svn_auth_cred_username_t *ret = apr_pcalloc (pool, sizeof (*ret));
+ char answerbuf[100];
+
+ if (realm)
+ {
+ printf ("Authentication realm: %s\n", realm);
+ }
+
+ SVN_ERR (prompt_and_read_line("Username", answerbuf, sizeof(answerbuf)));
+ ret->username = apr_pstrdup (pool, answerbuf);
+
+ *cred = ret;
+ return SVN_NO_ERROR;
+}
+
+
+/* A callback function used when the RA layer needs a handle to a
+ temporary file. This is a reduced version of the callback used in
+ the official svn cmdline client. */
+static svn_error_t *
+open_tmp_file (apr_file_t **fp,
+ void *callback_baton,
+ apr_pool_t *pool)
+{
+ const char *path;
+ const char *ignored_filename;
+
+ SVN_ERR (svn_io_temp_dir (&path, pool));
+ path = svn_path_join (path, "tempfile", pool);
+
+ /* Open a unique file, with delete-on-close set. */
+ SVN_ERR (svn_io_open_unique_file2 (fp, &ignored_filename,
+ path, ".tmp",
+ svn_io_file_del_on_close, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+
+int
+main (int argc, const char **argv)
+{
+ apr_pool_t *pool;
+ svn_error_t *err;
+ apr_hash_t *locks;
+ apr_hash_index_t *hi;
+ const char *URL;
+ svn_ra_session_t *session;
+ svn_ra_callbacks_t *cbtable;
+ apr_hash_t *cfg_hash;
+ svn_auth_baton_t *auth_baton;
+
+ if (argc <= 1)
+ {
+ printf ("Usage: %s URL\n", argv[0]);
+ printf (" Print all locks at or below URL.\n");
+ return EXIT_FAILURE;
+ }
+ URL = argv[1];
+
+ /* Initialize the app. Send all error messages to 'stderr'. */
+ if (svn_cmdline_init ("ra_test", stderr) != EXIT_SUCCESS)
+ return EXIT_FAILURE;
+
+ /* Create top-level memory pool. Be sure to read the HACKING file to
+ understand how to properly use/free subpools. */
+ pool = svn_pool_create (NULL);
+
+ /* Initialize the FS library. */
+ err = svn_fs_initialize (pool);
+ if (err) goto hit_error;
+
+ /* Make sure the ~/.subversion run-time config files exist, and load. */
+ err = svn_config_ensure (NULL, pool);
+ if (err) goto hit_error;
+
+ err = svn_config_get_config (&cfg_hash, NULL, pool);
+ if (err) goto hit_error;
+
+ /* Build an authentication baton. */
+ {
+ /* There are many different kinds of authentication back-end
+ "providers". See svn_auth.h for a full overview. */
+ svn_auth_provider_object_t *provider;
+ apr_array_header_t *providers
+ = apr_array_make (pool, 4, sizeof (svn_auth_provider_object_t *));
+
+ svn_client_get_simple_prompt_provider (&provider,
+ my_simple_prompt_callback,
+ NULL, /* baton */
+ 2, /* retry limit */ pool);
+ APR_ARRAY_PUSH (providers, svn_auth_provider_object_t *) = provider;
+
+ svn_client_get_username_prompt_provider (&provider,
+ my_username_prompt_callback,
+ NULL, /* baton */
+ 2, /* retry limit */ pool);
+ APR_ARRAY_PUSH (providers, svn_auth_provider_object_t *) = provider;
+
+ /* Register the auth-providers into the context's auth_baton. */
+ svn_auth_open (&auth_baton, providers, pool);
+ }
+
+ /* Create a table of callbacks for the RA session, mostly nonexistent. */
+ cbtable = apr_pcalloc (pool, sizeof(*cbtable));
+ cbtable->auth_baton = auth_baton;
+ cbtable->open_tmp_file = open_tmp_file;
+
+ /* Now do the real work. */
+
+ err = svn_ra_open (&session, URL, cbtable, NULL, cfg_hash, pool);
+ if (err) goto hit_error;
+
+ err = svn_ra_get_locks (session, &locks, "", pool);
+ if (err) goto hit_error;
+
+ err = svn_cmdline_printf (pool, "\n");
+ if (err) goto hit_error;
+
+ for (hi = apr_hash_first (pool, locks); hi; hi = apr_hash_next (hi))
+ {
+ const void *key;
+ void *val;
+ const char *path, *cr_date, *exp_date;
+ svn_lock_t *lock;
+
+ apr_hash_this (hi, &key, NULL, &val);
+ path = key;
+ lock = val;
+
+ cr_date = svn_time_to_human_cstring (lock->creation_date, pool);
+
+ if (lock->expiration_date)
+ exp_date = svn_time_to_human_cstring (lock->expiration_date, pool);
+ else
+ exp_date = "never";
+
+ err = svn_cmdline_printf (pool, "%s\n", path);
+ if (err) goto hit_error;
+
+ err = svn_cmdline_printf (pool,
+ " UUID Token: %s\n", lock->token);
+ if (err) goto hit_error;
+
+ err = svn_cmdline_printf (pool,
+ " Owner: %s\n", lock->owner);
+ if (err) goto hit_error;
+
+ err = svn_cmdline_printf (pool,
+ " Comment: %s\n",
+ lock->comment ? lock->comment : "none");
+ if (err) goto hit_error;
+
+ err = svn_cmdline_printf (pool,
+ " Created: %s\n", cr_date);
+ if (err) goto hit_error;
+
+ err = svn_cmdline_printf (pool,
+ " Expires: %s\n\n", exp_date);
+ if (err) goto hit_error;
+ }
+
+ return EXIT_SUCCESS;
+
+ hit_error:
+ svn_handle_error2 (err, stderr, FALSE, "getlocks_test: ");
+ return EXIT_FAILURE;
+}
diff --git a/tools/examples/geturl.py b/tools/examples/geturl.py
new file mode 100755
index 0000000..d50e22e
--- /dev/null
+++ b/tools/examples/geturl.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# USAGE: geturl.py FILE_OR_DIR1 FILE_OR_DIR2 ...
+#
+# prints out the URL associated with each item
+#
+
+import os
+import sys
+
+import svn.wc
+import svn.core
+
+def main(files):
+ for f in files:
+ dirpath = fullpath = os.path.abspath(f)
+ if not os.path.isdir(dirpath):
+ dirpath = os.path.dirname(dirpath)
+ adm_baton = svn.wc.adm_open(None, dirpath, 1, 1)
+ try:
+ entry = svn.wc.entry(fullpath, adm_baton, 0)
+ print(entry.url)
+ finally:
+ svn.wc.adm_close(adm_baton)
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/tools/examples/headrev.c b/tools/examples/headrev.c
new file mode 100644
index 0000000..1f71d76
--- /dev/null
+++ b/tools/examples/headrev.c
@@ -0,0 +1,226 @@
+/*
+ * headrev.c : print out the HEAD revision of a repository.
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ *
+ * To compile on unix against Subversion and APR libraries, try
+ * something like:
+ *
+ * cc headrev.c -o headrev \
+ * -I/usr/local/include/subversion-1 -I/usr/local/apache2/include \
+ * -L/usr/local/apache2/lib -L/usr/local/lib \
+ * -lsvn_client-1 -lsvn_ra-1 -lsvn_subr-1 -lapr-0 -laprutil-0
+ *
+ */
+
+#include "svn_client.h"
+#include "svn_pools.h"
+#include "svn_config.h"
+#include "svn_fs.h"
+#include "svn_path.h"
+#include "svn_cmdline.h"
+
+
+/* Display a prompt and read a one-line response into the provided buffer,
+ removing a trailing newline if present. */
+static svn_error_t *
+prompt_and_read_line(const char *prompt,
+ char *buffer,
+ size_t max)
+{
+ int len;
+ printf("%s: ", prompt);
+ if (fgets(buffer, max, stdin) == NULL)
+ return svn_error_create(0, NULL, "error reading stdin");
+ len = strlen(buffer);
+ if (len > 0 && buffer[len-1] == '\n')
+ buffer[len-1] = 0;
+ return SVN_NO_ERROR;
+}
+
+/* A tiny callback function of type 'svn_auth_simple_prompt_func_t'. For
+ a much better example, see svn_cl__auth_simple_prompt in the official
+ svn cmdline client. */
+static svn_error_t *
+my_simple_prompt_callback (svn_auth_cred_simple_t **cred,
+ void *baton,
+ const char *realm,
+ const char *username,
+ svn_boolean_t may_save,
+ apr_pool_t *pool)
+{
+ svn_auth_cred_simple_t *ret = apr_pcalloc (pool, sizeof (*ret));
+ char answerbuf[100];
+
+ if (realm)
+ {
+ printf ("Authentication realm: %s\n", realm);
+ }
+
+ if (username)
+ ret->username = apr_pstrdup (pool, username);
+ else
+ {
+ SVN_ERR (prompt_and_read_line("Username", answerbuf, sizeof(answerbuf)));
+ ret->username = apr_pstrdup (pool, answerbuf);
+ }
+
+ SVN_ERR (prompt_and_read_line("Password", answerbuf, sizeof(answerbuf)));
+ ret->password = apr_pstrdup (pool, answerbuf);
+
+ *cred = ret;
+ return SVN_NO_ERROR;
+}
+
+
+/* A tiny callback function of type 'svn_auth_username_prompt_func_t'. For
+ a much better example, see svn_cl__auth_username_prompt in the official
+ svn cmdline client. */
+static svn_error_t *
+my_username_prompt_callback (svn_auth_cred_username_t **cred,
+ void *baton,
+ const char *realm,
+ svn_boolean_t may_save,
+ apr_pool_t *pool)
+{
+ svn_auth_cred_username_t *ret = apr_pcalloc (pool, sizeof (*ret));
+ char answerbuf[100];
+
+ if (realm)
+ {
+ printf ("Authentication realm: %s\n", realm);
+ }
+
+ SVN_ERR (prompt_and_read_line("Username", answerbuf, sizeof(answerbuf)));
+ ret->username = apr_pstrdup (pool, answerbuf);
+
+ *cred = ret;
+ return SVN_NO_ERROR;
+}
+
+
+/* A callback function used when the RA layer needs a handle to a
+ temporary file. This is a reduced version of the callback used in
+ the official svn cmdline client. */
+static svn_error_t *
+open_tmp_file (apr_file_t **fp,
+ void *callback_baton,
+ apr_pool_t *pool)
+{
+ const char *path;
+ const char *ignored_filename;
+
+ SVN_ERR (svn_io_temp_dir (&path, pool));
+ path = svn_path_join (path, "tempfile", pool);
+
+ /* Open a unique file, with delete-on-close set. */
+ SVN_ERR (svn_io_open_unique_file2 (fp, &ignored_filename,
+ path, ".tmp",
+ svn_io_file_del_on_close, pool));
+
+ return SVN_NO_ERROR;
+}
+
+
+int
+main (int argc, const char **argv)
+{
+ apr_pool_t *pool;
+ svn_error_t *err;
+ const char *URL;
+ svn_ra_session_t *session;
+ svn_ra_callbacks2_t *cbtable;
+ svn_revnum_t rev;
+ apr_hash_t *cfg_hash;
+ svn_auth_baton_t *auth_baton;
+
+ if (argc <= 1)
+ {
+ printf ("Usage: %s URL\n", argv[0]);
+ printf (" Print HEAD revision of URL's repository.\n");
+ return EXIT_FAILURE;
+ }
+ else
+ URL = argv[1];
+
+ /* Initialize the app. Send all error messages to 'stderr'. */
+ if (svn_cmdline_init ("headrev", stderr) != EXIT_SUCCESS)
+ return EXIT_FAILURE;
+
+ /* Create top-level memory pool. Be sure to read the HACKING file to
+ understand how to properly use/free subpools. */
+ pool = svn_pool_create (NULL);
+
+ /* Initialize the FS library. */
+ err = svn_fs_initialize (pool);
+ if (err) goto hit_error;
+
+ /* Make sure the ~/.subversion run-time config files exist, and load. */
+ err = svn_config_ensure (NULL, pool);
+ if (err) goto hit_error;
+
+ err = svn_config_get_config (&cfg_hash, NULL, pool);
+ if (err) goto hit_error;
+
+ /* Build an authentication baton. */
+ {
+ /* There are many different kinds of authentication back-end
+ "providers". See svn_auth.h for a full overview. */
+ svn_auth_provider_object_t *provider;
+ apr_array_header_t *providers
+ = apr_array_make (pool, 4, sizeof (svn_auth_provider_object_t *));
+
+ svn_client_get_simple_prompt_provider (&provider,
+ my_simple_prompt_callback,
+ NULL, /* baton */
+ 2, /* retry limit */ pool);
+ APR_ARRAY_PUSH (providers, svn_auth_provider_object_t *) = provider;
+
+ svn_client_get_username_prompt_provider (&provider,
+ my_username_prompt_callback,
+ NULL, /* baton */
+ 2, /* retry limit */ pool);
+ APR_ARRAY_PUSH (providers, svn_auth_provider_object_t *) = provider;
+
+ /* Register the auth-providers into the context's auth_baton. */
+ svn_auth_open (&auth_baton, providers, pool);
+ }
+
+ /* Create a table of callbacks for the RA session, mostly nonexistent. */
+ cbtable = apr_pcalloc (pool, sizeof(*cbtable));
+ cbtable->auth_baton = auth_baton;
+ cbtable->open_tmp_file = open_tmp_file;
+
+ /* Now do the real work. */
+
+ err = svn_ra_open2(&session, URL, cbtable, NULL, cfg_hash, pool);
+ if (err) goto hit_error;
+
+ err = svn_ra_get_latest_revnum(session, &rev, pool);
+ if (err) goto hit_error;
+
+ printf ("The latest revision is %ld.\n", rev);
+
+ return EXIT_SUCCESS;
+
+ hit_error:
+ svn_handle_error2 (err, stderr, FALSE, "headrev: ");
+ return EXIT_FAILURE;
+}
diff --git a/tools/examples/info.rb b/tools/examples/info.rb
new file mode 100644
index 0000000..e1097e3
--- /dev/null
+++ b/tools/examples/info.rb
@@ -0,0 +1,91 @@
+#!/usr/bin/env ruby
+#
+# info.rb : output some info about a subversion url
+#
+# Example based on a blogpost by Mark Deepwell
+# http://www.markdeepwell.com/2010/06/ruby-subversion-bindings/
+#
+######################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+#
+
+require "svn/core"
+require "svn/ext/core"
+require "svn/client"
+require "svn/wc"
+require "svn/repos"
+
+# Prompt function mimicking svn's own prompt
+simple_prompt = Proc.new do
+ |result, realm, username, default, may_save, pool|
+
+ puts "Authentication realm: #{realm}"
+ if username != nil
+ result.username = username
+ else
+ print "Username: "
+ result.username = STDIN.gets.strip
+ end
+ print "Password for '#{result.username}': "
+ result.password = STDIN.gets.strip
+end
+
+gnome_keyring_prompt = Proc.new do
+ |keyring_name|
+
+ print "Password for '#{keyring_name}' GNOME keyring: "
+ STDIN.gets.strip
+end
+
+if ARGV.length != 1
+ puts "Usage: info.rb URL[@REV]"
+else
+ ctx = Svn::Client::Context.new()
+ ctx.add_platform_specific_client_providers
+ ctx.add_simple_provider
+ ctx.add_simple_prompt_provider(2, simple_prompt)
+ ctx.add_username_provider
+ ctx.add_ssl_server_trust_file_provider
+ ctx.add_ssl_client_cert_file_provider
+ ctx.add_ssl_client_cert_pw_file_provider
+
+ # Allow asking for the gnome keyring password, in case the keyring is
+ # locked.
+ if Svn::Ext::Core.respond_to?(:svn_auth_set_gnome_keyring_unlock_prompt_func)
+ Svn::Ext::Core::svn_auth_set_gnome_keyring_unlock_prompt_func(ctx.auth_baton, gnome_keyring_prompt)
+ end
+
+ repos_uri, revision = ARGV[0].split("@", 2)
+ if revision
+ revision = Integer(revision)
+ end
+
+ begin
+ ctx.info(repos_uri, revision) do |path, info|
+ puts("Url: #{info.url}")
+ puts("Last changed rev: #{info.last_changed_rev}")
+ puts("Last changed author: #{info.last_changed_author}")
+ puts("Last changed date: #{info.last_changed_date}")
+ puts("Kind: #{info.kind}")
+ end
+ rescue Svn::Error => e
+ # catch a generic svn error
+ raise "Failed to retrieve SVN info at revision " + revision.to_s
+ end
+end
diff --git a/tools/examples/minimal_client.c b/tools/examples/minimal_client.c
new file mode 100644
index 0000000..967ce5b
--- /dev/null
+++ b/tools/examples/minimal_client.c
@@ -0,0 +1,285 @@
+/*
+ * minimal_client.c - a minimal Subversion client application ("hello world")
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ *
+ * This app demonstrates how to use the svn_client.h API.
+ *
+ * It reads a directory URL from the commandline, runs
+ * svn_client_list() and prints the list of directory-entries. It
+ * also knows how to deal with basic username/password authentication
+ * challenges.
+ *
+ * For a much more complex example, the svn cmdline client might be
+ * considered the 'reference implementation'.
+ *
+ * From a Linux system, a typical commandline compile might look like:
+ *
+ * cc minimal_client.c -o minimal_client \
+ * -I/usr/local/include/subversion-1 -I/usr/local/apache2/include \
+ * -L/usr/local/apache2/lib -L/usr/local/lib \
+ * -lsvn_client-1 -lapr-0 -laprutil-0
+ *
+ */
+
+
+#include "svn_client.h"
+#include "svn_cmdline.h"
+#include "svn_pools.h"
+#include "svn_config.h"
+#include "svn_fs.h"
+
+
+/* Display a prompt and read a one-line response into the provided buffer,
+ removing a trailing newline if present. */
+static svn_error_t *
+prompt_and_read_line(const char *prompt,
+ char *buffer,
+ size_t max)
+{
+ int len;
+ printf("%s: ", prompt);
+ if (fgets(buffer, max, stdin) == NULL)
+ return svn_error_create(0, NULL, "error reading stdin");
+ len = strlen(buffer);
+ if (len > 0 && buffer[len-1] == '\n')
+ buffer[len-1] = 0;
+ return SVN_NO_ERROR;
+}
+
/* A tiny callback function of type 'svn_auth_simple_prompt_func_t'. For
   a much better example, see svn_cl__auth_simple_prompt in the official
   svn cmdline client.

   Allocates *CRED in POOL.  Reuses USERNAME when the library already
   knows it and prompts only for what is missing; always prompts for
   the password.  BATON and MAY_SAVE are unused here. */
static svn_error_t *
my_simple_prompt_callback (svn_auth_cred_simple_t **cred,
                           void *baton,
                           const char *realm,
                           const char *username,
                           svn_boolean_t may_save,
                           apr_pool_t *pool)
{
  svn_auth_cred_simple_t *ret = apr_pcalloc (pool, sizeof (*ret));
  char answerbuf[100];

  if (realm)
    {
      printf ("Authentication realm: %s\n", realm);
    }

  if (username)
    ret->username = apr_pstrdup (pool, username);
  else
    {
      SVN_ERR (prompt_and_read_line("Username", answerbuf, sizeof(answerbuf)));
      ret->username = apr_pstrdup (pool, answerbuf);
    }

  /* answerbuf is reused; the username was already copied into the pool. */
  SVN_ERR (prompt_and_read_line("Password", answerbuf, sizeof(answerbuf)));
  ret->password = apr_pstrdup (pool, answerbuf);

  *cred = ret;
  return SVN_NO_ERROR;
}
+
+
/* A tiny callback function of type 'svn_auth_username_prompt_func_t'. For
   a much better example, see svn_cl__auth_username_prompt in the official
   svn cmdline client.

   Allocates *CRED in POOL and fills in only the username (no password
   is involved for this credential kind).  BATON and MAY_SAVE unused. */
static svn_error_t *
my_username_prompt_callback (svn_auth_cred_username_t **cred,
                             void *baton,
                             const char *realm,
                             svn_boolean_t may_save,
                             apr_pool_t *pool)
{
  svn_auth_cred_username_t *ret = apr_pcalloc (pool, sizeof (*ret));
  char answerbuf[100];

  if (realm)
    {
      printf ("Authentication realm: %s\n", realm);
    }

  SVN_ERR (prompt_and_read_line("Username", answerbuf, sizeof(answerbuf)));
  ret->username = apr_pstrdup (pool, answerbuf);

  *cred = ret;
  return SVN_NO_ERROR;
}
+
+
+
/* Entry point: set up a pool, configuration, and an auth-capable client
   context, then list the entries of the directory URL given on the
   command line.  Returns EXIT_SUCCESS/EXIT_FAILURE. */
int
main (int argc, const char **argv)
{
  apr_pool_t *pool;
  svn_error_t *err;
  svn_opt_revision_t revision;
  apr_hash_t *dirents;   /* entryname -> svn_dirent_t *, filled by the list call */
  apr_hash_index_t *hi;
  svn_client_ctx_t *ctx;
  const char *URL;

  if (argc <= 1)
    {
      printf ("Usage: %s URL\n", argv[0]);
      return EXIT_FAILURE;
    }
  else
    URL = argv[1];

  /* Initialize the app. Send all error messages to 'stderr'.  */
  if (svn_cmdline_init ("minimal_client", stderr) != EXIT_SUCCESS)
    return EXIT_FAILURE;

  /* Create top-level memory pool. Be sure to read the HACKING file to
     understand how to properly use/free subpools. */
  pool = svn_pool_create (NULL);

  /* Initialize the FS library. */
  err = svn_fs_initialize (pool);
  if (err)
    {
      /* For functions deeper in the stack, we usually use the
         SVN_ERR() exception-throwing macro (see svn_error.h).  At the
         top level, we catch & print the error with svn_handle_error2(). */
      svn_handle_error2 (err, stderr, FALSE, "minimal_client: ");
      return EXIT_FAILURE;
    }

  /* Make sure the ~/.subversion run-time config files exist */
  err = svn_config_ensure (NULL, pool);
  if (err)
    {
      svn_handle_error2 (err, stderr, FALSE, "minimal_client: ");
      return EXIT_FAILURE;
    }

  /* All clients need to fill out a client_ctx object. */
  {
    /* Initialize and allocate the client_ctx object. */
    if ((err = svn_client_create_context (&ctx, pool)))
      {
        svn_handle_error2 (err, stderr, FALSE, "minimal_client: ");
        return EXIT_FAILURE;
      }

    /* Load the run-time config file into a hash */
    if ((err = svn_config_get_config (&(ctx->config), NULL, pool)))
      {
        svn_handle_error2 (err, stderr, FALSE, "minimal_client: ");
        return EXIT_FAILURE;
      }

#ifdef WIN32
    /* Set the working copy administrative directory name. */
    if (getenv ("SVN_ASP_DOT_NET_HACK"))
      {
        err = svn_wc_set_adm_dir ("_svn", pool);
        if (err)
          {
            svn_handle_error2 (err, stderr, FALSE, "minimal_client: ");
            return EXIT_FAILURE;
          }
      }
#endif

    /* Depending on what your client does, you'll want to read about
       (and implement) the various callback function types below.  */

    /* A func (& context) which receives event signals during
       checkouts, updates, commits, etc.  */
    /* ctx->notify_func = my_notification_func;
       ctx->notify_baton = NULL; */

    /* A func (& context) which can receive log messages */
    /* ctx->log_msg_func = my_log_msg_receiver_func;
       ctx->log_msg_baton = NULL; */

    /* A func (& context) which checks whether the user cancelled */
    /* ctx->cancel_func = my_cancel_checking_func;
       ctx->cancel_baton = NULL; */

    /* Make the client_ctx capable of authenticating users */
    {
      /* There are many different kinds of authentication back-end
         "providers".  See svn_auth.h for a full overview.

         If you want to get the auth behavior of the 'svn' program,
         you can use svn_cmdline_setup_auth_baton, which will give
         you the exact set of auth providers it uses.  This program
         doesn't use it because it's only appropriate for a command
         line program, and this is supposed to be a general purpose
         example. */

      svn_auth_provider_object_t *provider;
      apr_array_header_t *providers
        = apr_array_make (pool, 4, sizeof (svn_auth_provider_object_t *));

      /* Only interactive prompt providers are registered, so every
         challenge goes through the callbacks defined above. */
      svn_auth_get_simple_prompt_provider (&provider,
                                           my_simple_prompt_callback,
                                           NULL, /* baton */
                                           2, /* retry limit */ pool);
      APR_ARRAY_PUSH (providers, svn_auth_provider_object_t *) = provider;

      svn_auth_get_username_prompt_provider (&provider,
                                             my_username_prompt_callback,
                                             NULL, /* baton */
                                             2, /* retry limit */ pool);
      APR_ARRAY_PUSH (providers, svn_auth_provider_object_t *) = provider;

      /* Register the auth-providers into the context's auth_baton. */
      svn_auth_open (&ctx->auth_baton, providers, pool);
    }
  } /* end of client_ctx setup */


  /* Now do the real work. */

  /* Set revision to always be the HEAD revision.  It could, however,
     be set to a specific revision number, date, or other values. */
  revision.kind = svn_opt_revision_head;

  /* Main call into libsvn_client does all the work.
     NOTE(review): svn_client_ls looks like the oldest form of this API;
     confirm against the svn_client.h being built against whether a
     newer svn_client_list* variant should be preferred. */
  err = svn_client_ls (&dirents,
                       URL, &revision,
                       FALSE, /* no recursion */
                       ctx, pool);
  if (err)
    {
      svn_handle_error2 (err, stderr, FALSE, "minimal_client: ");
      return EXIT_FAILURE;
    }

  /* Print the dir entries in the hash. */
  for (hi = apr_hash_first (pool, dirents); hi; hi = apr_hash_next (hi))
    {
      const char *entryname;
      svn_dirent_t *val;

      apr_hash_this (hi, (void *) &entryname, NULL, (void *) &val);
      printf ("   %s\n", entryname);

      /* 'val' is actually an svn_dirent_t structure; a more complex
         program would mine it for extra printable information. */
    }

  return EXIT_SUCCESS;
}
diff --git a/tools/examples/putfile.py b/tools/examples/putfile.py
new file mode 100755
index 0000000..4b028aa
--- /dev/null
+++ b/tools/examples/putfile.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# USAGE: putfile.py [-m commitmsg] [-u username] file repos-path
+#
+# put a file into an SVN repository
+#
+
+import sys
+import os
+import getopt
+try:
+ my_getopt = getopt.gnu_getopt
+except AttributeError:
+ my_getopt = getopt.getopt
+
+from svn import fs, core, repos, delta
+
def putfile(fname, rpath, uname="", commitmsg=""):
  """Commit the contents of local file FNAME to the same path inside the
  repository at RPATH, creating the repository file if necessary.

  uname     - author recorded on the new revision
  commitmsg - log message recorded on the new revision

  Prints the action taken and the resulting revision number.  If the
  target path is a directory in the repository, nothing is committed.
  """
  rpath = core.svn_path_canonicalize(rpath)
  repos_ptr = repos.open(rpath)
  fsob = repos.fs(repos_ptr)

  # open a transaction against HEAD
  rev = fs.youngest_rev(fsob)

  txn = repos.fs_begin_txn_for_commit(repos_ptr, rev, uname, commitmsg)

  root = fs.txn_root(txn)

  kind = fs.check_path(root, fname)
  if kind == core.svn_node_none:
    print("file '%s' does not exist, creating..." % fname)
    fs.make_file(root, fname)
  elif kind == core.svn_node_dir:
    print("File '%s' is a dir." % fname)
    return
  else:
    print("Updating file '%s'" % fname)

  handler, baton = fs.apply_textdelta(root, fname, None, None)

  ### it would be nice to get an svn_stream_t. for now, just load in the
  ### whole file and shove it into the FS.
  # 'with' closes the local file promptly (the original leaked the handle).
  with open(fname, 'rb') as contents:
    delta.svn_txdelta_send_string(contents.read(), handler, baton)

  newrev = repos.fs_commit_txn(repos_ptr, txn)
  print("revision: %s" % newrev)
+
def usage():
  """Print the command synopsis and terminate with exit status 1."""
  synopsis = "USAGE: putfile.py [-m commitmsg] [-u username] file repos-path"
  print(synopsis)
  sys.exit(1)
+
def main():
  """Parse -m/-u options from the command line and run putfile()."""
  opts, args = my_getopt(sys.argv[1:], 'm:u:')
  if len(args) != 2:
    usage()

  username = ""
  message = ""
  for opt, val in opts:
    if opt == '-u':
      username = val
    elif opt == '-m':
      message = val

  putfile(args[0], args[1], username, message)
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/examples/revplist.py b/tools/examples/revplist.py
new file mode 100755
index 0000000..5e22928
--- /dev/null
+++ b/tools/examples/revplist.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+#
+# revplist.py : display revision properties
+#
+######################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+#
+
+import sys
+import os
+import getopt
+try:
+ my_getopt = getopt.gnu_getopt
+except AttributeError:
+ my_getopt = getopt.getopt
+
+from svn import fs, core
+
def plist(rev=None, home='.', *props):
  """Print revision properties of revision REV in the repository at HOME.

  With PROPS given, only those properties are shown (missing ones are
  reported as '<not present>'); otherwise every revision property is
  listed.  REV defaults to the youngest revision.
  """
  # HOME may be either the repository root (containing db/) or the
  # db directory itself.
  db_path = os.path.join(home, 'db')
  if not os.path.exists(db_path):
    db_path = home

  fs_ptr = fs.new(None)
  fs.open_berkeley(fs_ptr, db_path)

  if rev is None:
    rev = fs.youngest_rev(fs_ptr)

  print('Properties for revision: %s' % rev)

  if not props:
    # No names requested: dump everything.
    for propname, value in fs.revision_proplist(fs_ptr, rev).items():
      print('%s: %s' % (propname, value))
    return

  for propname in props:
    value = fs.revision_prop(fs_ptr, rev, propname)
    if value is None:
      print('%s: <not present>' % propname)
    else:
      print('%s: %s' % (propname, value))
+
def usage():
  """Print usage information and exit with status 1."""
  synopsis = "USAGE: %s [-r REV] [-h DBHOME] [PROP1 [PROP2 ...]]"
  print(synopsis % sys.argv[0])
  sys.exit(1)
+
def main():
  """Parse -r/-h options and dispatch to plist()."""
  ### how to invoke usage() ?
  opts, args = my_getopt(sys.argv[1:], 'r:h:')

  rev, home = None, '.'
  for opt, val in opts:
    if opt == '-r':
      rev = int(val)
    elif opt == '-h':
      home = val

  plist(rev, home, *args)
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/examples/svnlog2html.rb b/tools/examples/svnlog2html.rb
new file mode 100755
index 0000000..a7571cc
--- /dev/null
+++ b/tools/examples/svnlog2html.rb
@@ -0,0 +1,139 @@
+#!/usr/bin/env ruby
+
+#
+######################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+#
+
+require "erb"
+require "svn/client"
+
+include ERB::Util
+
# Render the most recent Subversion log entries for PATH (first command
# line argument, default: current directory) as a standalone XHTML page
# on stdout.
path = File.expand_path(ARGV.shift || Dir.pwd)

# Static page header with inline CSS: one bordered <div class="entry">
# per revision; changed paths coloured by action (A blue, M green,
# D red struck-through).
html = <<-HEADER
<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE html
  PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
  <style type="text/css">
div.entry
{
  border: 1px solid red;
  border-width: 1px 0 0 1px;
  margin: 2em 2em 2em 3em;
  padding: 0 2em;
}

pre.message
{
  border-left: 1px solid red;
  margin: 1em 2em;
  padding-left: 1em;
}

div.info
{
  text-align: right;
}

span.info
{
  border-bottom: 1px solid red;
  padding: 0 5px 1px 1em;
}

span.author
{
  font-style: italic;
}

span.date
{
  color: #999;
}

li.action-A
{
  color: blue;
}

li.action-M
{
  color: green;
}

li.action-D
{
  color: red;
  text-decoration: line-through;
}
  </style>
  <title>#{h path}</title>
</head>
<body>
<h1>#{h path}</h1>
HEADER

ctx = Svn::Client::Context.new
# Walk the log from HEAD back towards r0 with changed-path discovery;
# the 40 presumably limits the number of entries -- confirm against the
# Svn::Client::Context#log binding signature.
ctx.log(path, "HEAD", 0, 40, true, true) do
  |changed_paths, rev, author, date, message|

  # One entry block per revision: number, escaped message, author/date.
  html << <<-ENTRY_HEADER

<div class="entry">
  <h2>r#{h rev}</h2>
  <pre class="message">#{h message}</pre>
  <div class="info">
    <span class="info">
      by <span class="author">#{h author}</span>
      at <span class="date">#{date}</span>
    </span>
  </div>
  <div class="changed-path">
ENTRY_HEADER

  # List each changed path, styled by its action letter.
  changed_paths.sort.each do |path, changed_path|
    action = changed_path.action
    html << <<-ENTRY_PATH
    <ul>
      <li class="action-#{h action}">
        <span class="action">#{h action}</span>:
        <span class="changed-path">#{h path}</span>
      </li>
    </ul>
ENTRY_PATH
  end

  html << <<-ENTRY_FOOTER
  </div>
</div>

ENTRY_FOOTER
end

html << <<-FOOTER
</body>
</html>
FOOTER

puts html
diff --git a/tools/examples/svnlook.py b/tools/examples/svnlook.py
new file mode 100755
index 0000000..5865e22
--- /dev/null
+++ b/tools/examples/svnlook.py
@@ -0,0 +1,560 @@
+#!/usr/bin/env python
+#
+# svnlook.py : alternative svnlook in Python with library API
+#
+######################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+#
+# $HeadURL: https://svn.apache.org/repos/asf/subversion/branches/1.10.x/tools/examples/svnlook.py $
+# $LastChangedDate: 2013-11-14 11:11:07 +0000 (Thu, 14 Nov 2013) $
+# $LastChangedRevision: 1541878 $
+
+"""
+svnlook.py can also be used as a Python module::
+
+ >>> import svnlook
+ >>> svnlook = svnlook.SVNLook("/testrepo")
+ >>> svnlook.get_author()
+ 'randomjoe'
+
+
+Accessible API::
+
+[x] author
+[x] changed
+[x] date
+[ ] diff
+[x] dirs-changed
+[ ] ids
+[x] info
+[x] log
+[ ] tree
+---
+[ ] generator API to avoid passing lists
+"""
+
+
+import sys
+import time
+import os
+
+from svn import core, fs, delta, repos
+
class SVNLook(object):
  """Inspect a revision or transaction of a repository, svnlook-style.

  Use the get_* methods for programmatic access, or pass cmd= to run
  the corresponding cmd_* printing method at construction time.
  """

  def __init__(self, path, rev=None, txn=None, cmd=None):
    """
    path - path to repository
    rev - revision number
    txn - name of transaction (usually the one about to be committed)
    cmd - if set, specifies cmd_* method to execute

    txn takes precedence over rev; if both are None, inspect the head revision
    """
    path = core.svn_path_canonicalize(path)
    repos_ptr = repos.open(path)
    self.fs_ptr = repos.fs(repos_ptr)

    # if set, txn takes precedence
    if txn:
      self.txn_ptr = fs.open_txn(self.fs_ptr, txn)
    else:
      self.txn_ptr = None
      if rev is None:
        rev = fs.youngest_rev(self.fs_ptr)
      else:
        rev = int(rev)
    self.rev = rev

    if cmd != None:
      getattr(self, 'cmd_' + cmd)()

  def cmd_default(self):
    """Default output: the same as running 'info' then 'tree'."""
    self.cmd_info()
    self.cmd_tree()

  def cmd_author(self):
    """Print the author, or an empty line if unset."""
    print(self.get_author() or '')

  def cmd_changed(self):
    """Print one 'STATUS PATH' line per changed path."""
    for status, path in self.get_changed():
      print("%-3s %s" % (status, path))

  def cmd_date(self):
    # duplicate original svnlook format, which is
    # 2010-02-08 21:53:15 +0200 (Mon, 08 Feb 2010)
    secs = self.get_date(unixtime=True)
    if secs is None:
      print("")
    else:
      # convert to tuple, detect time zone and format
      stamp = time.localtime(secs)
      isdst = stamp.tm_isdst
      utcoffset = -(time.altzone if (time.daylight and isdst) else time.timezone) // 60

      suffix = "%+03d%02d" % (utcoffset // 60, abs(utcoffset) % 60)
      outstr = time.strftime('%Y-%m-%d %H:%M:%S ', stamp) + suffix
      outstr += time.strftime(' (%a, %d %b %Y)', stamp)
      print(outstr)


  def cmd_diff(self):
    """Print GNU-style diffs of changed files and properties."""
    self._print_tree(DiffEditor, pass_root=1)

  def cmd_dirs_changed(self):
    """Print each changed directory, one per line."""
    for dir in self.get_changed_dirs():
      print(dir)

  def cmd_ids(self):
    """Print the whole tree (base_rev=0) with node ids."""
    self._print_tree(Editor, base_rev=0, pass_root=1)

  def cmd_info(self):
    """print the author, date, log_size, and log message"""
    self.cmd_author()
    self.cmd_date()
    log = self.get_log() or ''
    print(len(log))
    print(log)

  def cmd_log(self):
    """Print the log message, or an empty line if unset."""
    print(self.get_log() or '')

  def cmd_tree(self):
    """Print the whole tree (base_rev=0) without node ids."""
    self._print_tree(Editor, base_rev=0)


  # --- API getters
  def get_author(self):
    """return string with the author name or None"""
    return self._get_property(core.SVN_PROP_REVISION_AUTHOR)

  def get_changed(self):
    """return list of tuples (status, path)"""
    ret = []
    def list_callback(status, path):
      ret.append( (status, path) )
    self._walk_tree(ChangedEditor, pass_root=1, callback=list_callback)
    return ret

  def get_date(self, unixtime=False):
    """return commit timestamp in RFC 3339 format (2010-02-08T20:37:25.195000Z)
    if unixtime is True, return unix timestamp
    return None for a txn, or if date property is not set
    """
    if self.txn_ptr:
      return None

    date = self._get_property(core.SVN_PROP_REVISION_DATE)
    if not unixtime or date == None:
      return date

    # convert to unix time
    aprtime = core.svn_time_from_cstring(date)
    # ### convert to a time_t; this requires intimate knowledge of
    # ### the apr_time_t type
    secs = aprtime / 1000000 # aprtime is microseconds; make seconds
    return secs

  def get_changed_dirs(self):
    """return list of changed dirs
    dir names end with trailing forward slash even on windows
    """
    dirlist = []
    def list_callback(item):
      dirlist.append(item)
    self._walk_tree(DirsChangedEditor, callback=list_callback)
    return dirlist

  def get_log(self):
    """return log message string or None if not present"""
    return self._get_property(core.SVN_PROP_REVISION_LOG)


  # --- Internal helpers
  def _get_property(self, name):
    # A transaction carries its own properties; otherwise read the
    # revision's property.
    if self.txn_ptr:
      return fs.txn_prop(self.txn_ptr, name)
    return fs.revision_prop(self.fs_ptr, self.rev, name)

  def _print_tree(self, e_factory, base_rev=None, pass_root=0):
    # Same walk as _walk_tree, but every reported item goes to stdout.
    def print_callback(msg):
      print(msg)
    self._walk_tree(e_factory, base_rev, pass_root, callback=print_callback)

  # svn fs, delta, repos calls needs review according to DeltaEditor documentation
  def _walk_tree(self, e_factory, base_rev=None, pass_root=0, callback=None):
    if base_rev is None:
      # a specific base rev was not provided. use the transaction base,
      # or the previous revision
      if self.txn_ptr:
        base_rev = fs.txn_base_revision(self.txn_ptr)
      elif self.rev == 0:
        base_rev = 0
      else:
        base_rev = self.rev - 1

    # get the current root
    if self.txn_ptr:
      root = fs.txn_root(self.txn_ptr)
    else:
      root = fs.revision_root(self.fs_ptr, self.rev)

    # the base of the comparison
    base_root = fs.revision_root(self.fs_ptr, base_rev)

    if callback == None:
      callback = lambda msg: None

    # Some editors need the roots to look up node ids / old contents.
    if pass_root:
      editor = e_factory(root, base_root, callback)
    else:
      editor = e_factory(callback=callback)

    # construct the editor for printing these things out
    e_ptr, e_baton = delta.make_editor(editor)

    # compute the delta, printing as we go
    def authz_cb(root, path, pool):
      return 1
    repos.dir_delta(base_root, '', '', root, '',
                    e_ptr, e_baton, authz_cb, 0, 1, 0, 0)
+
+
+# ---------------------------------------------------------
+# Delta Editors. For documentation see:
+# http://subversion.apache.org/docs/community-guide/#docs
+
# this one doesn't process delete_entry, change_dir_prop, apply_text_delta,
# change_file_prop, close_file, close_edit, abort_edit
# ?set_target_revision
# need review
class Editor(delta.Editor):
  """Delta editor that prints a tree listing, one node per line,
  indented one space per directory level.  When constructed with a
  root, each node's id is appended (see _get_id)."""

  def __init__(self, root=None, base_root=None, callback=None):
    """callback argument is unused for this editor"""
    self.root = root
    # base_root ignored

    self.indent = ''

  def open_root(self, base_revision, dir_pool):
    # the repository root itself; everything below gets indented
    print('/' + self._get_id('/'))
    self.indent = self.indent + ' ' # indent one space

  def add_directory(self, path, *args):
    id = self._get_id(path)
    print(self.indent + _basename(path) + '/' + id)
    self.indent = self.indent + ' ' # indent one space

  # we cheat. one method implementation for two entry points.
  open_directory = add_directory

  def close_directory(self, baton):
    # note: if indents are being performed, this slice just returns
    # another empty string.
    self.indent = self.indent[:-1]

  def add_file(self, path, *args):
    id = self._get_id(path)
    print(self.indent + _basename(path) + id)

  # we cheat. one method implementation for two entry points.
  open_file = add_file

  def _get_id(self, path):
    # ' <node-id>' when a root is available, '' otherwise
    if self.root:
      id = fs.node_id(self.root, path)
      return ' <%s>' % fs.unparse_id(id)
    return ''
+
+# doesn't process close_directory, apply_text_delta,
+# change_file_prop, close_file, close_edit, abort_edit
+# ?set_target_revision
class DirsChangedEditor(delta.Editor):
  """print names of changed dirs, callback(dir) is a printer function

  Each directory baton is [not_yet_reported, path]; the first change
  seen under a directory reports it once and clears the flag.
  """
  def __init__(self, callback):
    self.callback = callback

  def open_root(self, base_revision, dir_pool):
    return [ 1, '' ]

  def delete_entry(self, path, revision, parent_baton, pool):
    self._dir_changed(parent_baton)

  def add_directory(self, path, parent_baton,
                    copyfrom_path, copyfrom_revision, dir_pool):
    self._dir_changed(parent_baton)
    return [ 1, path ]

  def open_directory(self, path, parent_baton, base_revision, dir_pool):
    return [ 1, path ]

  def change_dir_prop(self, dir_baton, name, value, pool):
    self._dir_changed(dir_baton)

  def add_file(self, path, parent_baton,
               copyfrom_path, copyfrom_revision, file_pool):
    self._dir_changed(parent_baton)

  def open_file(self, path, parent_baton, base_revision, file_pool):
    # some kind of change is going to happen
    self._dir_changed(parent_baton)

  def _dir_changed(self, baton):
    if baton[0]:
      # the directory hasn't been printed yet. do it.
      self.callback(baton[1] + '/')
      baton[0] = 0
+
class ChangedEditor(delta.Editor):
  """Delta editor reporting svnlook-style change statuses.

  Directory batons are [not_yet_reported, path].  File batons are
  [text_status, prop_status, path], where path is None for files added
  in this delta (those are reported immediately as 'A').
  """
  def __init__(self, root, base_root, callback):
    """callback(status, path) is a printer function"""
    self.root = root
    self.base_root = base_root
    self.callback = callback

  def open_root(self, base_revision, dir_pool):
    return [ 1, '' ]

  def delete_entry(self, path, revision, parent_baton, pool):
    ### need more logic to detect 'replace'
    if fs.is_dir(self.base_root, '/' + path):
      self.callback('D', path + '/')
    else:
      self.callback('D', path)

  def add_directory(self, path, parent_baton,
                    copyfrom_path, copyfrom_revision, dir_pool):
    self.callback('A', path + '/')
    return [ 0, path ]

  def open_directory(self, path, parent_baton, base_revision, dir_pool):
    return [ 1, path ]

  def change_dir_prop(self, dir_baton, name, value, pool):
    if dir_baton[0]:
      # the directory hasn't been printed yet. do it.
      self.callback('_U', dir_baton[1] + '/')
      dir_baton[0] = 0

  def add_file(self, path, parent_baton,
               copyfrom_path, copyfrom_revision, file_pool):
    self.callback('A', path)
    return [ '_', ' ', None ]

  def open_file(self, path, parent_baton, base_revision, file_pool):
    return [ '_', ' ', path ]

  def apply_textdelta(self, file_baton, base_checksum):
    # text is changing; remember it in the baton for close_file
    file_baton[0] = 'U'

    # no handler
    return None

  def change_file_prop(self, file_baton, name, value, pool):
    file_baton[1] = 'U'

  def close_file(self, file_baton, text_checksum):
    text_mod, prop_mod, path = file_baton
    # test the path. it will be None if we added this file.
    if path:
      status = text_mod + prop_mod
      # was there some kind of change?
      if status != '_ ':
        self.callback(status.rstrip(), path)
+
+
class DiffEditor(delta.Editor):
  """Delta editor that prints GNU-style diffs of changed files and
  property changes, using fs.FileDiff to produce the file diffs.
  File batons are [text_status, prop_status, path] with path None for
  added files (diffed immediately in add_file)."""
  def __init__(self, root, base_root, callback=None):
    """callback argument is unused for this editor"""
    self.root = root
    self.base_root = base_root
    self.target_revision = 0

  def _do_diff(self, base_path, path):
    # Which side is None decides the header line and the diff label.
    if base_path is None:
      print("Added: " + path)
      label = path
    elif path is None:
      print("Removed: " + base_path)
      label = base_path
    else:
      print("Modified: " + path)
      label = path
    print("===============================================================" + \
          "===============")
    args = []
    args.append("-L")
    args.append(label + "\t(original)")
    args.append("-L")
    args.append(label + "\t(new)")
    args.append("-u")
    differ = fs.FileDiff(self.base_root, base_path, self.root,
                         path, diffoptions=args)
    pobj = differ.get_pipe()
    while True:
      line = pobj.readline()
      if not line:
        break
      sys.stdout.write("%s " % line)
    print("")

  def _do_prop_diff(self, path, prop_name, prop_val, pool):
    print("Property changes on: " + path)
    print("_______________________________________________________________" + \
          "_______________")

    old_prop_val = None

    try:
      old_prop_val = fs.node_prop(self.base_root, path, prop_name, pool)
    except core.SubversionException:
      pass # Must be a new path

    # Old vs. new value decides Added / Modified / Deleted.
    if old_prop_val:
      if prop_val:
        print("Modified: " + prop_name)
        print(" - " + str(old_prop_val))
        print(" + " + str(prop_val))
      else:
        print("Deleted: " + prop_name)
        print(" - " + str(old_prop_val))
    else:
      print("Added: " + prop_name)
      print(" + " + str(prop_val))

    print("")

  def delete_entry(self, path, revision, parent_baton, pool):
    ### need more logic to detect 'replace'
    if not fs.is_dir(self.base_root, '/' + path):
      self._do_diff(path, None)

  def add_directory(self, path, parent_baton, copyfrom_path,
                    copyfrom_revision, dir_pool):
    return [ 1, path ]

  def add_file(self, path, parent_baton,
               copyfrom_path, copyfrom_revision, file_pool):
    self._do_diff(None, path)
    return [ '_', ' ', None ]

  def open_root(self, base_revision, dir_pool):
    return [ 1, '' ]

  def open_directory(self, path, parent_baton, base_revision, dir_pool):
    return [ 1, path ]

  def open_file(self, path, parent_baton, base_revision, file_pool):
    return [ '_', ' ', path ]

  def apply_textdelta(self, file_baton, base_checksum):
    # Only opened (pre-existing) files are diffed here; added files
    # were already diffed in add_file.
    if file_baton[2] is not None:
      self._do_diff(file_baton[2], file_baton[2])
    return None

  def change_file_prop(self, file_baton, name, value, pool):
    if file_baton[2] is not None:
      self._do_prop_diff(file_baton[2], name, value, pool)
    return None

  def change_dir_prop(self, dir_baton, name, value, pool):
    if dir_baton[1] is not None:
      self._do_prop_diff(dir_baton[1], name, value, pool)
    return None

  def set_target_revision(self, target_revision):
    self.target_revision = target_revision
+
+def _basename(path):
+ "Return the basename for a '/'-separated path."
+ idx = path.rfind('/')
+ if idx == -1:
+ return path
+ return path[idx+1:]
+
+
def usage(exit):
  """Print the command synopsis and subcommand list, then terminate.

  A nonzero EXIT writes to stderr (error path); zero writes to stdout
  (help path).  The process exits with status EXIT.
  """
  if exit:
    output = sys.stderr
  else:
    output = sys.stdout

  # NOTE: 'date' was misspelled 'data' in the original info line;
  # cmd_info prints the date (see cmd_date), so the help now says so.
  output.write(
    "usage: %s REPOS_PATH rev REV [COMMAND] - inspect revision REV\n"
    "       %s REPOS_PATH txn TXN [COMMAND] - inspect transaction TXN\n"
    "       %s REPOS_PATH [COMMAND] - inspect the youngest revision\n"
    "\n"
    "REV is a revision number > 0.\n"
    "TXN is a transaction name.\n"
    "\n"
    "If no command is given, the default output (which is the same as\n"
    "running the subcommands `info' then `tree') will be printed.\n"
    "\n"
    "COMMAND can be one of: \n"
    "\n"
    "   author:        print author.\n"
    "   changed:       print full change summary: all dirs & files changed.\n"
    "   date:          print the timestamp (revisions only).\n"
    "   diff:          print GNU-style diffs of changed files and props.\n"
    "   dirs-changed:  print changed directories.\n"
    "   ids:           print the tree, with nodes ids.\n"
    "   info:          print the author, date, log_size, and log message.\n"
    "   log:           print log message.\n"
    "   tree:          print the tree.\n"
    "\n"
    % (sys.argv[0], sys.argv[0], sys.argv[0]))

  sys.exit(exit)
+
def main():
  """Entry point: REPOS_PATH, optional 'rev N' / 'txn NAME' selector,
  optional subcommand; constructs SVNLook which runs the command."""
  if len(sys.argv) < 2:
    usage(1)

  rev = txn = None
  args = sys.argv[2:]

  # Peel off an optional revision/transaction selector.
  if args:
    selector = args[0]
    if selector == 'rev':
      if len(args) == 1:
        usage(1)
      try:
        rev = int(args[1])
      except ValueError:
        usage(1)
      del args[:2]
    elif selector == 'txn':
      if len(args) == 1:
        usage(1)
      txn = args[1]
      del args[:2]

  # Whatever remains must be at most one subcommand name.
  if not args:
    cmd = 'default'
  elif len(args) > 1:
    usage(1)
  else:
    cmd = args[0].replace('-', '_')

  if not hasattr(SVNLook, 'cmd_' + cmd):
    usage(1)

  SVNLook(sys.argv[1], rev, txn, cmd)
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/examples/svnlook.rb b/tools/examples/svnlook.rb
new file mode 100755
index 0000000..a48dcca
--- /dev/null
+++ b/tools/examples/svnlook.rb
@@ -0,0 +1,516 @@
+#!/usr/bin/env ruby
+#
+# svnlook.rb : a Ruby-based replacement for svnlook
+#
+######################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+#
+
+require "svn/core"
+require "svn/fs"
+require "svn/delta"
+require "svn/repos"
+
# Strip a single trailing slash from PATH, if one is present.
# (Used to normalize the repository path handed to Svn::Repos.open.)
def basename(path)
  path.sub(/\/\z/, "")
end
+
# SvnLook: a Ruby-based replacement for svnlook
class SvnLook

  # Initialize the SvnLook application
  def initialize(path, rev, txn)
    # Open a repository
    @fs = Svn::Repos.open(basename(path)).fs

    # If a transaction was specified, open it
    if txn
      @txn = @fs.open_txn(txn)
    else
      # Use the latest revision from the repo,
      # if they haven't specified a revision
      @txn = nil
      rev ||= @fs.youngest_rev
    end

    @rev = rev
  end

  # Dispatch all commands to appropriate subroutines
  def run(cmd, *args)
    dispatch(cmd, *args)
  end

  private

  # Dispatch all commands to appropriate subroutines
  # (cmd_* methods are private, hence the `true` to respond_to?)
  def dispatch(cmd, *args)
    if respond_to?("cmd_#{cmd}", true)
      begin
        __send__("cmd_#{cmd}", *args)
      rescue ArgumentError
        puts $!.message
        puts $@
        puts("invalid argument for #{cmd}: #{args.join(' ')}")
      end
    else
      puts("unknown command: #{cmd}")
    end
  end

  # Default command: Run the 'info' and 'tree' commands
  def cmd_default
    cmd_info
    cmd_tree
  end

  # Print the 'author' of the specified revision or transaction
  def cmd_author
    puts(property(Svn::Core::PROP_REVISION_AUTHOR) || "")
  end

  # Not implemented yet
  def cmd_cat
  end

  # Find out what has changed in the specified revision or transaction
  def cmd_changed
    print_tree(ChangedEditor, nil, true)
  end

  # Output the date that the current revision was committed.
  def cmd_date
    if @txn
      # It's not committed yet, so output nothing
      puts
    else
      # Get the time the revision was committed
      date = property(Svn::Core::PROP_REVISION_DATE)

      if date
        # Print out the date in a nice format
        puts date.strftime('%Y-%m-%d %H:%M(%Z)')
      else
        # The specified revision doesn't have an associated date.
        # Output just a blank line.
        puts
      end
    end
  end

  # Output what changed in the specified revision / transaction
  def cmd_diff
    print_tree(DiffEditor, nil, true)
  end

  # Output what directories changed in the specified revision / transaction
  def cmd_dirs_changed
    print_tree(DirsChangedEditor)
  end

  # Output the tree, with node ids
  def cmd_ids
    print_tree(Editor, 0, true)
  end

  # Output the author, date, and the log associated with the specified
  # revision / transaction
  def cmd_info
    cmd_author
    cmd_date
    cmd_log(true)
  end

  # Output the log message associated with the specified revision / transaction
  def cmd_log(print_size=false)
    log = property(Svn::Core::PROP_REVISION_LOG) || ''
    puts log.length if print_size
    puts log
  end

  # Output the tree associated with the provided tree
  def cmd_tree
    print_tree(Editor, 0)
  end

  # Output the repository's UUID.
  def cmd_uuid
    puts @fs.uuid
  end

  # Output the repository's youngest revision.
  def cmd_youngest
    puts @fs.youngest_rev
  end

  # Return a property of the specified revision or transaction.
  # Name: the ID of the property you want to retrieve.
  # E.g. Svn::Core::PROP_REVISION_LOG
  def property(name)
    if @txn
      @txn.prop(name)
    else
      @fs.prop(name, @rev)
    end
  end

  # Print a tree of differences between two revisions
  def print_tree(editor_class, base_rev=nil, pass_root=false)
    if base_rev.nil?
      if @txn
        # Output changes since the base revision of the transaction
        base_rev = @txn.base_revision
      else
        # Output changes since the previous revision
        base_rev = @rev - 1
      end
    end

    # Get the root of the specified transaction or revision
    if @txn
      root = @txn.root
    else
      root = @fs.root(@rev)
    end

    # Get the root of the base revision
    base_root = @fs.root(base_rev)

    # Does the provided editor need to know
    # the revision and base revision we're working with?
    if pass_root
      # Create a new editor with the provided root and base_root
      editor = editor_class.new(root, base_root)
    else
      # Create a new editor with nil root and base_roots
      editor = editor_class.new
    end

    # Do a directory delta between the two roots with
    # the specified editor
    base_root.dir_delta('', '', root, '', editor)
  end

  # Output the current tree for a specified revision
  class Editor < Svn::Delta::BaseEditor

    # Initialize the Editor object
    def initialize(root=nil, base_root=nil)
      @root = root
      # base_root ignored

      @indent = ""
    end

    # Recurse through the root (and increase the indent level)
    def open_root(base_revision)
      puts "/#{id('/')}"
      @indent << ' '
    end

    # If a directory is added, output this and increase
    # the indent level
    def add_directory(path, *args)
      puts "#{@indent}#{basename(path)}/#{id(path)}"
      @indent << ' '
    end

    alias open_directory add_directory

    # If a directory is closed, reduce the indent level
    def close_directory(baton)
      @indent.chop!
    end

    # If a file is added, output that it has been changed
    def add_file(path, *args)
      puts "#{@indent}#{basename(path)}#{id(path)}"
    end

    alias open_file add_file

    # Private methods
    private

    # Get the node id of a particular path
    def id(path)
      if @root
        fs_id = @root.node_id(path)
        " <#{fs_id.unparse}>"
      else
        ""
      end
    end
  end


  # Output directories that have been changed.
  # In this class, methods such as open_root and add_file
  # are inherited from Svn::Delta::ChangedDirsEditor.
  class DirsChangedEditor < Svn::Delta::ChangedDirsEditor

    # Private functions
    private

    # Print out the name of a directory if it has been changed.
    # But only do so once.
    # This behaves in a way like a callback function does.
    def dir_changed(baton)
      if baton[0]
        # The directory hasn't been printed yet,
        # so print it out.
        puts baton[1] + '/'

        # Make sure we don't print this directory out twice
        baton[0] = nil
      end
    end
  end

  # Output files that have been changed between two roots
  class ChangedEditor < Svn::Delta::BaseEditor

    # Constructor
    def initialize(root, base_root)
      @root = root
      @base_root = base_root
    end

    # Look at the root node
    def open_root(base_revision)
      # Nothing has been printed out yet, so return 'true'.
      [true, '']
    end

    # Output deleted files
    def delete_entry(path, revision, parent_baton)
      # Output deleted paths with a D in front of them
      print "D   #{path}"

      # If we're deleting a directory,
      # indicate this with a trailing slash
      if @base_root.dir?('/' + path)
        puts "/"
      else
        puts
      end
    end

    # Output that a directory has been added
    def add_directory(path, parent_baton,
                      copyfrom_path, copyfrom_revision)
      # Output 'A' to indicate that the directory was added.
      # Also put a trailing slash since it's a directory.
      puts "A   #{path}/"

      # The directory has been printed -- don't print it again.
      [false, path]
    end

    # Recurse inside directories
    def open_directory(path, parent_baton, base_revision)
      # Nothing has been printed out yet, so return true.
      [true, path]
    end

    def change_dir_prop(dir_baton, name, value)
      # Has the directory been printed yet?
      if dir_baton[0]
        # Print the directory
        puts "_U  #{dir_baton[1]}/"

        # Don't let this directory get printed again.
        dir_baton[0] = false
      end
    end

    def add_file(path, parent_baton,
                 copyfrom_path, copyfrom_revision)
      # Output that a directory has been added
      puts "A   #{path}"

      # We've already printed out this entry, so return '_'
      # to prevent it from being printed again
      ['_', ' ', nil]
    end


    def open_file(path, parent_baton, base_revision)
      # Changes have been made -- return '_' to indicate as such
      ['_', ' ', path]
    end

    def apply_textdelta(file_baton, base_checksum)
      # The file has been changed -- we'll print that out later.
      file_baton[0] = 'U'
      nil
    end

    def change_file_prop(file_baton, name, value)
      # The file has been changed -- we'll print that out later.
      file_baton[1] = 'U'
    end

    def close_file(file_baton, text_checksum)
      text_mod, prop_mod, path = file_baton
      # Test the path. It will be nil if we added this file.
      if path
        status = text_mod + prop_mod
        # Was there some kind of change?
        if status != '_ '
          puts "#{status}  #{path}"
        end
      end
    end
  end

  # Output diffs of files that have been changed
  class DiffEditor < Svn::Delta::BaseEditor

    # Constructor
    def initialize(root, base_root)
      @root = root
      @base_root = base_root
    end

    # Handle deleted files and directories
    def delete_entry(path, revision, parent_baton)
      # Print out diffs of deleted files, but not
      # deleted directories
      unless @base_root.dir?('/' + path)
        do_diff(path, nil)
      end
    end

    # Handle added files
    def add_file(path, parent_baton,
                 copyfrom_path, copyfrom_revision)
      # If a file has been added, print out the diff.
      do_diff(nil, path)

      ['_', ' ', nil]
    end

    # Handle files
    def open_file(path, parent_baton, base_revision)
      ['_', ' ', path]
    end

    # If a file is changed, print out the diff
    def apply_textdelta(file_baton, base_checksum)
      if file_baton[2].nil?
        nil
      else
        do_diff(file_baton[2], file_baton[2])
      end
    end

    private

    # Print out a diff between two paths
    def do_diff(base_path, path)
      if base_path.nil?
        # If there's no base path, then the file
        # must have been added
        puts("Added: #{path}")
        name = path
      elsif path.nil?
        # If there's no new path, then the file
        # must have been deleted
        puts("Removed: #{base_path}")
        name = base_path
      else
        # Otherwise, the file must have been modified
        puts "Modified: #{path}"
        name = path
      end

      # Set up labels for the two files
      base_label = "#{name} (original)"
      label = "#{name} (new)"

      # Output a unified diff between the two files
      puts "=" * 78
      differ = Svn::Fs::FileDiff.new(@base_root, base_path, @root, path)
      puts differ.unified(base_label, label)
      puts
    end
  end
end
+
# Output usage message and exit with a failure status.
# Fixed two help-text defects: "data" -> "date" (cmd_info prints the
# date), and "nodes ids" -> "node ids".
def usage
  messages = [
    "usage: #{$0} REPOS_PATH rev REV [COMMAND] - inspect revision REV",
    "       #{$0} REPOS_PATH txn TXN [COMMAND] - inspect transaction TXN",
    "       #{$0} REPOS_PATH [COMMAND] - inspect the youngest revision",
    "",
    "REV is a revision number > 0.",
    "TXN is a transaction name.",
    "",
    "If no command is given, the default output (which is the same as",
    "running the subcommands `info' then `tree') will be printed.",
    "",
    "COMMAND can be one of: ",
    "",
    "   author:        print author.",
    "   changed:       print full change summary: all dirs & files changed.",
    "   date:          print the timestamp (revisions only).",
    "   diff:          print GNU-style diffs of changed files and props.",
    "   dirs-changed:  print changed directories.",
    "   ids:           print the tree, with node ids.",
    "   info:          print the author, date, log_size, and log message.",
    "   log:           print log message.",
    "   tree:          print the tree.",
    "   uuid:          print the repository's UUID (REV and TXN ignored).",
    "   youngest:      print the youngest revision number (REV and TXN ignored).",
  ]
  puts(messages.join("\n"))
  exit(1)
end
+
# Output usage if necessary
if ARGV.empty?
  usage
end

# Process arguments
path = ARGV.shift
cmd = ARGV.shift
rev = nil
txn = nil

case cmd
when "rev"
  # NOTE(review): Integer() raises ArgumentError for a malformed REV,
  # which is not rescued here -- the user gets a backtrace, not usage().
  rev = Integer(ARGV.shift)
  cmd = ARGV.shift
when "txn"
  txn = ARGV.shift
  cmd = ARGV.shift
end

# If no command is specified, use the default
cmd ||= "default"

# Replace dashes in the command with underscores
cmd = cmd.gsub(/-/, '_')

# Start SvnLook with the specified command
SvnLook.new(path, rev, txn).run(cmd)
diff --git a/tools/examples/svnput.c b/tools/examples/svnput.c
new file mode 100644
index 0000000..6414fef
--- /dev/null
+++ b/tools/examples/svnput.c
@@ -0,0 +1,352 @@
+/*
+ * svnput.c : upload a single file to a repository, overwriting
+ * any existing file by the same name.
+ *
+ * ***************************************************************
+
+ * WARNING!! Despite the warnings it gives, this program allows
+ * you to potentially overwrite a file you've never seen.
+ * USE AT YOUR OWN RISK!
+ *
+ * (While the repository won't 'lose' overwritten data, the
+ * overwriting may happen without your knowledge, and has the
+ * potential to cause much grief with your collaborators!)
+ *
+ * ***************************************************************
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ *
+ * To compile on unix against Subversion and APR libraries, try
+ * something like:
+ *
+ * cc svnput.c -o svnput \
+ * -I/usr/local/include/subversion-1 -I/usr/local/apache2/include \
+ * -L/usr/local/apache2/lib -L/usr/local/lib \
+ * -lsvn_client-1 -lapr-0 -laprutil-0
+ *
+ */
+
+#include "svn_client.h"
+#include "svn_pools.h"
+#include "svn_config.h"
+#include "svn_fs.h"
+#include "svn_cmdline.h"
+#include "svn_path.h"
+#include "svn_time.h"
+
+
/* Display a prompt and read a one-line response into the provided buffer,
   removing a trailing newline if present.

   BUFFER receives at most MAX-1 characters (fgets semantics); longer
   input is silently truncated.  Returns an error only if stdin hits
   EOF/error before any input is read. */
static svn_error_t *
prompt_and_read_line(const char *prompt,
                     char *buffer,
                     size_t max)
{
  int len;
  printf("%s: ", prompt);
  if (fgets(buffer, max, stdin) == NULL)
    return svn_error_create(0, NULL, "error reading stdin");
  len = strlen(buffer);
  if (len > 0 && buffer[len-1] == '\n')
    buffer[len-1] = 0;
  return SVN_NO_ERROR;
}
+
/* A tiny callback function of type 'svn_auth_simple_prompt_func_t'.  For
   a much better example, see svn_cl__auth_simple_prompt in the official
   svn cmdline client.

   Fills *CRED with a username/password pair read interactively from
   stdin; the existing USERNAME (if any) is reused without prompting.
   NOTE(review): the password is echoed to the terminal -- this demo
   does not disable echo the way the real client does. */
static svn_error_t *
my_simple_prompt_callback (svn_auth_cred_simple_t **cred,
                           void *baton,
                           const char *realm,
                           const char *username,
                           svn_boolean_t may_save,
                           apr_pool_t *pool)
{
  svn_auth_cred_simple_t *ret = apr_pcalloc (pool, sizeof (*ret));
  char answerbuf[100];

  if (realm)
    {
      printf ("Authentication realm: %s\n", realm);
    }

  if (username)
    ret->username = apr_pstrdup (pool, username);
  else
    {
      SVN_ERR (prompt_and_read_line("Username", answerbuf, sizeof(answerbuf)));
      ret->username = apr_pstrdup (pool, answerbuf);
    }

  SVN_ERR (prompt_and_read_line("Password", answerbuf, sizeof(answerbuf)));
  ret->password = apr_pstrdup (pool, answerbuf);

  *cred = ret;
  return SVN_NO_ERROR;
}
+
+
/* A tiny callback function of type 'svn_auth_username_prompt_func_t'.  For
   a much better example, see svn_cl__auth_username_prompt in the official
   svn cmdline client.

   Fills *CRED with a username read interactively from stdin. */
static svn_error_t *
my_username_prompt_callback (svn_auth_cred_username_t **cred,
                             void *baton,
                             const char *realm,
                             svn_boolean_t may_save,
                             apr_pool_t *pool)
{
  svn_auth_cred_username_t *ret = apr_pcalloc (pool, sizeof (*ret));
  char answerbuf[100];

  if (realm)
    {
      printf ("Authentication realm: %s\n", realm);
    }

  SVN_ERR (prompt_and_read_line("Username", answerbuf, sizeof(answerbuf)));
  ret->username = apr_pstrdup (pool, answerbuf);

  *cred = ret;
  return SVN_NO_ERROR;
}
+
/* A callback function used when the RA layer needs a handle to a
   temporary file.  This is a reduced version of the callback used in
   the official svn cmdline client.

   Opens a unique file under the system temp directory with
   delete-on-close set, so no cleanup is needed on exit. */
static svn_error_t *
open_tmp_file (apr_file_t **fp,
               void *callback_baton,
               apr_pool_t *pool)
{
  const char *path;
  const char *ignored_filename;

  SVN_ERR (svn_io_temp_dir (&path, pool));
  path = svn_path_join (path, "tempfile", pool);

  /* Open a unique file, with delete-on-close set. */
  SVN_ERR (svn_io_open_unique_file2 (fp, &ignored_filename,
                                     path, ".tmp",
                                     svn_io_file_del_on_close, pool));

  return SVN_NO_ERROR;
}
+
+
/* Called when a commit is successful.  Reports the new revision number;
   DATE, AUTHOR and BATON are unused here. */
static svn_error_t *
my_commit_callback (svn_revnum_t new_revision,
                    const char *date,
                    const char *author,
                    void *baton)
{
  printf ("Upload complete.  Committed revision %ld.\n", new_revision);
  return SVN_NO_ERROR;
}
+
+
+
/* Entry point: upload the file at argv[1] to the repository URL argv[2],
   adding it if absent or (after interactive confirmation) overwriting
   the existing file.  Returns EXIT_SUCCESS / EXIT_FAILURE. */
int
main (int argc, const char **argv)
{
  apr_pool_t *pool;
  svn_error_t *err;
  apr_hash_t *dirents;
  const char *upload_file, *URL;
  const char *parent_URL, *basename;
  svn_ra_plugin_t *ra_lib;
  void *session, *ra_baton;
  svn_revnum_t rev;
  const svn_delta_editor_t *editor;
  void *edit_baton;
  svn_dirent_t *dirent;
  svn_ra_callbacks_t *cbtable;
  apr_hash_t *cfg_hash;
  svn_auth_baton_t *auth_baton;

  if (argc <= 2)
    {
      printf ("Usage: %s PATH URL\n", argv[0]);
      printf (" Uploads file at PATH to Subversion repository URL.\n");
      return EXIT_FAILURE;
    }
  upload_file = argv[1];
  URL = argv[2];

  /* Initialize the app. Send all error messages to 'stderr'. */
  if (svn_cmdline_init ("minimal_client", stderr) != EXIT_SUCCESS)
    return EXIT_FAILURE;

  /* Create top-level memory pool. Be sure to read the HACKING file to
     understand how to properly use/free subpools.
     NOTE(review): the pool is never destroyed on any of the return
     paths below; process exit reclaims it, but svn_pool_destroy would
     be tidier. */
  pool = svn_pool_create (NULL);

  /* Initialize the FS library. */
  err = svn_fs_initialize (pool);
  if (err) goto hit_error;

  /* Make sure the ~/.subversion run-time config files exist, and load. */
  err = svn_config_ensure (NULL, pool);
  if (err) goto hit_error;

  err = svn_config_get_config (&cfg_hash, NULL, pool);
  if (err) goto hit_error;

  /* Build an authentication baton. */
  {
    /* There are many different kinds of authentication back-end
       "providers". See svn_auth.h for a full overview. */
    svn_auth_provider_object_t *provider;
    apr_array_header_t *providers
      = apr_array_make (pool, 4, sizeof (svn_auth_provider_object_t *));

    svn_client_get_simple_prompt_provider (&provider,
                                           my_simple_prompt_callback,
                                           NULL, /* baton */
                                           2, /* retry limit */ pool);
    APR_ARRAY_PUSH (providers, svn_auth_provider_object_t *) = provider;

    svn_client_get_username_prompt_provider (&provider,
                                             my_username_prompt_callback,
                                             NULL, /* baton */
                                             2, /* retry limit */ pool);
    APR_ARRAY_PUSH (providers, svn_auth_provider_object_t *) = provider;

    /* Register the auth-providers into the context's auth_baton. */
    svn_auth_open (&auth_baton, providers, pool);
  }

  /* Create a table of callbacks for the RA session, mostly nonexistent. */
  cbtable = apr_pcalloc (pool, sizeof(*cbtable));
  cbtable->auth_baton = auth_baton;
  cbtable->open_tmp_file = open_tmp_file;

  /* Now do the real work. */

  /* Open an RA session to the parent URL, fetch current HEAD rev and
     "lock" onto that revnum for the remainder of the session. */
  svn_path_split (URL, &parent_URL, &basename, pool);

  err = svn_ra_init_ra_libs (&ra_baton, pool);
  if (err) goto hit_error;

  err = svn_ra_get_ra_library (&ra_lib, ra_baton, parent_URL, pool);
  if (err) goto hit_error;

  err = ra_lib->open (&session, parent_URL, cbtable, NULL, cfg_hash, pool);
  if (err) goto hit_error;

  err = ra_lib->get_latest_revnum (session, &rev, pool);
  if (err) goto hit_error;

  /* Examine contents of parent dir in the rev. */
  err = ra_lib->get_dir (session, "", rev, &dirents, NULL, NULL, pool);
  if (err) goto hit_error;

  /* Sanity checks. Don't let the user shoot himself *too* much. */
  dirent = apr_hash_get (dirents, basename, APR_HASH_KEY_STRING);
  if (dirent && dirent->kind == svn_node_dir)
    {
      printf ("Sorry, a directory already exists at that URL.\n");
      return EXIT_FAILURE;
    }
  if (dirent && dirent->kind == svn_node_file)
    {
      /* 5 bytes: room for a short answer like "y"/"no" plus newline;
         fgets truncates anything longer. */
      char answer[5];

      printf ("\n*** WARNING ***\n\n");
      printf ("You're about to overwrite r%ld of this file.\n", rev);
      printf ("It was last changed by user '%s',\n",
              dirent->last_author ? dirent->last_author : "?");
      printf ("on %s.\n", svn_time_to_human_cstring (dirent->time, pool));
      printf ("\nSomebody *might* have just changed the file seconds ago,\n"
              "and your upload would be overwriting their changes!\n\n");

      err = prompt_and_read_line("Are you SURE you want to upload? [y/n]",
                                 answer, sizeof(answer));
      if (err) goto hit_error;

      /* apr_strnatcasecmp returns 0 on a (case-insensitive) match, so any
         answer other than exactly "y"/"Y" aborts. */
      if (apr_strnatcasecmp (answer, "y"))
        {
          printf ("Operation aborted.\n");
          return EXIT_SUCCESS;
        }
    }

  /* Fetch a commit editor (it's anchored on the parent URL, because
     the session is too.) */
  /* ### someday add an option for a user-written commit message?  */
  err = ra_lib->get_commit_editor (session, &editor, &edit_baton,
                                   "File upload from 'svnput' program.",
                                   my_commit_callback, NULL, pool);
  if (err) goto hit_error;

  /* Drive the editor: open the root, add or open the file, stream the
     local file's contents as a text delta, then close the edit (which
     performs the actual commit). */
  {
    void *root_baton, *file_baton, *handler_baton;
    svn_txdelta_window_handler_t handler;
    svn_stream_t *contents;
    apr_file_t *f = NULL;

    err = editor->open_root (edit_baton, rev, pool, &root_baton);
    if (err) goto hit_error;

    if (! dirent)
      {
        err = editor->add_file (basename, root_baton, NULL, SVN_INVALID_REVNUM,
                                pool, &file_baton);
      }
    else
      {
        err = editor->open_file (basename, root_baton, rev, pool,
                                 &file_baton);
      }
    if (err) goto hit_error;

    err = editor->apply_textdelta (file_baton, NULL, pool,
                                   &handler, &handler_baton);
    if (err) goto hit_error;

    err = svn_io_file_open (&f, upload_file, APR_READ, APR_OS_DEFAULT, pool);
    if (err) goto hit_error;

    contents = svn_stream_from_aprfile (f, pool);
    err = svn_txdelta_send_stream (contents, handler, handler_baton,
                                   NULL, pool);
    if (err) goto hit_error;

    err = svn_io_file_close (f, pool);
    if (err) goto hit_error;

    err = editor->close_file (file_baton, NULL, pool);
    if (err) goto hit_error;

    err = editor->close_edit (edit_baton, pool);
    if (err) goto hit_error;
  }

  return EXIT_SUCCESS;

 hit_error:
  svn_handle_error2 (err, stderr, FALSE, "svnput: ");
  return EXIT_FAILURE;
}
diff --git a/tools/examples/svnserve-sgid.c b/tools/examples/svnserve-sgid.c
new file mode 100644
index 0000000..29c7272
--- /dev/null
+++ b/tools/examples/svnserve-sgid.c
@@ -0,0 +1,60 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+/*
+ * Wrapper to run the svnserve process setgid.
+ * The idea is to avoid the problem that some interpreters like bash
+ * invoked by svnserve in hook scripts will reset the effective gid to
+ * the real gid, nuking the effect of an ordinary setgid svnserve binary.
+ * Sadly, to set the real gid portably, you need to be root, if only
+ * for a moment.
+ * Also smashes the environment to something known, so that games
+ * can't be played to try to break the security of the hook scripts,
+ * by setting IFS, PATH, and similar means.
+ */
+/*
+ * Written by Perry Metzger, and placed into the public domain.
+ */
+
+#include <stdio.h>
+#include <unistd.h>
+
/* Path of the real svnserve binary this wrapper exec's. */
#define REAL_PATH "/usr/bin/svnserve.real"

/* Minimal, known-safe environment handed to the real binary, so hook
   scripts can't be subverted via IFS/PATH/etc. */
char *newenv[] = { "PATH=/bin:/usr/bin", "SHELL=/bin/sh", NULL };

int
main(int argc, char **argv)
{
  /* Promote the effective gid (from the setgid bit) to the real gid
     FIRST, while we may still hold elevated privileges; interpreters
     like bash reset egid to rgid, which would otherwise undo the sgid
     effect for hook scripts. */
  if (setgid(getegid()) == -1) {
    perror("setgid(getegid())");
    return 1;
  }

  /* Drop any effective-uid privilege back to the invoking user. */
  if (seteuid(getuid()) == -1) {
    perror("seteuid(getuid())");
    return 1;
  }

  /* Replace ourselves with the real svnserve, passing argv through but
     substituting the sanitized environment.  execve only returns on
     failure. */
  execve(REAL_PATH, argv, newenv);
  perror("attempting to exec " REAL_PATH " failed");
  return 1;
}
diff --git a/tools/examples/svnshell.py b/tools/examples/svnshell.py
new file mode 100755
index 0000000..9c67af4
--- /dev/null
+++ b/tools/examples/svnshell.py
@@ -0,0 +1,367 @@
+#!/usr/bin/env python
+#
+# svnshell.py : a Python-based shell interface for cruising 'round in
+# the filesystem.
+#
+######################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+#
+
+import sys
+import time
+import re
+from cmd import Cmd
+from random import randint
+from svn import fs, core, repos
+
+
+class SVNShell(Cmd):
    def __init__(self, path):
        """initialize an SVNShell object

        path -- filesystem path of the repository to browse.

        Note: the constructor immediately enters the interactive command
        loop (cmdloop), so it does not return until the user exits.
        """
        Cmd.__init__(self)
        path = core.svn_path_canonicalize(path)
        self.fs_ptr = repos.fs(repos.open(path))
        # is_rev is 1 while browsing a revision root, 0 for a transaction.
        self.is_rev = 1
        self.rev = fs.youngest_rev(self.fs_ptr)
        self.txn = None
        self.root = fs.revision_root(self.fs_ptr, self.rev)
        self.path = "/"
        self._setup_prompt()
        self.cmdloop()
+
+ def precmd(self, line):
+ if line == "EOF":
+ # Ctrl-D is a command without a newline. Print a newline, so the next
+ # shell prompt is not on the same line as the last svnshell prompt.
+ print("")
+ return "exit"
+ return line
+
+ def postcmd(self, stop, line):
+ self._setup_prompt()
+
    # Canned responses for unrecognized input (picked at random by default()).
    _errors = ["Huh?",
               "Whatchoo talkin' 'bout, Willis?",
               "Say what?",
               "Nope.  Not gonna do it.",
               "Ehh...I don't think so, chief."]

    def default(self, line):
        """Handle any line that doesn't match a do_* command: print a
        random canned error message instead of cmd.Cmd's default."""
        print(self._errors[randint(0, len(self._errors) - 1)])
+
    def do_cat(self, arg):
        """dump the contents of a file"""
        if not len(arg):
            print("You must supply a file path.")
            return
        catpath = self._parse_path(arg)
        kind = fs.check_path(self.root, catpath)
        if kind == core.svn_node_none:
            print("Path '%s' does not exist." % catpath)
            return
        if kind == core.svn_node_dir:
            print("Path '%s' is not a file." % catpath)
            return
        ### be nice to get some paging in here.
        # Stream the file in fixed-size chunks; a short read marks EOF.
        # NOTE(review): under Python 3 svn_stream_read presumably returns
        # bytes, while sys.stdout.write expects str -- confirm against the
        # bindings in use.
        stream = fs.file_contents(self.root, catpath)
        while True:
            data = core.svn_stream_read(stream, core.SVN_STREAM_CHUNK_SIZE)
            sys.stdout.write(data)
            if len(data) < core.SVN_STREAM_CHUNK_SIZE:
                break
+
    def do_cd(self, arg):
        """change directory"""
        newpath = self._parse_path(arg)

        # make sure that path actually exists in the filesystem as a directory
        kind = fs.check_path(self.root, newpath)
        if kind != core.svn_node_dir:
            print("Path '%s' is not a valid filesystem directory." % newpath)
            return
        self.path = newpath
+
    def do_ls(self, arg):
        """list the contents of the current directory or provided path"""
        parent = self.path
        if not len(arg):
            # no arg -- show a listing for the current directory.
            entries = fs.dir_entries(self.root, self.path)
        else:
            # arg?  show a listing of that path.
            newpath = self._parse_path(arg)
            kind = fs.check_path(self.root, newpath)
            if kind == core.svn_node_dir:
                parent = newpath
                entries = fs.dir_entries(self.root, parent)
            elif kind == core.svn_node_file:
                # For a file argument, list just that one entry from its
                # parent directory.
                parts = self._path_to_parts(newpath)
                name = parts.pop(-1)
                parent = self._parts_to_path(parts)
                print(parent + ':' + name)
                tmpentries = fs.dir_entries(self.root, parent)
                if not tmpentries.get(name, None):
                    return
                entries = {}
                entries[name] = tmpentries[name]
            else:
                print("Path '%s' not found." % newpath)
                return

        keys = sorted(entries.keys())

        print(" REV AUTHOR NODE-REV-ID SIZE DATE NAME")
        print("----------------------------------------------------------------------------")

        # One row per entry: directories get a trailing '/' and no size.
        for entry in keys:
            fullpath = parent + '/' + entry
            size = ''
            is_dir = fs.is_dir(self.root, fullpath)
            if is_dir:
                name = entry + '/'
            else:
                size = str(fs.file_length(self.root, fullpath))
                name = entry
            node_id = fs.unparse_id(entries[entry].id)
            created_rev = fs.node_created_rev(self.root, fullpath)
            author = fs.revision_prop(self.fs_ptr, created_rev,
                                      core.SVN_PROP_REVISION_AUTHOR)
            if not author:
                author = ""
            date = fs.revision_prop(self.fs_ptr, created_rev,
                                    core.SVN_PROP_REVISION_DATE)
            if not date:
                date = ""
            else:
                date = self._format_date(date)

            print("%6s %8s %12s %8s %12s %s" % (created_rev, author[:8],
                                                node_id, size, date, name))
+
    def do_lstxns(self, arg):
        """list the transactions available for browsing"""
        txns = sorted(fs.list_transactions(self.fs_ptr))
        # Print six transaction names per output line.
        counter = 0
        for txn in txns:
            counter = counter + 1
            sys.stdout.write("%8s " % txn)
            if counter == 6:
                print("")
                counter = 0
        print("")
+
    def do_pcat(self, arg):
        """list the properties of a path"""
        catpath = self.path
        if len(arg):
            catpath = self._parse_path(arg)
        kind = fs.check_path(self.root, catpath)
        if kind == core.svn_node_none:
            print("Path '%s' does not exist." % catpath)
            return
        plist = fs.node_proplist(self.root, catpath)
        if not plist:
            return
        # Emit properties in svn dumpfile style: K <keylen>/key,
        # P <vallen>/value pairs terminated by PROPS-END.
        for pkey, pval in plist.items():
            print('K ' + str(len(pkey)))
            print(pkey)
            print('P ' + str(len(pval)))
            print(pval)
        print('PROPS-END')
+
+ def do_setrev(self, arg):
+ """set the current revision to view"""
+ try:
+ if arg.lower() == 'head':
+ rev = fs.youngest_rev(self.fs_ptr)
+ else:
+ rev = int(arg)
+ newroot = fs.revision_root(self.fs_ptr, rev)
+ except:
+ print("Error setting the revision to '" + arg + "'.")
+ return
+ fs.close_root(self.root)
+ self.root = newroot
+ self.rev = rev
+ self.is_rev = 1
+ self._do_path_landing()
+
+ def do_settxn(self, arg):
+ """set the current transaction to view"""
+ try:
+ txnobj = fs.open_txn(self.fs_ptr, arg)
+ newroot = fs.txn_root(txnobj)
+ except:
+ print("Error setting the transaction to '" + arg + "'.")
+ return
+ fs.close_root(self.root)
+ self.root = newroot
+ self.txn = arg
+ self.is_rev = 0
+ self._do_path_landing()
+
    def do_youngest(self, arg):
        """list the youngest revision available for browsing"""
        rev = fs.youngest_rev(self.fs_ptr)
        print(rev)
+
    def do_exit(self, arg):
        """Terminate the shell (also reached via Ctrl-D, see precmd)."""
        sys.exit(0)
+
+ def _path_to_parts(self, path):
+ return [_f for _f in path.split('/') if _f]
+
+ def _parts_to_path(self, parts):
+ return '/' + '/'.join(parts)
+
+ def _parse_path(self, path):
+ # cleanup leading, trailing, and duplicate '/' characters
+ newpath = self._parts_to_path(self._path_to_parts(path))
+
+ # if PATH is absolute, use it, else append it to the existing path.
+ if path.startswith('/') or self.path == '/':
+ newpath = '/' + newpath
+ else:
+ newpath = self.path + '/' + newpath
+
+ # cleanup '.' and '..'
+ parts = self._path_to_parts(newpath)
+ finalparts = []
+ for part in parts:
+ if part == '.':
+ pass
+ elif part == '..':
+ if len(finalparts) != 0:
+ finalparts.pop(-1)
+ else:
+ finalparts.append(part)
+
+ # finally, return the calculated path
+ return self._parts_to_path(finalparts)
+
+ def _format_date(self, date):
+ date = core.svn_time_from_cstring(date)
+ date = time.asctime(time.localtime(date / 1000000))
+ return date[4:-8]
+
+ def _do_path_landing(self):
+ """try to land on self.path as a directory in root, failing up to '/'"""
+ not_found = 1
+ newpath = self.path
+ while not_found:
+ kind = fs.check_path(self.root, newpath)
+ if kind == core.svn_node_dir:
+ not_found = 0
+ else:
+ parts = self._path_to_parts(newpath)
+ parts.pop(-1)
+ newpath = self._parts_to_path(parts)
+ self.path = newpath
+
+ def _setup_prompt(self):
+ """present the prompt and handle the user's input"""
+ if self.is_rev:
+ self.prompt = "<rev: " + str(self.rev)
+ else:
+ self.prompt = "<txn: " + self.txn
+ self.prompt += " " + self.path + ">$ "
+
  def _complete(self, text, line, begidx, endidx, limit_node_kind=None):
    """Generic tab completer. Takes the 4 standard parameters passed to a
    cmd.Cmd completer function, plus LIMIT_NODE_KIND, which should be a
    svn.core.svn_node_foo constant to restrict the returned completions to, or
    None for no limit. Catches and displays exceptions, because otherwise
    they are silently ignored - which is quite frustrating when debugging!"""
    try:
      # The first whitespace-separated token after the command name is the
      # (possibly partial) path being completed.
      args = line.split()
      if len(args) > 1:
        arg = args[1]
      else:
        arg = ""
      # Split into the directory typed so far and the partial leaf name.
      dirs = arg.split('/')
      user_elem = dirs[-1]
      user_dir = "/".join(dirs[:-1] + [''])

      # Canonicalize the typed directory relative to the current path.
      canon_dir = self._parse_path(user_dir)

      entries = fs.dir_entries(self.root, canon_dir)
      acceptable_completions = []
      for name, dirent_t in entries.items():
        if not name.startswith(user_elem):
          continue
        if limit_node_kind and dirent_t.kind != limit_node_kind:
          continue
        # Directories get a trailing '/' so completion can continue deeper.
        if dirent_t.kind == core.svn_node_dir:
          name += '/'
        acceptable_completions.append(name)
      # '.' and '..' are not real directory entries; offer them manually
      # whenever directories are acceptable completions.
      if limit_node_kind == core.svn_node_dir or not limit_node_kind:
        if user_elem in ('.', '..'):
          for extraname in ('.', '..'):
            if extraname.startswith(user_elem):
              acceptable_completions.append(extraname + '/')
      return acceptable_completions
    except:
      # cmd.Cmd silently drops completer exceptions; report loudly, then
      # re-raise so the failure is still visible to the framework.
      ei = sys.exc_info()
      sys.stderr.write("EXCEPTION WHILST COMPLETING\n")
      import traceback
      traceback.print_tb(ei[2])
      sys.stderr.write("%s: %s\n" % (ei[0], ei[1]))
      raise
+
  def complete_cd(self, text, line, begidx, endidx):
    # cd: only directories are valid targets.
    return self._complete(text, line, begidx, endidx, core.svn_node_dir)

  def complete_cat(self, text, line, begidx, endidx):
    # cat: only files are valid targets.
    return self._complete(text, line, begidx, endidx, core.svn_node_file)

  def complete_ls(self, text, line, begidx, endidx):
    # ls: both files and directories are acceptable.
    return self._complete(text, line, begidx, endidx)

  def complete_pcat(self, text, line, begidx, endidx):
    # pcat: both files and directories are acceptable.
    return self._complete(text, line, begidx, endidx)
+
+
+def _basename(path):
+ "Return the basename for a '/'-separated path."
+ idx = path.rfind('/')
+ if idx == -1:
+ return path
+ return path[idx+1:]
+
+
def usage(exit):
  """Print a usage message (to stderr when exiting non-zero, stdout
  otherwise) and terminate the process with EXIT as the status code."""
  output = sys.stderr if exit else sys.stdout
  output.write(
    "usage: %s REPOS_PATH\n"
    "\n"
    "Once the program has started, type 'help' at the prompt for hints on\n"
    "using the shell.\n" % sys.argv[0])
  sys.exit(exit)
+
def main():
  """Entry point: expects exactly one argument, the repository path."""
  if len(sys.argv) != 2:
    usage(1)

  # Constructing SVNShell presumably enters the interactive command loop;
  # its __init__ is defined earlier in this file (outside this chunk).
  SVNShell(sys.argv[1])

if __name__ == '__main__':
  main()
diff --git a/tools/examples/svnshell.rb b/tools/examples/svnshell.rb
new file mode 100755
index 0000000..a49000e
--- /dev/null
+++ b/tools/examples/svnshell.rb
@@ -0,0 +1,456 @@
+#!/usr/bin/env ruby
+#
+# svnshell.rb : a Ruby-based shell interface for cruising 'round in
+# the filesystem.
+#
+# Usage: ruby svnshell.rb REPOS_PATH, where REPOS_PATH is a path to
+# a repository on your local filesystem.
+#
+# NOTE: This program requires the Ruby readline extension.
+# See http://wiki.rubyonrails.com/rails/show/ReadlineLibrary
+# for details on how to install readline for Ruby.
+#
+######################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+#
+
+require "readline"
+require "shellwords"
+
+require "svn/fs"
+require "svn/core"
+require "svn/repos"
+
+# SvnShell: a Ruby-based shell interface for cruising 'round in
+# the filesystem.
class SvnShell

  # A list of potential commands. This list is populated by
  # the 'method_added' function (see below).
  WORDS = []

  # Check for methods that start with "do_"
  # and list them as potential commands
  class << self
    def method_added(name)
      if /^do_(.*)$/ =~ name.to_s
        WORDS << $1
      end
    end
  end

  # Constructor for SvnShell
  #
  # path: The path to a Subversion repository
  def initialize(path)
    @repos_path = path
    @path = "/"
    self.rev = youngest_rev
    @exited = false
  end

  # Run the shell
  def run

    # While the user hasn't typed 'exit' and there is still input to be read
    while !@exited and buf = Readline.readline(prompt, true)

      # Parse the command line into a single command and arguments
      cmd, *args = Shellwords.shellwords(buf)

      # Skip empty lines
      next if /\A\s*\z/ =~ cmd.to_s

      # Open a new connection to the repo
      @fs = Svn::Repos.open(@repos_path).fs
      setup_root

      # Execute the specified command
      dispatch(cmd, *args)

      # Find a path that exists in the current revision
      @path = find_available_path

      # Close the connection to the repo
      @root.close

    end
  end

  # Private functions
  private

  # Get the current prompt string
  def prompt

    # Gather data for the prompt string
    if rev_mode?
      mode = "rev"
      info = @rev
    else
      mode = "txn"
      info = @txn
    end

    # Return the prompt string
    "<#{mode}: #{info} #{@path}>$ "
  end

  # Dispatch a command to the appropriate do_* subroutine
  def dispatch(cmd, *args)

    # Dispatch cmd to the appropriate do_* function
    if respond_to?("do_#{cmd}", true)
      begin
        __send__("do_#{cmd}", *args)
      rescue ArgumentError
        # puts $!.message
        # puts $@
        puts("Invalid argument for #{cmd}: #{args.join(' ')}")
      end
    else
      puts("Unknown subcommand: #{cmd}")
      puts("Try one of these commands: ", WORDS.sort.join(" "))
    end
  end

  # Output the contents of a file from the repository
  def do_cat(path)

    # Normalize the path to an absolute path
    normalized_path = normalize_path(path)

    # Check what type of node exists at the specified path
    case @root.check_path(normalized_path)
    when Svn::Core::NODE_NONE
      puts "Path '#{normalized_path}' does not exist."
    when Svn::Core::NODE_DIR
      puts "Path '#{normalized_path}' is not a file."
    else
      # Output the file to standard out
      @root.file_contents(normalized_path) do |stream|
        puts stream.read(@root.file_length(normalized_path))
      end
    end
  end

  # Set the current directory
  def do_cd(path="/")

    # Normalize the path to an absolute path
    normalized_path = normalize_path(path)

    # If it's a valid directory, then set the directory
    if @root.check_path(normalized_path) == Svn::Core::NODE_DIR
      @path = normalized_path
    else
      puts "Path '#{normalized_path}' is not a valid filesystem directory."
    end
  end

  # List the contents of the current directory or provided paths
  def do_ls(*paths)

    # Default to listing the contents of the current directory
    paths << @path if paths.empty?

    # Foreach path
    paths.each do |path|

      # Normalize the path to an absolute path
      normalized_path = normalize_path(path)

      # Is it a directory or file?
      case @root.check_path(normalized_path)
      when Svn::Core::NODE_DIR

        # Output the contents of the directory
        parent = normalized_path
        entries = @root.dir_entries(parent)

      when Svn::Core::NODE_FILE

        # Split the path into directory and filename components
        parts = path_to_parts(normalized_path)
        name = parts.pop
        parent = parts_to_path(parts)

        # Output the filename
        puts "#{parent}:#{name}"

        # Double check that the file exists
        # inside the parent directory
        parent_entries = @root.dir_entries(parent)
        if parent_entries[name].nil?
          # Hmm. We found the file, but it doesn't exist inside
          # the parent directory. That's a bit unusual.
          puts "No directory entry found for '#{normalized_path}'"
          next
        else
          # Save the path so it can be output in detail
          entries = {name => parent_entries[name]}
        end
      else
        # Path is not a directory or a file,
        # so it must not exist
        puts "Path '#{normalized_path}' not found."
        next
      end

      # Output a detailed listing of the files we found
      puts "   REV   AUTHOR  NODE-REV-ID     SIZE         DATE NAME"
      puts "-" * 76

      # For each entry we found...
      entries.keys.sort.each do |entry|

        # Calculate the full path to the directory entry
        fullpath = parent + '/' + entry
        if @root.dir?(fullpath)
          # If it's a directory, output an extra slash
          size = ''
          name = entry + '/'
        else
          # If it's a file, output the size of the file
          size = @root.file_length(fullpath).to_i.to_s
          name = entry
        end

        # Output the entry
        node_id = entries[entry].id.to_s
        created_rev = @root.node_created_rev(fullpath)
        author = @fs.prop(Svn::Core::PROP_REVISION_AUTHOR, created_rev).to_s
        date = @fs.prop(Svn::Core::PROP_REVISION_DATE, created_rev)
        args = [
          created_rev, author[0,8],
          node_id, size, date.strftime("%b %d %H:%M(%Z)"), name
        ]
        puts "%6s %8s <%10s> %8s %17s %s" % args

      end
    end
  end

  # List all currently open transactions available for browsing
  def do_lstxns

    # Get a sorted list of open transactions.
    # BUG FIX: Array#sort returns a new array; the previous bare
    # 'txns.sort' discarded its result, so the list was never sorted.
    txns = @fs.transactions.sort
    counter = 0

    # Output the open transactions, six per line.
    # BUG FIX: use print, not puts, so names stay on one line until the
    # every-sixth newline below (matching the Python svnshell).
    txns.each do |txn|
      counter = counter + 1
      print("%8s " % txn)

      # Every six transactions, output an extra newline
      if counter == 6
        puts
        counter = 0
      end
    end
    puts
  end

  # Output the properties of a particular path
  def do_pcat(path=nil)

    # Default to the current directory
    catpath = path ? normalize_path(path) : @path

    # Make sure that the specified path exists
    if @root.check_path(catpath) == Svn::Core::NODE_NONE
      puts "Path '#{catpath}' does not exist."
      return
    end

    # Get the list of properties
    plist = @root.node_proplist(catpath)
    return if plist.nil?

    # Output each property
    plist.each do |key, value|
      puts "K #{key.size}"
      puts key
      puts "P #{value.size}"
      puts value
    end

    # That's all folks!
    puts 'PROPS-END'

  end

  # Set the current revision to view
  def do_setrev(rev)

    # Make sure the specified revision exists
    begin
      @fs.root(Integer(rev)).close
    rescue Svn::Error
      puts "Error setting the revision to '#{rev}': #{$!.message}"
      return
    end

    # Set the revision
    self.rev = Integer(rev)

  end

  # Open an existing transaction to view
  def do_settxn(name)

    # Make sure the specified transaction exists
    begin
      txn = @fs.open_txn(name)
      txn.root.close
    rescue Svn::Error
      puts "Error setting the transaction to '#{name}': #{$!.message}"
      return
    end

    # Set the transaction
    self.txn = name

  end

  # List the youngest revision available for browsing
  def do_youngest
    rev = @fs.youngest_rev
    puts rev
  end

  # Exit this program
  def do_exit
    @exited = true
  end

  # Find the youngest revision
  def youngest_rev
    Svn::Repos.open(@repos_path).fs.youngest_rev
  end

  # Set the current revision
  def rev=(new_value)
    @rev = new_value
    @txn = nil
    reset_root
  end

  # Set the current transaction
  def txn=(new_value)
    @txn = new_value
    reset_root
  end

  # Check whether we are in 'revision-mode'
  def rev_mode?
    @txn.nil?
  end

  # Close the current root and setup a new one
  def reset_root
    if @root
      @root.close
      setup_root
    end
  end

  # Setup a new root
  def setup_root
    if rev_mode?
      @root = @fs.root(@rev)
    else
      # BUG FIX: this previously read '@fs.open_txn(name)', but 'name' is
      # not defined in this method (NameError in txn mode); the current
      # transaction name is stored in @txn.
      @root = @fs.open_txn(@txn).root
    end
  end

  # Convert a path into its component parts
  def path_to_parts(path)
    path.split(/\/+/)
  end

  # Join the component parts of a path into a string
  def parts_to_path(parts)
    normalized_parts = parts.reject{|part| part.empty?}
    "/#{normalized_parts.join('/')}"
  end

  # Convert a path to a normalized, absolute path
  def normalize_path(path)

    # Convert the path to an absolute path
    if path[0,1] != "/" and @path != "/"
      path = "#{@path}/#{path}"
    end

    # Split the path into its component parts
    parts = path_to_parts(path)

    # Build a list of the normalized parts of the path
    normalized_parts = []
    parts.each do |part|
      case part
      when "."
        # ignore
      when ".."
        normalized_parts.pop
      else
        normalized_parts << part
      end
    end

    # Join the normalized parts together into a string
    parts_to_path(normalized_parts)

  end

  # Find the parent directory of a specified path
  def parent_dir(path)
    normalize_path("#{path}/..")
  end

  # Try to land on the specified path as a directory.
  # If the specified path does not exist, look for
  # an ancestor path that does exist.
  def find_available_path(path=@path)
    if @root.check_path(path) == Svn::Core::NODE_DIR
      path
    else
      find_available_path(parent_dir(path))
    end
  end

end
+
+
# Autocomplete commands
# Offer every do_* command name that begins with the word typed so far.
Readline.completion_proc = Proc.new do |word|
  SvnShell::WORDS.grep(/^#{Regexp.quote(word)}/)
end

# Output usage information if necessary
if ARGV.size != 1
  puts "Usage: #{$0} REPOS_PATH"
  exit(1)
end

# Create a new SvnShell with the command-line arguments and run it
SvnShell.new(ARGV.shift).run
diff --git a/tools/examples/testwrite.c b/tools/examples/testwrite.c
new file mode 100644
index 0000000..beb2fba
--- /dev/null
+++ b/tools/examples/testwrite.c
@@ -0,0 +1,276 @@
+/*
+ * testwrite.c : test whether a user has commit access.
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ *
+ * To compile on unix against Subversion and APR libraries, try
+ * something like:
+ *
+ * cc testwrite.c -o testwrite \
+ * -I/usr/local/include/subversion-1 -I/usr/local/apache2/include \
+ * -L/usr/local/apache2/lib -L/usr/local/lib \
+ *   -lsvn_client-1 -lsvn_ra-1 -lsvn_subr-1 -lsvn_fs-1 -lapr-0 -laprutil-0
+ *
+ */
+
+#include "svn_client.h"
+#include "svn_pools.h"
+#include "svn_config.h"
+#include "svn_fs.h"
+#include "svn_cmdline.h"
+#include "svn_path.h"
+#include "svn_time.h"
+
+
/* Display a prompt and read a one-line response into the provided buffer,
   removing a trailing newline if present.

   PROMPT is printed verbatim (followed by ": "); at most MAX-1 characters
   are stored into BUFFER.  Returns an error if stdin is exhausted. */
static svn_error_t *
prompt_and_read_line(const char *prompt,
                     char *buffer,
                     size_t max)
{
  int len;
  printf("%s: ", prompt);
  if (fgets(buffer, max, stdin) == NULL)
    return svn_error_create(0, NULL, "error reading stdin");
  /* Strip the newline fgets leaves in place (absent if the line was
     longer than the buffer). */
  len = strlen(buffer);
  if (len > 0 && buffer[len-1] == '\n')
    buffer[len-1] = 0;
  return SVN_NO_ERROR;
}
+
/* A tiny callback function of type 'svn_auth_simple_prompt_func_t'.  For
   a much better example, see svn_cl__auth_simple_prompt in the official
   svn cmdline client. */
static svn_error_t *
my_simple_prompt_callback (svn_auth_cred_simple_t **cred,
                           void *baton,
                           const char *realm,
                           const char *username,
                           svn_boolean_t may_save,
                           apr_pool_t *pool)
{
  /* Allocate the credential struct in POOL so it outlives this call. */
  svn_auth_cred_simple_t *ret = apr_pcalloc (pool, sizeof (*ret));
  char answerbuf[100];

  if (realm)
    {
      printf ("Authentication realm: %s\n", realm);
    }

  /* Reuse a username the auth system already knows; otherwise ask. */
  if (username)
    ret->username = apr_pstrdup (pool, username);
  else
    {
      SVN_ERR (prompt_and_read_line("Username", answerbuf, sizeof(answerbuf)));
      ret->username = apr_pstrdup (pool, answerbuf);
    }

  /* NOTE(review): the password is echoed to the terminal -- acceptable
     for a test utility, not for production use. */
  SVN_ERR (prompt_and_read_line("Password", answerbuf, sizeof(answerbuf)));
  ret->password = apr_pstrdup (pool, answerbuf);

  *cred = ret;
  return SVN_NO_ERROR;
}
+
+
/* A tiny callback function of type 'svn_auth_username_prompt_func_t'.  For
   a much better example, see svn_cl__auth_username_prompt in the official
   svn cmdline client. */
static svn_error_t *
my_username_prompt_callback (svn_auth_cred_username_t **cred,
                             void *baton,
                             const char *realm,
                             svn_boolean_t may_save,
                             apr_pool_t *pool)
{
  /* Allocate the credential struct in POOL so it outlives this call. */
  svn_auth_cred_username_t *ret = apr_pcalloc (pool, sizeof (*ret));
  char answerbuf[100];

  if (realm)
    {
      printf ("Authentication realm: %s\n", realm);
    }

  SVN_ERR (prompt_and_read_line("Username", answerbuf, sizeof(answerbuf)));
  ret->username = apr_pstrdup (pool, answerbuf);

  *cred = ret;
  return SVN_NO_ERROR;
}
+
/* A callback function used when the RA layer needs a handle to a
   temporary file.  This is a reduced version of the callback used in
   the official svn cmdline client.

   On success *FP is an open APR file in the system temp directory. */
static svn_error_t *
open_tmp_file (apr_file_t **fp,
               void *callback_baton,
               apr_pool_t *pool)
{
  const char *path;
  const char *ignored_filename;

  /* Build "<tempdir>/tempfile"; svn_io_open_unique_file2 appends a
     unique suffix so concurrent callers do not collide. */
  SVN_ERR (svn_io_temp_dir (&path, pool));
  path = svn_path_join (path, "tempfile", pool);

  /* Open a unique file, with delete-on-close set. */
  SVN_ERR (svn_io_open_unique_file2 (fp, &ignored_filename,
                                     path, ".tmp",
                                     svn_io_file_del_on_close, pool));

  return SVN_NO_ERROR;
}
+
+
/* Called when a commit is successful.  Matches the commit-callback type
   expected by get_commit_editor; DATE/AUTHOR/BATON are unused here. */
static svn_error_t *
my_commit_callback (svn_revnum_t new_revision,
                    const char *date,
                    const char *author,
                    void *baton)
{
  /* svn_revnum_t is a long, so %ld is the right conversion here. */
  printf ("Upload complete.  Committed revision %ld.\n", new_revision);
  return SVN_NO_ERROR;
}
+
+
+
+int
+main (int argc, const char **argv)
+{
+ apr_pool_t *pool;
+ svn_error_t *err;
+ apr_hash_t *dirents;
+ const char *upload_file, *URL;
+ const char *parent_URL, *basename;
+ svn_ra_plugin_t *ra_lib;
+ void *session, *ra_baton;
+ svn_revnum_t rev;
+ const svn_delta_editor_t *editor;
+ void *edit_baton;
+ svn_dirent_t *dirent;
+ svn_ra_callbacks_t *cbtable;
+ apr_hash_t *cfg_hash;
+ svn_auth_baton_t *auth_baton;
+
+ if (argc <= 1)
+ {
+ printf ("Usage: %s URL\n", argv[0]);
+ printf (" Tries to create an svn commit-transaction at URL.\n");
+ return EXIT_FAILURE;
+ }
+ URL = argv[1];
+
+ /* Initialize the app. Send all error messages to 'stderr'. */
+ if (svn_cmdline_init ("minimal_client", stderr) != EXIT_SUCCESS)
+ return EXIT_FAILURE;
+
+ /* Create top-level memory pool. Be sure to read the HACKING file to
+ understand how to properly use/free subpools. */
+ pool = svn_pool_create (NULL);
+
+ /* Initialize the FS library. */
+ err = svn_fs_initialize (pool);
+ if (err) goto hit_error;
+
+ /* Make sure the ~/.subversion run-time config files exist, and load. */
+ err = svn_config_ensure (NULL, pool);
+ if (err) goto hit_error;
+
+ err = svn_config_get_config (&cfg_hash, NULL, pool);
+ if (err) goto hit_error;
+
+ /* Build an authentication baton. */
+ {
+ /* There are many different kinds of authentication back-end
+ "providers". See svn_auth.h for a full overview. */
+ svn_auth_provider_object_t *provider;
+ apr_array_header_t *providers
+ = apr_array_make (pool, 4, sizeof (svn_auth_provider_object_t *));
+
+ svn_client_get_simple_prompt_provider (&provider,
+ my_simple_prompt_callback,
+ NULL, /* baton */
+ 2, /* retry limit */ pool);
+ APR_ARRAY_PUSH (providers, svn_auth_provider_object_t *) = provider;
+
+ svn_client_get_username_prompt_provider (&provider,
+ my_username_prompt_callback,
+ NULL, /* baton */
+ 2, /* retry limit */ pool);
+ APR_ARRAY_PUSH (providers, svn_auth_provider_object_t *) = provider;
+
+ /* Register the auth-providers into the context's auth_baton. */
+ svn_auth_open (&auth_baton, providers, pool);
+ }
+
+ /* Create a table of callbacks for the RA session, mostly nonexistent. */
+ cbtable = apr_pcalloc (pool, sizeof(*cbtable));
+ cbtable->auth_baton = auth_baton;
+ cbtable->open_tmp_file = open_tmp_file;
+
+ /* Now do the real work. */
+
+ /* Open an RA session to the parent URL, fetch current HEAD rev and
+ "lock" onto that revnum for the remainder of the session. */
+ svn_path_split (URL, &parent_URL, &basename, pool);
+
+ err = svn_ra_init_ra_libs (&ra_baton, pool);
+ if (err) goto hit_error;
+
+ err = svn_ra_get_ra_library (&ra_lib, ra_baton, parent_URL, pool);
+ if (err) goto hit_error;
+
+ err = ra_lib->open (&session, parent_URL, cbtable, NULL, cfg_hash, pool);
+ if (err) goto hit_error;
+
+ /* Fetch a commit editor (it's anchored on the parent URL, because
+ the session is too.) */
+ /* ### someday add an option for a user-written commit message? */
+ err = ra_lib->get_commit_editor (session, &editor, &edit_baton,
+ "File upload from 'svnput' program.",
+ my_commit_callback, NULL, pool);
+ if (err) goto hit_error;
+
+ /* Drive the editor */
+ {
+ void *root_baton, *file_baton, *handler_baton;
+ svn_txdelta_window_handler_t handler;
+ svn_stream_t *contents;
+ apr_file_t *f = NULL;
+
+ err = editor->open_root (edit_baton, rev, pool, &root_baton);
+ if (err) goto hit_error;
+
+ err = editor->abort_edit (edit_baton, pool);
+ if (err) goto hit_error;
+ }
+
+ printf ("No problems creating commit transaction.\n");
+ return EXIT_SUCCESS;
+
+ hit_error:
+ {
+ printf("Could not open a commit transaction.\n");
+ svn_handle_error2 (err, stderr, FALSE, "testwrite: ");
+ return EXIT_FAILURE;
+ }
+
+}
diff --git a/tools/examples/walk-config-auth.py b/tools/examples/walk-config-auth.py
new file mode 100755
index 0000000..5841b6c
--- /dev/null
+++ b/tools/examples/walk-config-auth.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+
+import sys
+import os
+import svn.core
+import svn.client
+
+if '--help' in sys.argv:
+ sys.stdout.write("""\
+Usage: %s [CONFIG_DIR]
+
+Crawl the authentication credentials cache under CONFIG_DIR (or the
+default user Subversion runtime configuration directory if not
+provided), displaying what is found and prompting the user regarding
+whether Subversion should or should not delete each cached set of
+credentials found.
+
+""" % (sys.argv[0]))
+ sys.exit(0)
+
# Default to the user's runtime configuration area; an explicit
# CONFIG_DIR command-line argument overrides it.
config_dir = svn.core.svn_config_get_user_config_path(None, '')
if len(sys.argv) > 1:
  config_dir = sys.argv[1]

# Make sure the configuration directory exists before walking it.
svn.core.svn_config_ensure(config_dir)
+
def print_help():
  """Print the menu of per-credential actions accepted by walk_func."""
  sys.stdout.write("""\
  Valid actions are as follows:
    (v)  view details of the credentials
    (d)  delete the credentials
    (n)  continue to next credentials
    (q)  quit the program
    (?)  show this help output

""")
+
def show_creds(hash):
  """Render the credential fields in HASH as a two-column ASCII table.

  The parameter keeps its historical name 'hash' (shadowing the builtin)
  so the external interface is unchanged.
  """
  # BUG FIX: the original did hash.keys() followed by .sort(), which
  # fails on Python 3 where keys() returns a view; sorted() works on
  # both Python 2 and 3.  (An unused 'maxvallen' was also dropped.)
  hash_keys = sorted(hash.keys())
  maxkeylen = max(map(len, hash_keys))
  def rule():
    # Horizontal separator sized to the key column, 78 chars wide total.
    sys.stdout.write("+")
    sys.stdout.write("-" * (maxkeylen + 2))
    sys.stdout.write("+")
    sys.stdout.write("-" * (78 - maxkeylen - 2))
    sys.stdout.write("\n")
  rule()
  for key in hash_keys:
    sys.stdout.write("| %s | %s\n" % (key.ljust(maxkeylen), hash[key]))
  rule()
+
def walk_func(cred_kind, realmstring, hash, pool):
  """Callback for svn_config_walk_auth_data: show one cached credential
  set and ask the user what to do with it.

  Returns 1 to delete the credentials, 0 to keep them; raises
  SVN_ERR_CEASE_INVOCATION to stop the walk entirely.
  """
  # Show just the kind and realm up front; 'v' reveals the full hash.
  show_creds({ 'cred_kind' : cred_kind,
               'realmstring' : realmstring })
  while 1:
    # raw_input: this script is Python 2.
    yesno = raw_input("  Action (v/d/n/q/?) [n]? ")
    if yesno == '?':
      print_help()
    elif yesno == 'v':
      show_creds(hash)
    elif yesno == 'n':
      return 0
    elif yesno == 'd':
      return 1
    elif yesno == 'q':
      # CEASE_INVOCATION tells the walker to stop without reporting an
      # error to the caller.
      raise svn.core.SubversionException("", svn.core.SVN_ERR_CEASE_INVOCATION)
    elif yesno == '':
      # Empty input takes the default action: keep and continue.
      return 0
    else:
      sys.stderr.write("ERROR: Invalid input")
+
+svn.core.svn_config_walk_auth_data(config_dir, walk_func)
diff --git a/tools/hook-scripts/CVE-2017-9800-pre-commit.py b/tools/hook-scripts/CVE-2017-9800-pre-commit.py
new file mode 100755
index 0000000..2971545
--- /dev/null
+++ b/tools/hook-scripts/CVE-2017-9800-pre-commit.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python2
+
+# Licensed under the same terms as Subversion: the Apache License, Version 2.0
+#
+# pre-commit hook script for Subversion CVE-2017-9800
+#
+# This prevents commits that set svn:externals containing suspicious
+# svn+ssh:// URLs.
+#
+# With this script installed a commit like the one below should fail:
+#
+# svnmucc -mm propset svn:externals 'svn+ssh://-localhost/X X' REPOSITORY-URL
+
+import sys, locale, urllib, urlparse, curses.ascii
+from svn import wc, repos, fs
+
+# A simple whitelist to ensure these are not suspicious:
+# user@server
+# [::1]:22
+# server-name
+# server_name
+# 127.0.0.1
+# with an extra restriction that a leading '-' is suspicious.
def suspicious_host(host):
    """Return True iff HOST does not look like a plain host specification.

    Whitelist: alphanumerics plus the characters ':.-_[]@' (covering
    user@server, [::1]:22, server-name, server_name, 127.0.0.1).  A
    leading '-' is always suspicious (argument injection, CVE-2017-9800).
    """
    # BUG FIX: an empty netloc (e.g. from 'svn+ssh://') previously made
    # host[0] raise IndexError, crashing the hook; treat it as suspicious
    # instead so the commit is still blocked.
    if not host or host[0] == '-':
        return True
    for char in host:
        if not curses.ascii.isalnum(char) and not char in ':.-_[]@':
            return True
    return False
+
# Command-line arguments arrive in the process locale; the Subversion
# Python bindings want UTF-8.  (Python 2 only: str.decode/encode on byte
# strings, the urlparse/urllib module layout, and dict.iteritems.)
native = locale.getlocale()[1]
if not native: native = 'ascii'
repos_handle = repos.open(sys.argv[1].decode(native).encode('utf-8'))
fs_handle = repos.fs(repos_handle)
txn_handle = fs.open_txn(fs_handle, sys.argv[2].decode(native).encode('utf-8'))
txn_root = fs.txn_root(txn_handle)
# Root of the revision this transaction is based on, used below to read
# the pre-commit value of svn:externals.
rev_root = fs.revision_root(fs_handle, fs.txn_root_base_revision(txn_root))

for path, change in fs.paths_changed2(txn_root).iteritems():

    if change.prop_mod:

        # The new value, if any
        txn_prop = fs.node_prop(txn_root, path, "svn:externals")
        if not txn_prop:
            continue

        # The old value, if any
        rev_prop = None
        if change.change_kind == fs.path_change_modify:
            rev_prop = fs.node_prop(rev_root, path, "svn:externals")
        elif change.change_kind == fs.path_change_add and change.copyfrom_path:
            copy_root = fs.revision_root(fs_handle, change.copyfrom_rev)
            rev_prop = fs.node_prop(copy_root, change.copyfrom_path,
                                    "svn:externals")

        # Only validate values this commit actually introduces or changes.
        if txn_prop != rev_prop:
            error_path = path.decode('utf-8').encode(native, 'replace')
            externals = []
            try:
                externals = wc.parse_externals_description2(path, txn_prop)
            except:
                # An unparsable svn:externals is rejected outright.
                sys.stderr.write("Commit blocked due to parse failure "
                                 "on svn:externals for %s\n" % error_path)
                sys.exit(1)
            for external in externals:
                # Only svn+* schemes tunnel through an external program
                # (CVE-2017-9800); check their host part for injection.
                parsed = urlparse.urlparse(urllib.unquote(external.url))
                if (parsed and parsed.scheme[:4] == "svn+"
                    and suspicious_host(parsed.netloc)):
                    sys.stderr.write("Commit blocked due to suspicious URL "
                                     "containing %r in svn:externals "
                                     "for %s\n" % (parsed.netloc, error_path))
                    sys.exit(1)
diff --git a/tools/hook-scripts/commit-access-control.cfg.example b/tools/hook-scripts/commit-access-control.cfg.example
new file mode 100644
index 0000000..674c190
--- /dev/null
+++ b/tools/hook-scripts/commit-access-control.cfg.example
@@ -0,0 +1,74 @@
+# This is a sample configuration file for commit-access-control.pl.
+#
+# $Id: commit-access-control.cfg.example 845362 2003-03-12 05:56:21Z kfogel $
+#
+# This file uses the Windows ini style, where the file consists of a
+# number of sections, each section starts with a unique section name
+# in square brackets. Parameters in each section are specified as
+# Name = Value. Any spaces around the equal sign will be ignored. If
+# there are multiple sections with exactly the same section name, then
+# the parameters in those sections will be added together to produce
+# one section with cumulative parameters.
+#
+# The commit-access-control.pl script reads these sections in order,
+# so later sections may overwrite permissions granted or removed in
+# previous sections.
+#
+# Each section has three valid parameters. Any other parameters are
+# ignored.
+# access = (read-only|read-write)
+#
+# This parameter is a required parameter. Valid values are
+# `read-only' and `read-write'.
+#
+# The access rights to apply to modified files and directories
+# that match the `match' regular expression described later on.
+#
+# match = PERL_REGEX
+#
+# This parameter is a required parameter and its value is a Perl
+# regular expression.
+#
+# To help users that automatically write regular expressions that
+# match the beginning of absolute paths using ^/, the script
+# removes the / character because subversion paths, while they
+# start at the root level, do not begin with a /.
+#
+# users = username1 [username2 [username3 [username4 ...]]]
+# or
+# users = username1 [username2]
+# users = username3 username4
+#
+# This parameter is optional. The usernames listed here must be
+# exact usernames. There is no regular expression matching for
+# usernames. You may specify all the usernames that apply on one
+# line or split the names up on multiple lines.
+#
+# The access rights from `access' are applied to ALL modified
+# paths that match the `match' regular expression only if NO
+# usernames are specified in the section or if one of the listed
+# usernames matches the author of the commit.
+#
+# By default, because you're using commit-access-control.pl in the
+# first place to protect your repository, the script sets the
+# permissions to all files and directories in the repository to
+# read-only, so if you want to open up portions of the repository,
+# you'll need to edit this file.
+#
+# NOTE: NEVER GIVE DIFFERENT SECTIONS THE SAME SECTION NAME, OTHERWISE
+# THE PARAMETERS FOR THOSE SECTIONS WILL BE MERGED TOGETHER INTO ONE
+# SECTION AND YOUR SECURITY MAY BE COMPROMISED.
+
+[Make everything read-only for all users]
+match = .*
+access = read-only
+
+[Make project1 read-write for users Jane and Joe]
+match = ^(branches|tags|trunk)/project1
+users = jane joe
+access = read-write
+
+[However, we don't trust Joe with project1's Makefile]
+match = ^(branches|tags|trunk)/project1/Makefile
+users = joe
+access = read-only
diff --git a/tools/hook-scripts/commit-access-control.pl.in b/tools/hook-scripts/commit-access-control.pl.in
new file mode 100755
index 0000000..5710276
--- /dev/null
+++ b/tools/hook-scripts/commit-access-control.pl.in
@@ -0,0 +1,411 @@
+#!/usr/bin/env perl
+
+# ====================================================================
+# commit-access-control.pl: check if the user that submitted the
+# transaction TXN-NAME has the appropriate rights to perform the
+# commit in repository REPOS using the permissions listed in the
+# configuration file CONF_FILE.
+#
+# $HeadURL: https://svn.apache.org/repos/asf/subversion/branches/1.10.x/tools/hook-scripts/commit-access-control.pl.in $
+# $LastChangedDate: 2009-11-16 19:07:17 +0000 (Mon, 16 Nov 2009) $
+# $LastChangedBy: hwright $
+# $LastChangedRevision: 880911 $
+#
+# Usage: commit-access-control.pl REPOS TXN-NAME CONF_FILE
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+
+# Turn on warnings the best way depending on the Perl version.
+BEGIN {
+ if ( $] >= 5.006_000)
+ { require warnings; import warnings; }
+ else
+ { $^W = 1; }
+}
+
+use strict;
+use Carp;
+use Config::IniFiles 2.27;
+
+######################################################################
+# Configuration section.
+
+# Svnlook path.
+my $svnlook = "@SVN_BINDIR@/svnlook";
+
+# Since the path to svnlook depends upon the local installation
+# preferences, check that the required program exists to ensure that
+# the administrator has set up the script properly.
+{
+ my $ok = 1;
+ foreach my $program ($svnlook)
+ {
+ if (-e $program)
+ {
+ unless (-x $program)
+ {
+ warn "$0: required program `$program' is not executable, ",
+ "edit $0.\n";
+ $ok = 0;
+ }
+ }
+ else
+ {
+ warn "$0: required program `$program' does not exist, edit $0.\n";
+ $ok = 0;
+ }
+ }
+ exit 1 unless $ok;
+}
+
+######################################################################
+# Initial setup/command-line handling.
+
+&usage unless @ARGV == 3;
+
+my $repos = shift;
+my $txn = shift;
+my $cfg_filename = shift;
+
+unless (-e $repos)
+ {
+ &usage("$0: repository directory `$repos' does not exist.");
+ }
+unless (-d $repos)
+ {
+ &usage("$0: repository directory `$repos' is not a directory.");
+ }
+unless (-e $cfg_filename)
+ {
+ &usage("$0: configuration file `$cfg_filename' does not exist.");
+ }
+unless (-r $cfg_filename)
+ {
+ &usage("$0: configuration file `$cfg_filename' is not readable.");
+ }
+
+# Define two constant subroutines to stand for read-only or read-write
+# access to the repository.
+sub ACCESS_READ_ONLY () { 'read-only' }
+sub ACCESS_READ_WRITE () { 'read-write' }
+
+######################################################################
+# Load the configuration file and validate it.
+my $cfg = Config::IniFiles->new(-file => $cfg_filename);
+unless ($cfg)
+ {
+ die "$0: error in loading configuration file `$cfg_filename'",
+ @Config::IniFiles::errors ? ":\n@Config::IniFiles::errors\n"
+ : ".\n";
+ }
+
+# Go through each section of the configuration file, validate that
+# each section has the required parameters and complain about unknown
+# parameters. Compile any regular expressions.
+my @sections = $cfg->Sections;
+{
+ my $ok = 1;
+ foreach my $section (@sections)
+ {
+ # First check for any unknown parameters.
+ foreach my $param ($cfg->Parameters($section))
+ {
+ next if $param eq 'match';
+ next if $param eq 'users';
+ next if $param eq 'access';
+ warn "$0: config file `$cfg_filename' section `$section' parameter ",
+ "`$param' is being ignored.\n";
+ $cfg->delval($section, $param);
+ }
+
+ my $access = $cfg->val($section, 'access');
+ if (defined $access)
+ {
+ unless ($access eq ACCESS_READ_ONLY or $access eq ACCESS_READ_WRITE)
+ {
+ warn "$0: config file `$cfg_filename' section `$section' sets ",
+ "`access' to illegal value `$access'.\n";
+ $ok = 0;
+ }
+ }
+ else
+ {
+ warn "$0: config file `$cfg_filename' section `$section' does ",
+ "not set `access' parameter.\n";
+ $ok = 0;
+ }
+
+ my $match_regex = $cfg->val($section, 'match');
+ if (defined $match_regex)
+ {
+ # To help users that automatically write regular expressions
+ # that match the beginning of absolute paths using ^/,
+ # remove the / character because subversion paths, while
+ # they start at the root level, do not begin with a /.
+ $match_regex =~ s#^\^/#^#;
+
+ my $match_re;
+ eval { $match_re = qr/$match_regex/ };
+ if ($@)
+ {
+ warn "$0: config file `$cfg_filename' section `$section' ",
+ "`match' regex `$match_regex' does not compile:\n$@\n";
+ $ok = 0;
+ }
+ else
+ {
+ $cfg->newval($section, 'match_re', $match_re);
+ }
+ }
+ else
+ {
+ warn "$0: config file `$cfg_filename' section `$section' does ",
+ "not set `match' parameter.\n";
+ $ok = 0;
+ }
+ }
+ exit 1 unless $ok;
+}
+
+######################################################################
+# Harvest data using svnlook.
+
+# Change into /tmp so that svnlook diff can create its .svnlook
+# directory.
+my $tmp_dir = '/tmp';
+chdir($tmp_dir)
+ or die "$0: cannot chdir `$tmp_dir': $!\n";
+
+# Get the author from svnlook.
+my @svnlooklines = &read_from_process($svnlook, 'author', $repos, '-t', $txn);
+my $author = shift @svnlooklines;
+unless (length $author)
+ {
+ die "$0: txn `$txn' has no author.\n";
+ }
+
+# Figure out what directories have changed using svnlook.
+my @dirs_changed = &read_from_process($svnlook, 'dirs-changed', $repos,
+ '-t', $txn);
+
+# Lose the trailing slash in the directory names if one exists, except
+# in the case of '/'.
+my $rootchanged = 0;
+for (my $i=0; $i<@dirs_changed; ++$i)
+ {
+ if ($dirs_changed[$i] eq '/')
+ {
+ $rootchanged = 1;
+ }
+ else
+ {
+ $dirs_changed[$i] =~ s#^(.+)[/\\]$#$1#;
+ }
+ }
+
+# Figure out what files have changed using svnlook.
+my @files_changed;
+foreach my $line (&read_from_process($svnlook, 'changed', $repos, '-t', $txn))
+ {
+ # Split the line up into the modification code and path, ignoring
+ # property modifications.
+ if ($line =~ /^.. (.*)$/)
+ {
+ push(@files_changed, $1);
+ }
+ }
+
+# Create the list of all modified paths.
+my @changed = (@dirs_changed, @files_changed);
+
+# There should always be at least one changed path. If there are
+# none, then there may be something fishy going on, so just exit now
+# indicating that the commit should not proceed.
+unless (@changed)
+ {
+ die "$0: no changed paths found in txn `$txn'.\n";
+ }
+
+######################################################################
+# Populate the permissions table.
+
+# Set a hash keeping track of the access rights to each path. Because
+# this is an access control script, set the default permissions to
+# read-only.
+my %permissions;
+foreach my $path (@changed)
+ {
+ $permissions{$path} = ACCESS_READ_ONLY;
+ }
+
+foreach my $section (@sections)
+ {
+ # Decide if this section should be used. It should be used if
+ # there are no users listed at all for this section, or if there
+ # are users listed and the author is one of them.
+ my $use_this_section;
+
+ # If there are any users listed, then check if the author of this
+ # commit is listed in the list. If not, then delete the section,
+ # because it won't apply.
+ #
+ # The configuration file can list users like this on multiple
+ # lines:
+ # users = joe@mysite.com betty@mysite.com
+ # users = bob@yoursite.com
+
+ # Because of the way Config::IniFiles works, check if there are
+  # any users at all with the scalar return from val() and if there are,
+ # then get the array value to get all users.
+ my $users = $cfg->val($section, 'users');
+ if (defined $users and length $users)
+ {
+ my $match_user = 0;
+ foreach my $entry ($cfg->val($section, 'users'))
+ {
+ unless ($match_user)
+ {
+ foreach my $user (split(' ', $entry))
+ {
+ if ($author eq $user)
+ {
+ $match_user = 1;
+ last;
+ }
+ }
+ }
+ }
+
+ $use_this_section = $match_user;
+ }
+ else
+ {
+ $use_this_section = 1;
+ }
+
+ next unless $use_this_section;
+
+ # Go through each modified path and match it to the regular
+ # expression and set the access right if the regular expression
+ # matches.
+ my $access = $cfg->val($section, 'access');
+ my $match_re = $cfg->val($section, 'match_re');
+ foreach my $path (@changed)
+ {
+ $permissions{$path} = $access if $path =~ $match_re;
+ }
+ }
+
+# Go through all the modified paths and see if any permissions are
+# read-only. If so, then fail the commit.
+my @failed_paths;
+foreach my $path (@changed)
+ {
+ if ($permissions{$path} ne ACCESS_READ_WRITE)
+ {
+ push(@failed_paths, $path);
+ }
+ }
+
+if (@failed_paths)
+ {
+ warn "$0: user `$author' does not have permission to commit to ",
+ @failed_paths > 1 ? "these paths:\n " : "this path:\n ",
+ join("\n ", @failed_paths), "\n";
+ exit 1;
+ }
+else
+ {
+ exit 0;
+ }
+
+sub usage
+{
+ warn "@_\n" if @_;
+ die "usage: $0 REPOS TXN-NAME CONF_FILE\n";
+}
+
+sub safe_read_from_pipe
+{
+ unless (@_)
+ {
+ croak "$0: safe_read_from_pipe passed no arguments.\n";
+ }
+ print "Running @_\n";
+ my $pid = open(SAFE_READ, '-|');
+ unless (defined $pid)
+ {
+ die "$0: cannot fork: $!\n";
+ }
+ unless ($pid)
+ {
+ open(STDERR, ">&STDOUT")
+ or die "$0: cannot dup STDOUT: $!\n";
+ exec(@_)
+ or die "$0: cannot exec `@_': $!\n";
+ }
+ my @output;
+ while (<SAFE_READ>)
+ {
+ chomp;
+ push(@output, $_);
+ }
+ close(SAFE_READ);
+ my $result = $?;
+ my $exit = $result >> 8;
+ my $signal = $result & 127;
+ my $cd = $result & 128 ? "with core dump" : "";
+ if ($signal or $cd)
+ {
+ warn "$0: pipe from `@_' failed $cd: exit=$exit signal=$signal\n";
+ }
+ if (wantarray)
+ {
+ return ($result, @output);
+ }
+ else
+ {
+ return $result;
+ }
+}
+
+sub read_from_process
+ {
+ unless (@_)
+ {
+ croak "$0: read_from_process passed no arguments.\n";
+ }
+ my ($status, @output) = &safe_read_from_pipe(@_);
+ if ($status)
+ {
+ if (@output)
+ {
+ die "$0: `@_' failed with this output:\n", join("\n", @output), "\n";
+ }
+ else
+ {
+ die "$0: `@_' failed with no output.\n";
+ }
+ }
+ else
+ {
+ return @output;
+ }
+}
diff --git a/tools/hook-scripts/commit-email.rb b/tools/hook-scripts/commit-email.rb
new file mode 100755
index 0000000..2fa61a0
--- /dev/null
+++ b/tools/hook-scripts/commit-email.rb
@@ -0,0 +1,122 @@
+#!/usr/bin/env ruby
+
+#
+######################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+#
+
+require 'English'
+
+original_argv = ARGV.dup
+argv = []
+
+found_include_option = false
+while (arg = original_argv.shift)
+ if found_include_option
+ $LOAD_PATH.unshift(arg)
+ found_include_option = false
+ else
+ case arg
+ when "-I", "--include"
+ found_include_option = true
+ when /\A-I/, /\A--include=?/
+ path = $POSTMATCH
+ $LOAD_PATH.unshift(path) unless path.empty?
+ else
+ argv << arg
+ end
+ end
+end
+
+def extract_email_address(address)
+ if /<(.+?)>/ =~ address
+ $1
+ else
+ address
+ end
+end
+
+def sendmail(to, from, mail, server=nil, port=nil)
+ server ||= "localhost"
+ from = extract_email_address(from)
+ to = to.collect {|address| extract_email_address(address)}
+ Net::SMTP.start(server, port) do |smtp|
+ smtp.open_message_stream(from, to) do |f|
+ f.print(mail)
+ end
+ end
+end
+
+begin
+ require 'svn/commit-mailer'
+ Svn::Locale.set
+ Svn::CommitMailer.run(argv)
+rescue Exception => error
+ require 'net/smtp'
+ require 'socket'
+
+ to = []
+ subject = "Error"
+ from = "#{ENV['USER']}@#{Socket.gethostname}"
+ server = nil
+ port = nil
+ begin
+ begin
+ Svn::CommitMailer
+ rescue NameError
+ raise OptionParser::ParseError
+ end
+ _, _, _to, options = Svn::CommitMailer.parse(argv)
+ to = [_to]
+ to = options.error_to unless options.error_to.empty?
+ from = options.from || from
+ subject = "#{options.name}: #{subject}" if options.name
+ server = options.server
+ port = options.port
+ rescue OptionParser::MissingArgument
+ argv.delete_if {|arg| $!.args.include?(arg)}
+ retry
+ rescue OptionParser::ParseError
+ if to.empty?
+ _, _, _to, *_ = ARGV.reject {|arg| /^-/.match(arg)}
+ to = [_to]
+ end
+ end
+
+ detail = <<-EOM
+#{error.class}: #{error.message}
+#{error.backtrace.join("\n")}
+EOM
+ to = to.compact
+ if to.empty?
+ STDERR.puts detail
+ else
+ sendmail(to, from, <<-MAIL, server, port)
+MIME-Version: 1.0
+Content-Type: text/plain; charset=us-ascii
+Content-Transfer-Encoding: 7bit
+From: #{from}
+To: #{to.join(', ')}
+Subject: #{subject}
+Date: #{Time.now.rfc2822}
+
+#{detail}
+MAIL
+ end
+end
diff --git a/tools/hook-scripts/control-chars.py b/tools/hook-scripts/control-chars.py
new file mode 100755
index 0000000..17223fe
--- /dev/null
+++ b/tools/hook-scripts/control-chars.py
@@ -0,0 +1,130 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+'''control-chars.py: Subversion repository hook script that rejects filenames
+which contain control characters. Expects to be called like a pre-commit hook:
+ control-chars.py <REPOS-PATH> <TXN-NAME>
+
+Latest version should be available at
+http://svn.apache.org/repos/asf/subversion/trunk/tools/hook-scripts/
+
+See validate-files.py for more generic validations.'''
+
+import sys
+import re
+import posixpath
+
+import svn
+import svn.fs
+import svn.repos
+import svn.core
+
+# Can't hurt to disallow chr(0), though the C API will never pass one anyway.
+control_chars = set( [chr(i) for i in range(32)] )
+control_chars.add(chr(127))
+
+def check_node(node, path):
+ "check NODE for control characters. PATH is used for error messages"
+ if node.action == 'A':
+ if any((c in control_chars) for c in node.name):
+ sys.stderr.write("'%s' contains a control character" % path)
+ return 3
+
+def walk_tree(node, path, callback):
+ "Walk NODE"
+ if not node:
+ return 0
+
+ ret_val = callback(node, path)
+ if ret_val > 0:
+ return ret_val
+
+ node = node.child
+ if not node:
+ return 0
+
+ while node:
+ full_path = posixpath.join(path, node.name)
+ ret_val = walk_tree(node, full_path, callback)
+ # If we ran into an error just return up the stack all the way
+ if ret_val > 0:
+ return ret_val
+ node = node.sibling
+
+ return 0
+
+def usage():
+ sys.stderr.write("Invalid arguments, expects to be called like a pre-commit hook.")
+
+def main(ignored_pool, argv):
+ if len(argv) < 3:
+ usage()
+ return 2
+
+ repos_path = svn.core.svn_path_canonicalize(argv[1])
+ txn_name = argv[2]
+
+ if not repos_path or not txn_name:
+ usage()
+ return 2
+
+ repos = svn.repos.svn_repos_open(repos_path)
+ fs = svn.repos.svn_repos_fs(repos)
+ txn = svn.fs.svn_fs_open_txn(fs, txn_name)
+ txn_root = svn.fs.svn_fs_txn_root(txn)
+ base_rev = svn.fs.svn_fs_txn_base_revision(txn)
+ if base_rev is None or base_rev <= svn.core.SVN_INVALID_REVNUM:
+ sys.stderr.write("Transaction '%s' is not based on a revision" % txn_name)
+ return 2
+ base_root = svn.fs.svn_fs_revision_root(fs, base_rev)
+ editor, editor_baton = svn.repos.svn_repos_node_editor(repos, base_root,
+ txn_root)
+ try:
+ svn.repos.svn_repos_replay2(txn_root, "", svn.core.SVN_INVALID_REVNUM,
+ False, editor, editor_baton, None, None)
+ except svn.core.SubversionException as e:
+ # If we get a file not found error then some file has a newline in it and
+ # fsfs's own transaction is now corrupted.
+ if e.apr_err == svn.core.SVN_ERR_FS_NOT_FOUND:
+ match = re.search("path '(.*?)'", e.message)
+ if not match:
+ sys.stderr.write(repr(e))
+ return 2
+ path = match.group(1)
+ sys.stderr.write("Path name that contains '%s' has a newline." % path)
+ return 3
+ # fs corrupt error probably means that there is probably both
+ # file and file\n in the transaction. However, we can't really determine
+ # which files since the transaction is broken. Even if we didn't reject
+ # this it would not be able to be committed. This just gives a better
+ # error message.
+ elif e.apr_err == svn.core.SVN_ERR_FS_CORRUPT:
+ sys.stderr.write("Some path contains a newline causing: %s" % repr(e))
+ return 3
+ else:
+ sys.stderr.write(repr(e))
+ return 2
+ tree = svn.repos.svn_repos_node_from_baton(editor_baton)
+ return walk_tree(tree, "/", check_node)
+
+if __name__ == '__main__':
+ sys.exit(svn.core.run_app(main, sys.argv))
diff --git a/tools/hook-scripts/log-police.py b/tools/hook-scripts/log-police.py
new file mode 100755
index 0000000..4d0ff0f
--- /dev/null
+++ b/tools/hook-scripts/log-police.py
@@ -0,0 +1,148 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+# log-police.py: Ensure that log messages end with a single newline.
+# See usage() function for details, or just run with no arguments.
+
+import os
+import sys
+import getopt
+try:
+ my_getopt = getopt.gnu_getopt
+except AttributeError:
+ my_getopt = getopt.getopt
+
+import svn
+import svn.fs
+import svn.repos
+import svn.core
+
+
+def fix_log_message(log_message):
+ """Return a fixed version of LOG_MESSAGE. By default, this just
+ means ensuring that the result ends with exactly one newline and no
+ other whitespace. But if you want to do other kinds of fixups, this
+ function is the place to implement them -- all log message fixing in
+ this script happens here."""
+ return log_message.rstrip() + "\n"
+
+
+def fix_txn(fs, txn_name):
+ "Fix up the log message for txn TXN_NAME in FS. See fix_log_message()."
+ txn = svn.fs.svn_fs_open_txn(fs, txn_name)
+ log_message = svn.fs.svn_fs_txn_prop(txn, "svn:log")
+ if log_message is not None:
+ new_message = fix_log_message(log_message)
+ if new_message != log_message:
+ svn.fs.svn_fs_change_txn_prop(txn, "svn:log", new_message)
+
+
+def fix_rev(fs, revnum):
+ "Fix up the log message for revision REVNUM in FS. See fix_log_message()."
+ log_message = svn.fs.svn_fs_revision_prop(fs, revnum, 'svn:log')
+ if log_message is not None:
+ new_message = fix_log_message(log_message)
+ if new_message != log_message:
+ svn.fs.svn_fs_change_rev_prop(fs, revnum, "svn:log", new_message)
+
+
+def usage_and_exit(error_msg=None):
+ """Write usage information and exit. If ERROR_MSG is provide, that
+ error message is printed first (to stderr), the usage info goes to
+ stderr, and the script exits with a non-zero status. Otherwise,
+ usage info goes to stdout and the script exits with a zero status."""
+ import os.path
+ stream = error_msg and sys.stderr or sys.stdout
+ if error_msg:
+ stream.write("ERROR: %s\n\n" % error_msg)
+ stream.write("USAGE: %s [-t TXN_NAME | -r REV_NUM | --all-revs] REPOS\n"
+ % (os.path.basename(sys.argv[0])))
+ stream.write("""
+Ensure that log messages end with exactly one newline and no other
+whitespace characters. Use as a pre-commit hook by passing '-t TXN_NAME';
+fix up a single revision by passing '-r REV_NUM'; fix up all revisions by
+passing '--all-revs'. (When used as a pre-commit hook, may modify the
+svn:log property on the txn.)
+""")
+ sys.exit(error_msg and 1 or 0)
+
+
+def main(ignored_pool, argv):
+ repos_path = None
+ txn_name = None
+ rev_name = None
+ all_revs = False
+
+ try:
+ opts, args = my_getopt(argv[1:], 't:r:h?', ["help", "all-revs"])
+ except:
+ usage_and_exit("problem processing arguments / options.")
+ for opt, value in opts:
+ if opt == '--help' or opt == '-h' or opt == '-?':
+ usage_and_exit()
+ elif opt == '-t':
+ txn_name = value
+ elif opt == '-r':
+ rev_name = value
+ elif opt == '--all-revs':
+ all_revs = True
+ else:
+ usage_and_exit("unknown option '%s'." % opt)
+
+ if txn_name is not None and rev_name is not None:
+ usage_and_exit("cannot pass both -t and -r.")
+ if txn_name is not None and all_revs:
+ usage_and_exit("cannot pass --all-revs with -t.")
+ if rev_name is not None and all_revs:
+ usage_and_exit("cannot pass --all-revs with -r.")
+ if rev_name is None and txn_name is None and not all_revs:
+ usage_and_exit("must provide exactly one of -r, -t, or --all-revs.")
+ if len(args) != 1:
+ usage_and_exit("only one argument allowed (the repository).")
+
+ repos_path = svn.core.svn_path_canonicalize(args[0])
+
+ # A non-bindings version of this could be implemented by calling out
+ # to 'svnlook getlog' and 'svnadmin setlog'. However, using the
+ # bindings results in much simpler code.
+
+ fs = svn.repos.svn_repos_fs(svn.repos.svn_repos_open(repos_path))
+ if txn_name is not None:
+ fix_txn(fs, txn_name)
+ elif rev_name is not None:
+ fix_rev(fs, int(rev_name))
+ elif all_revs:
+ # Do it such that if we're running on a live repository, we'll
+ # catch up even with commits that came in after we started.
+ last_youngest = 0
+ while True:
+ youngest = svn.fs.svn_fs_youngest_rev(fs)
+ if youngest >= last_youngest:
+ for this_rev in range(last_youngest, youngest + 1):
+ fix_rev(fs, this_rev)
+ last_youngest = youngest + 1
+ else:
+ break
+
+
+if __name__ == '__main__':
+ sys.exit(svn.core.run_app(main, sys.argv))
diff --git a/tools/hook-scripts/mailer/mailer.conf.example b/tools/hook-scripts/mailer/mailer.conf.example
new file mode 100644
index 0000000..3887e6b
--- /dev/null
+++ b/tools/hook-scripts/mailer/mailer.conf.example
@@ -0,0 +1,374 @@
+#
+# mailer.conf: example configuration file for mailer.py
+#
+# $Id: mailer.conf.example 1777846 2017-01-07 19:35:49Z julianfoad $
+
+[general]
+
+# The [general].diff option is now DEPRECATED.
+# Instead use [defaults].diff .
+
+#
+# One delivery method must be chosen. mailer.py will prefer using the
+# "mail_command" option. If that option is empty or commented out,
+# then it checks whether the "smtp_hostname" option has been
+# specified. If neither option is set, then the commit message is
+# delivered to stdout.
+#
+
+# This command will be invoked with destination addresses on the command
+# line, and the message piped into it.
+#mail_command = /usr/sbin/sendmail
+
+# This option specifies the hostname for delivery via SMTP.
+#smtp_hostname = localhost
+
+# Username and password for SMTP servers requiring authorisation.
+#smtp_username = example
+#smtp_password = example
+
+# This option specifies whether to use SSL from the beginning of the SMTP
+# connection.
+#smtp_ssl = yes
+
+# --------------------------------------------------------------------------
+
+#
+# CONFIGURATION GROUPS
+#
+# Any sections other than [general], [defaults], [maps] and sections
+# referred to within [maps] are considered to be user-defined groups
+# which override values in the [defaults] section.
+# These groups are selected using the following three options:
+#
+# for_repos
+# for_paths
+# search_logmsg
+#
+# Each option specifies a regular expression. for_repos is matched
+# against the absolute path to the repository the mailer is operating
+# against. for_paths is matched against *every* path (files and
+# dirs) that was modified during the commit.
+#
+# The options specified in the [defaults] section are always selected. The
+# presence of a non-matching for_repos has no relevance. Note that you may
+# still use a for_repos value to extract useful information (more on this
+# later). Any user-defined groups without a for_repos, or which contains
+# a matching for_repos, will be selected for potential use.
+#
+# The subset of user-defined groups identified by the repository are further
+# refined based on the for_paths option. A group is selected if at least
+# one path(*) in the commit matches the for_paths regular expression. Note
+# that the paths are relative to the root of the repository and do not
+# have a leading slash.
+#
+# (*) Actually, each path will select just one group. Thus, it is possible
+# that one group will match against all paths, while another group matches
+# none of the paths, even though its for_paths would have selected some of
+# the paths in the commit.
+#
+# search_logmsg specifies a regular expression to match against the
+# log message. If the regular expression does not match the log
+# message, the group is not matched; if the regular expression matches
+# once, the group is used. If there are multiple matches, each
+# successful match generates another group-match (this is useful if
+# "named groups" are used). If search_logmsg is not used, no log
+# message filtering is performed.
+#
+# Groups are matched in no particular order. Do not depend upon their
+# order within this configuration file. The values from [defaults] will
+# be used if no group is matched or an option in a group does not override
+# the corresponding value from [defaults].
+#
+# Generally, a commit email is generated for each group that has been
+# selected. The script will try to minimize mails, so it may be possible
+# that a single message will be generated to multiple recipients. In
+# addition, it is possible for multiple messages per group to be generated,
+# based on the various substitutions that are performed (see the following
+# section).
+#
+#
+# SUBSTITUTIONS
+#
+# The regular expressions can use the "named group" syntax to extract
+# interesting pieces of the repository or commit path. These named values
+# can then be substituted in the option values during mail generation.
+#
+# For example, let's say that you have a repository with a top-level
+# directory named "clients", with several client projects underneath:
+#
+# REPOS/
+# clients/
+# gsvn/
+# rapidsvn/
+# winsvn/
+#
+# The client name can be extracted with a regular expression like:
+#
+# for_paths = clients/(?P<client>[^/]*)($|/)
+#
+# The substitution is performed using Python's dict-based string
+# interpolation syntax:
+#
+# to_addr = commits@%(client)s.tigris.org
+#
+# The %(NAME)s syntax will substitute whatever value for NAME was captured
+# in the for_repos and for_paths regular expressions. The set of names
+# available is obtained from the following set of regular expressions:
+#
+# [defaults].for_repos (if present)
+# [GROUP].for_repos (if present in the user-defined group "GROUP")
+# [GROUP].for_paths (if present in the user-defined group "GROUP")
+#
+# The names from the regexes later in the list override the earlier names.
+# If none of the groups match, but a for_paths is present in [defaults],
+# then its extracted names will be available.
+#
+# Further suppose you want to match bug-ids in log messages:
+#
+# search_logmsg = (?P<bugid>(ProjA|ProjB)#\d)
+#
+# The bugids would be of the form ProjA#123 and ProjB#456. In this
+# case, each time the regular expression matches, another match group
+# will be generated. Thus, if you use:
+#
+# commit_subject_prefix = %(bugid)s:
+#
+# Then, a log message such as "Fixes ProjA#123 and ProjB#234" would
+# match both bug-ids, and two emails would be generated - one with
+# subject "ProjA#123: <...>" and "ProjB#234: <...>".
+#
+# Note that each unique set of names for substitution will generate an
+# email. In the above example, if a commit modified files in all three
+# client subdirectories, then an email will be sent to all three commits@
+# mailing lists on tigris.org.
+#
+# The substitution variable "author" is provided by default, and is set
+# to the author name passed to mailer.py for revprop changes or the
+# author defined for a revision; if neither is available, then it is
+# set to "no_author". Thus, you might define a line like:
+#
+# from_addr = %(author)s@example.com
+#
+# The substitution variable "repos_basename" is provided, and is set to
+# the directory name of the repository. This can be useful to set
+# a custom subject that can be re-used in multiple repositories:
+#
+# commit_subject_prefix = [svn-%(repos_basename)s]
+#
+# For example if the repository is at /path/to/repo/project-x then
+# the subject of commit emails will be prefixed with [svn-project-x]
+#
+#
+# SUMMARY
+#
+# While mailer.py will work to minimize the number of mail messages
+# generated, a single commit can potentially generate a large number
+# of variants of a commit message. The criteria for generating messages
+# is based on:
+#
+# groups selected by for_repos
+# groups selected by for_paths
+# unique sets of parameters extracted by the above regular expressions
+#
+
+[defaults]
+
+# This is not passed to the shell, so do not use shell metacharacters.
+# The command is split around whitespace, so if you want to include
+# whitespace within a single argument, this option cannot express it.
+diff = /usr/bin/diff -u -L %(label_from)s -L %(label_to)s %(from)s %(to)s
+
+# The default prefix for the Subject: header for commits.
+commit_subject_prefix =
+
+# The default prefix for the Subject: header for propchanges.
+propchange_subject_prefix =
+
+# The default prefix for the Subject: header for locks.
+lock_subject_prefix =
+
+# The default prefix for the Subject: header for unlocks.
+unlock_subject_prefix =
+
+
+# The default From: address for messages. If the from_addr is not
+# specified or it is specified but there is no text after the `=',
+# then the revision's author is used as the from address. If the
+# revision author is not specified, such as when a commit is done
+# without requiring authentication and authorization, then the string
+# 'no_author' is used. You can specify a default from_addr here and
+# if you want to have a particular for_repos group use the author as
+# the from address, you can use "from_addr =".
+from_addr = invalid@example.com
+
+# The default To: addresses for messages.  One or more addresses,
+# separated by whitespace (no commas).
+# NOTE: If you want to use a different character for separating the
+# addresses put it in front of the addresses included in square
+# brackets '[ ]'.
+to_addr = invalid@example.com
+
+# If this is set, then a Reply-To: will be inserted into the message.
+reply_to =
+
+# Specify which types of repository changes mailer.py will create
+# diffs for. Valid options are any combination of
+# 'add copy modify delete', or 'none' to never create diffs.
+# If the generate_diffs option is empty, the selection is controlled
+# by the deprecated options suppress_deletes and suppress_adds.
+# Note that this only affects the display of diffs - all changes are
+# mentioned in the summary of changed paths at the top of the message,
+# regardless of this option's value.
+# Meaning of the possible values:
+# add: generates diffs for all added paths
+# copy: generates diffs for all copied paths
+# which were not changed after copying
+# modify: generates diffs for all modified paths, including paths that were
+# copied and modified afterwards (within the same commit)
+# delete: generates diffs for all removed paths
+generate_diffs = add copy modify
+
+# Commit URL construction. This adds a URL to the top of the message
+# that can lead the reader to a Trac, ViewVC or other view of the
+# commit as a whole.
+#
+# The available substitution variable is: rev
+#commit_url = http://diffs.server.com/trac/software/changeset/%(rev)s
+
+# Diff URL construction. For the configured diff URL types, the diff
+# section (which follows the message header) will include the URL
+# relevant to the change type, even if actual diff generation for that
+# change type is disabled (per the generate_diffs option).
+#
+# Available substitution variables are: path, base_path, rev, base_rev
+#diff_add_url =
+#diff_copy_url =
+#diff_modify_url = http://diffs.server.com/?p1=%(base_path)s&p2=%(path)s
+#diff_delete_url =
+
+# When set to "yes", the mailer will suppress the creation of a diff which
+# deletes all the lines in the file. If this is set to anything else, or
+# is simply commented out, then the diff will be inserted. Note that the
+# deletion is always mentioned in the message header, regardless of this
+# option's value.
+### DEPRECATED (if generate_diffs is not empty, this option is ignored)
+#suppress_deletes = yes
+
+# When set to "yes", the mailer will suppress the creation of a diff which
+# adds all the lines in the file. If this is set to anything else, or
+# is simply commented out, then the diff will be inserted. Note that the
+# addition is always mentioned in the message header, regardless of this
+# option's value.
+### DEPRECATED (if generate_diffs is not empty, this option is ignored)
+#suppress_adds = yes
+
+# A revision is reported on if any of its changed paths match the
+# for_paths option. If only some of the changed paths of a revision
+# match, this variable controls the behaviour for the non-matching
+# paths. Possible values are:
+#
+# yes: (Default) Show in both summary and diffs.
+# summary: Show the changed paths in the summary, but omit the diffs.
+# no: Show nothing more than a note saying "and changes in other areas"
+#
+show_nonmatching_paths = yes
+
+# Subject line length limit. The generated subject line will be truncated
+# and terminated with "...", to remain within the specified maximum length.
+# Set to 0 to turn off.
+#truncate_subject = 200
+
+# --------------------------------------------------------------------------
+
+[maps]
+
+#
+# This section can be used to define rewrite mappings for option values. It
+# is typically used for computing from/to addresses, but can actually be
+# used to remap values for any option in this file.
+#
+# The mappings are global for the entire configuration file. There is
+# no group-specific mapping capability. For each mapping that you want
+# to perform, you will provide the name of the option (e.g. from_addr)
+# and a specification of how to perform those mappings. These declarations
+# are made here in the [maps] section.
+#
+# When an option is accessed, the value is loaded from the configuration
+# file and all %(NAME)s substitutions are performed. The resulting value
+# is then passed through the map. If a map entry is not available for
+# the value, then it will be used unchanged.
+#
+# NOTES: - Avoid using map substitution names which differ only in case.
+# Unexpected results may occur.
+# - A colon ':' is also considered as separator between option and
+# value (keep this in mind when trying to map a file path under
+# windows).
+#
+# The format to declare a map is:
+#
+# option_name_to_remap = mapping_specification
+#
+# At the moment, there is only one type of mapping specification:
+#
+# mapping_specification = '[' sectionname ']'
+#
+# This will use the given section to map values. The option names in
+# the section are the input values, and the option values are the result.
+#
+
+#
+# EXAMPLE:
+#
+# We have two projects using two repositories. The name of the repos
+# does not easily map to their commit mailing lists, so we will use
+# a mapping to go from a project name (extracted from the repository
+# path) to their commit list. The committers also need a special
+# mapping to derive their email address from their repository username.
+#
+# [projects]
+# for_repos = .*/(?P<project>.*)
+# from_addr = %(author)s
+# to_addr = %(project)s
+#
+# [maps]
+# from_addr = [authors]
+# to_addr = [mailing-lists]
+#
+# [authors]
+# john = jconnor@example.com
+# sarah = sconnor@example.com
+#
+# [mailing-lists]
+# t600 = spottable-commits@example.com
+# tx = hotness-commits@example.com
+#
+
+# --------------------------------------------------------------------------
+
+#
+# [example-group]
+# # send notifications if any web pages are changed
+# for_paths = .*\.html
+# # set a custom prefix
+# commit_subject_prefix = [commit]
+# propchange_subject_prefix = [propchange]
+# # override the default, sending these elsewhere
+# to_addr = www-commits@example.com
+# # use the revision author as the from address
+# from_addr =
+# # use a custom diff program for this group
+# diff = /usr/bin/my-diff -u -L %(label_from)s -L %(label_to)s %(from)s %(to)s
+#
+# [another-example]
+# # commits to personal repositories should go to that person
+# for_repos = /home/(?P<who>[^/]*)/repos
+# to_addr = %(who)s@example.com
+#
+# [issuetracker]
+# search_logmsg = (?P<bugid>(?P<project>projecta|projectb|projectc)#\d+)
+# # (or, use a mapping if the bug-id to email address is not this trivial)
+# to_addr = %(project)s-tracker@example.com
+# commit_subject_prefix = %(bugid)s:
+# propchange_subject_prefix = %(bugid)s:
+
diff --git a/tools/hook-scripts/mailer/mailer.py b/tools/hook-scripts/mailer/mailer.py
new file mode 100755
index 0000000..c8d658c
--- /dev/null
+++ b/tools/hook-scripts/mailer/mailer.py
@@ -0,0 +1,1483 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+# mailer.py: send email describing a commit
+#
+# $HeadURL: https://svn.apache.org/repos/asf/subversion/branches/1.10.x/tools/hook-scripts/mailer/mailer.py $
+# $LastChangedDate: 2017-01-07 19:35:49 +0000 (Sat, 07 Jan 2017) $
+# $LastChangedBy: julianfoad $
+# $LastChangedRevision: 1777846 $
+#
+# USAGE: mailer.py commit REPOS REVISION [CONFIG-FILE]
+# mailer.py propchange REPOS REVISION AUTHOR REVPROPNAME [CONFIG-FILE]
+# mailer.py propchange2 REPOS REVISION AUTHOR REVPROPNAME ACTION \
+# [CONFIG-FILE]
+# mailer.py lock REPOS AUTHOR [CONFIG-FILE]
+# mailer.py unlock REPOS AUTHOR [CONFIG-FILE]
+#
+# Using CONFIG-FILE, deliver an email describing the changes between
+# REV and REV-1 for the repository REPOS.
+#
+# ACTION was added as a fifth argument to the post-revprop-change hook
+# in Subversion 1.2.0. Its value is one of 'A', 'M' or 'D' to indicate
+# if the property was added, modified or deleted, respectively.
+#
+# See _MIN_SVN_VERSION below for which version of Subversion's Python
+# bindings are required by this version of mailer.py.
+
+import os
+import sys
+try:
+ # Python >=3.0
+ import configparser
+ from urllib.parse import quote as urllib_parse_quote
+except ImportError:
+ # Python <3.0
+ import ConfigParser as configparser
+ from urllib import quote as urllib_parse_quote
+import time
+import subprocess
+if sys.version_info[0] >= 3:
+ # Python >=3.0
+ from io import StringIO
+else:
+ # Python <3.0
+ from cStringIO import StringIO
+import smtplib
+import re
+import tempfile
+
+# Minimal version of Subversion's bindings required
+_MIN_SVN_VERSION = [1, 5, 0]
+
+# Import the Subversion Python bindings, making sure they meet our
+# minimum version requirements.
+try:
+ import svn.fs
+ import svn.delta
+ import svn.repos
+ import svn.core
+except ImportError:
+ sys.stderr.write(
+ "You need version %s or better of the Subversion Python bindings.\n" \
+ % ".".join([str(x) for x in _MIN_SVN_VERSION]))
+ sys.exit(1)
+if _MIN_SVN_VERSION > [svn.core.SVN_VER_MAJOR,
+ svn.core.SVN_VER_MINOR,
+ svn.core.SVN_VER_PATCH]:
+ sys.stderr.write(
+ "You need version %s or better of the Subversion Python bindings.\n" \
+ % ".".join([str(x) for x in _MIN_SVN_VERSION]))
+ sys.exit(1)
+
+
+SEPARATOR = '=' * 78
+
def main(pool, cmd, config_fname, repos_dir, cmd_args):
  """Dispatch one hook invocation.

  Builds the Repository and Config for REPOS_DIR and hands off to the
  messenger class matching CMD ('commit', 'propchange', 'propchange2',
  'lock' or 'unlock'); CMD_ARGS carries the remaining hook arguments
  (see the usage comment at the top of this file).  Raises
  UnknownSubcommand for any other CMD.
  """
  ### TODO: Sanity check the incoming args

  def make_config(repos):
    # All subcommands build the Config the same way: the revision (or
    # override) author and the repository basename become substitution
    # parameters.  Callers must set repos.author first.
    return Config(config_fname, repos,
                  {'author': repos.author,
                   'repos_basename': os.path.basename(repos.repos_dir)
                   })

  if cmd == 'commit':
    revision = int(cmd_args[0])
    repos = Repository(repos_dir, revision, pool)
    messenger = Commit(pool, make_config(repos), repos)
  elif cmd == 'propchange' or cmd == 'propchange2':
    revision = int(cmd_args[0])
    author = cmd_args[1]
    propname = cmd_args[2]
    # propchange2 (svn >= 1.2) supplies the action; older hooks imply 'A'
    action = (cmd == 'propchange2' and cmd_args[3] or 'A')
    repos = Repository(repos_dir, revision, pool)
    # Override the repos revision author with the author of the propchange
    repos.author = author
    messenger = PropChange(pool, make_config(repos), repos, author,
                           propname, action)
  elif cmd == 'lock' or cmd == 'unlock':
    author = cmd_args[0]
    repos = Repository(repos_dir, 0, pool) ### any old revision will do
    # Override the repos revision author with the author of the lock/unlock
    repos.author = author
    messenger = Lock(pool, make_config(repos), repos, author, cmd == 'lock')
  else:
    raise UnknownSubcommand(cmd)

  messenger.generate()
+
+
def remove_leading_slashes(path):
  """Return PATH with all leading '/' characters removed.

  PATH may be None or empty (e.g. change.base_path for a non-copy
  change); such falsy values are returned unchanged.
  """
  if not path:
    return path
  return path.lstrip('/')
+
+
class OutputBase:
  """Abstract base class that formalizes the interface of output methods."""

  def __init__(self, cfg, repos, prefix_param):
    self.cfg = cfg
    self.repos = repos
    self.prefix_param = prefix_param
    self._CHUNKSIZE = 128 * 1024

    # This is a public member variable. This must be assigned a suitable
    # piece of descriptive text before make_subject() is called.
    self.subject = ""

  def make_subject(self, group, params):
    """Build the subject line: the configured prefix (if any) glued onto
    self.subject, truncated per the truncate_subject option."""
    prefix = self.cfg.get(self.prefix_param, group, params)
    subject = prefix + ' ' + self.subject if prefix else self.subject

    try:
      limit = int(self.cfg.get('truncate_subject', group, params))
    except ValueError:
      # A non-numeric setting disables truncation.
      limit = 0

    if limit and len(subject) > limit:
      # Leave room for the trailing ellipsis.
      subject = subject[:(limit - 3)] + "..."
    return subject

  def start(self, group, params):
    """Override this method.
    Begin writing an output representation. GROUP is the name of the
    configuration file group which is causing this output to be produced.
    PARAMS is a dictionary of any named subexpressions of regular expressions
    defined in the configuration file, plus the key 'author' contains the
    author of the action being reported."""
    raise NotImplementedError

  def finish(self):
    """Override this method.
    Flush any cached information and finish writing the output
    representation."""
    raise NotImplementedError

  def write(self, output):
    """Override this method.
    Append the literal text string OUTPUT to the output representation."""
    raise NotImplementedError

  def run(self, cmd):
    """Override this method, if the default implementation is not sufficient.
    Execute CMD, writing the stdout produced to the output representation."""
    # By default we choose to incorporate child stderr into the output
    child = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT,
                             close_fds=sys.platform != "win32")

    while True:
      chunk = child.stdout.read(self._CHUNKSIZE)
      if not chunk:
        break
      self.write(chunk)

    # wait on the child so we don't end up with a billion zombies
    child.wait()
+
+
class MailedOutput(OutputBase):
  """Base class for outputs that deliver the message as email.

  start() computes the From:/To:/Reply-To: addresses from the
  configuration; mail_headers() renders the RFC822 header block.
  """

  def __init__(self, cfg, repos, prefix_param):
    OutputBase.__init__(self, cfg, repos, prefix_param)

  def start(self, group, params):
    # whitespace (or another character) separated list of addresses
    # which must be split into a clean list
    to_addr_in = self.cfg.get('to_addr', group, params)
    # if list of addresses starts with '[.]'
    # use the character between the square brackets as split char
    # else use whitespaces
    if len(to_addr_in) >= 3 and to_addr_in[0] == '[' \
           and to_addr_in[2] == ']':
      self.to_addrs = \
          [_f for _f in to_addr_in[3:].split(to_addr_in[1]) if _f]
    else:
      self.to_addrs = [_f for _f in to_addr_in.split() if _f]
    self.from_addr = self.cfg.get('from_addr', group, params) \
                     or self.repos.author or 'no_author'
    # if the from_addr (also) starts with '[.]' (may happen if one
    # map is used for both to_addr and from_addr) remove '[.]'
    if len(self.from_addr) >= 3 and self.from_addr[0] == '[' \
       and self.from_addr[2] == ']':
      self.from_addr = self.from_addr[3:]
    self.reply_to = self.cfg.get('reply_to', group, params)
    # if the reply_to (also) starts with '[.]' (may happen if one
    # map is used for both to_addr and reply_to) remove '[.]'
    if len(self.reply_to) >= 3 and self.reply_to[0] == '[' \
       and self.reply_to[2] == ']':
      self.reply_to = self.reply_to[3:]

  def _rfc2047_encode(self, hdr):
    # Return the result of splitting HDR into tokens (on space
    # characters), encoding (per RFC2047) each token as necessary, and
    # slapping 'em back to together again.
    #
    # Use the lowercase module name: 'email.Header' exists only on
    # Python 2 and would break the Python 3 path of this script;
    # 'email.header' works on Python 2.5+ and 3.x.
    from email.header import Header

    def _maybe_encode_header(hdr_token):
      try:
        hdr_token.encode('ascii')
        return hdr_token
      except UnicodeError:
        return Header(hdr_token, 'utf-8').encode()

    return ' '.join(map(_maybe_encode_header, hdr.split()))

  def mail_headers(self, group, params):
    """Return the RFC822 header block, terminated by a blank line."""
    # 'email.Utils' is Python 2 only; 'email.utils' works on 2.5+ and 3.x.
    from email import utils

    subject = self._rfc2047_encode(self.make_subject(group, params))
    from_hdr = self._rfc2047_encode(self.from_addr)
    to_hdr = self._rfc2047_encode(', '.join(self.to_addrs))

    hdrs = 'From: %s\n' \
           'To: %s\n' \
           'Subject: %s\n' \
           'Date: %s\n' \
           'Message-ID: %s\n' \
           'MIME-Version: 1.0\n' \
           'Content-Type: text/plain; charset=UTF-8\n' \
           'Content-Transfer-Encoding: 8bit\n' \
           'X-Svn-Commit-Project: %s\n' \
           'X-Svn-Commit-Author: %s\n' \
           'X-Svn-Commit-Revision: %d\n' \
           'X-Svn-Commit-Repository: %s\n' \
           % (from_hdr, to_hdr, subject,
              utils.formatdate(), utils.make_msgid(), group,
              self.repos.author or 'no_author', self.repos.rev,
              os.path.basename(self.repos.repos_dir))
    if self.reply_to:
      hdrs = '%sReply-To: %s\n' % (hdrs, self.reply_to)
    return hdrs + '\n'
+
+
class SMTPOutput(MailedOutput):
  "Deliver a mail message to an MTA using SMTP."

  def start(self, group, params):
    MailedOutput.start(self, group, params)

    # Accumulate the whole message in memory; finish() sends it in one go.
    self.buffer = StringIO()
    self.write = self.buffer.write

    self.write(self.mail_headers(group, params))

  def finish(self):
    """Connect to the configured SMTP host, send the buffered message,
    and always close the connection."""
    if self.cfg.is_set('general.smtp_ssl') and self.cfg.general.smtp_ssl == 'yes':
      server = smtplib.SMTP_SSL(self.cfg.general.smtp_hostname)
    else:
      server = smtplib.SMTP(self.cfg.general.smtp_hostname)
    try:
      if self.cfg.is_set('general.smtp_username'):
        server.login(self.cfg.general.smtp_username,
                     self.cfg.general.smtp_password)
      server.sendmail(self.from_addr, self.to_addrs, self.buffer.getvalue())
    finally:
      # Close the connection even if login/sendmail raised, so a failed
      # delivery does not leak the socket.
      server.quit()
+
+
class StandardOutput(OutputBase):
  "Print the commit message to stdout."

  def __init__(self, cfg, repos, prefix_param):
    OutputBase.__init__(self, cfg, repos, prefix_param)
    # Everything goes straight to stdout; no buffering of our own.
    self.write = sys.stdout.write

  def start(self, group, params):
    group_name = group if group else "defaults"
    self.write("Group: %s\n" % group_name)
    self.write("Subject: %s\n\n" % self.make_subject(group, params))

  def finish(self):
    # Nothing was buffered, so there is nothing to flush.
    pass
+
+
class PipeOutput(MailedOutput):
  "Deliver a mail message to an MTA via a pipe."

  def __init__(self, cfg, repos, prefix_param):
    MailedOutput.__init__(self, cfg, repos, prefix_param)

    # Figure out the command used for delivery.
    self.cmd = cfg.general.mail_command.split()

  def start(self, group, params):
    MailedOutput.start(self, group, params)

    ### gotta fix this. this is pretty specific to sendmail and qmail's
    ### mailwrapper program. should be able to use option param substitution
    argv = list(self.cmd)
    argv.append('-f')
    argv.append(self.from_addr)
    argv.extend(self.to_addrs)

    # Construct the pipe for talking to the mailer.
    self.pipe = subprocess.Popen(argv, stdin=subprocess.PIPE,
                                 close_fds=sys.platform != "win32")
    self.write = self.pipe.stdin.write

    # Start writing out the mail message.
    self.write(self.mail_headers(group, params))

  def finish(self):
    # Closing stdin signals EOF to the mailer...
    self.pipe.stdin.close()
    # ...and waiting reaps the child so we don't accumulate zombies.
    self.pipe.wait()
+
+
class Messenger:
  """Common base for the per-subcommand drivers.

  Selects the delivery mechanism from the configuration: an explicit
  mail command wins, then an SMTP host, and stdout is the fallback.
  """

  def __init__(self, pool, cfg, repos, prefix_param):
    self.pool = pool
    self.cfg = cfg
    self.repos = repos

    if cfg.is_set('general.mail_command'):
      output_class = PipeOutput
    elif cfg.is_set('general.smtp_hostname'):
      output_class = SMTPOutput
    else:
      output_class = StandardOutput

    self.output = output_class(cfg, repos, prefix_param)
+
+
class Commit(Messenger):
  """Messenger for the 'commit' subcommand.

  On construction this replays the committed revision to collect its
  changed paths, buckets those paths into configuration groups, and
  composes the base subject line.  generate() then renders one message
  per (group, params) combination.
  """

  def __init__(self, pool, cfg, repos):
    Messenger.__init__(self, pool, cfg, repos, 'commit_subject_prefix')

    # get all the changes and sort by path
    editor = svn.repos.ChangeCollector(repos.fs_ptr, repos.root_this, \
                                       self.pool)
    e_ptr, e_baton = svn.delta.make_editor(editor, self.pool)
    svn.repos.replay2(repos.root_this, "", svn.core.SVN_INVALID_REVNUM, 1, e_ptr, e_baton, None, self.pool)

    self.changelist = sorted(editor.get_changes().items())

    log = repos.get_rev_prop(svn.core.SVN_PROP_REVISION_LOG) or ''

    # collect the set of groups and the unique sets of params for the options
    # self.groups maps (group, hashable-params) -> (params, paths); the
    # paths dict is used as a set (all values are None).
    self.groups = { }
    for path, change in self.changelist:
      for (group, params) in self.cfg.which_groups(path, log):
        # turn the params into a hashable object and stash it away
        param_list = sorted(params.items())
        # collect the set of paths belonging to this group
        if (group, tuple(param_list)) in self.groups:
          old_param, paths = self.groups[group, tuple(param_list)]
        else:
          paths = { }
        paths[path] = None
        self.groups[group, tuple(param_list)] = (params, paths)

    # figure out the changed directories
    dirs = { }
    for path, change in self.changelist:
      if change.item_kind == svn.core.svn_node_dir:
        dirs[path] = None
      else:
        # a changed file counts as a change to its parent directory
        idx = path.rfind('/')
        if idx == -1:
          dirs[''] = None
        else:
          dirs[path[:idx]] = None

    dirlist = list(dirs.keys())

    commondir, dirlist = get_commondir(dirlist)

    # compose the basic subject line. later, we can prefix it.
    dirlist.sort()
    dirlist = ' '.join(dirlist)
    if commondir:
      self.output.subject = 'r%d - in %s: %s' % (repos.rev, commondir, dirlist)
    else:
      self.output.subject = 'r%d - %s' % (repos.rev, dirlist)

  def generate(self):
    "Generate email for the various groups and option-params."

    ### the groups need to be further compressed. if the headers and
    ### body are the same across groups, then we can have multiple To:
    ### addresses. SMTPOutput holds the entire message body in memory,
    ### so if the body doesn't change, then it can be sent N times
    ### rather than rebuilding it each time.

    subpool = svn.core.svn_pool_create(self.pool)

    # build a renderer, tied to our output stream
    renderer = TextCommitRenderer(self.output)

    for (group, param_tuple), (params, paths) in self.groups.items():
      self.output.start(group, params)

      # generate the content for this group and set of params
      generate_content(renderer, self.cfg, self.repos, self.changelist,
                       group, params, paths, subpool)

      self.output.finish()
      # clear (not destroy) the subpool so it can be reused for the next group
      svn.core.svn_pool_clear(subpool)

    svn.core.svn_pool_destroy(subpool)
+
+
class PropChange(Messenger):
  """Messenger for the 'propchange'/'propchange2' subcommands.

  ACTION is 'A' (added), 'M' (modified) or 'D' (deleted); hooks that
  predate the propchange2 form always pass 'A' (see main()).
  """

  def __init__(self, pool, cfg, repos, author, propname, action):
    Messenger.__init__(self, pool, cfg, repos, 'propchange_subject_prefix')
    self.author = author
    self.propname = propname
    self.action = action

    # collect the set of groups and the unique sets of params for the options
    self.groups = { }
    for (group, params) in self.cfg.which_groups('', None):
      # turn the params into a hashable object and stash it away
      param_list = sorted(params.items())
      self.groups[group, tuple(param_list)] = params

    self.output.subject = 'r%d - %s' % (repos.rev, propname)

  def generate(self):
    """Write one message per group describing the property change."""
    actions = { 'A': 'added', 'M': 'modified', 'D': 'deleted' }
    for (group, param_tuple), params in self.groups.items():
      self.output.start(group, params)
      self.output.write('Author: %s\n'
                        'Revision: %s\n'
                        'Property Name: %s\n'
                        'Action: %s\n'
                        '\n'
                        % (self.author, self.repos.rev, self.propname,
                           actions.get(self.action, 'Unknown (\'%s\')' \
                                       % self.action)))
      if self.action == 'A' or self.action not in actions:
        self.output.write('Property value:\n')
        propvalue = self.repos.get_rev_prop(self.propname)
        self.output.write(propvalue)
      elif self.action == 'M':
        # The old property value arrives on stdin (this is how the
        # post-revprop-change hook delivers it); diff it against the new
        # value.  Both NamedTemporaryFile objects must stay referenced
        # until run() completes — the files vanish when they are closed.
        self.output.write('Property diff:\n')
        tempfile1 = tempfile.NamedTemporaryFile()
        tempfile1.write(sys.stdin.read())
        tempfile1.flush()
        tempfile2 = tempfile.NamedTemporaryFile()
        tempfile2.write(self.repos.get_rev_prop(self.propname))
        tempfile2.flush()
        self.output.run(self.cfg.get_diff_cmd(group, {
          'label_from' : 'old property value',
          'label_to' : 'new property value',
          'from' : tempfile1.name,
          'to' : tempfile2.name,
          }))
      self.output.finish()
+
+
def get_commondir(dirlist):
  """Figure out the common portion/parent (commondir) of all the paths
  in DIRLIST and return a tuple consisting of commondir, dirlist.  If
  a commondir is found, the dirlist returned is rooted in that
  commondir.  If no commondir is found, dirlist is returned unchanged,
  and commondir is the empty string."""
  # A single path, or a list containing the root path '/', never yields
  # a common parent.
  if len(dirlist) < 2 or '/' in dirlist:
    return '', dirlist

  # Trim the candidate prefix against every other path in turn.
  common = dirlist[0].split('/')
  for other in dirlist[1:]:
    parts = other.split('/')
    keep = 0
    while keep < len(common) and keep < len(parts) \
          and common[keep] == parts[keep]:
      keep = keep + 1
    del common[keep:]
  commondir = '/'.join(common)

  if not commondir:
    # nothing in common, so leave the list of directories untouched
    return '', dirlist

  # strip the common portion (and its trailing slash) from each entry;
  # the commondir itself is represented as '.'
  strip = len(commondir) + 1
  rebased = [ ]
  for d in dirlist:
    rebased.append('.' if d == commondir else d[strip:])
  return commondir, rebased
+
+
class Lock(Messenger):
  """Messenger for the 'lock' and 'unlock' subcommands.

  The affected paths arrive on stdin, one per line, which is how the
  post-lock/post-unlock hooks deliver them.
  """

  def __init__(self, pool, cfg, repos, author, do_lock):
    # do_lock is True for 'lock' and False for 'unlock'
    self.author = author
    self.do_lock = do_lock

    Messenger.__init__(self, pool, cfg, repos,
                       (do_lock and 'lock_subject_prefix'
                        or 'unlock_subject_prefix'))

    # read all the locked paths from STDIN and strip off the trailing newlines
    self.dirlist = [x.rstrip() for x in sys.stdin.readlines()]

    # collect the set of groups and the unique sets of params for the options
    # self.groups maps (group, hashable-params) -> (params, paths); the
    # paths dict is used as a set (all values are None).
    self.groups = { }
    for path in self.dirlist:
      for (group, params) in self.cfg.which_groups(path, None):
        # turn the params into a hashable object and stash it away
        param_list = sorted(params.items())
        # collect the set of paths belonging to this group
        if (group, tuple(param_list)) in self.groups:
          old_param, paths = self.groups[group, tuple(param_list)]
        else:
          paths = { }
        paths[path] = None
        self.groups[group, tuple(param_list)] = (params, paths)

    commondir, dirlist = get_commondir(self.dirlist)

    # compose the basic subject line. later, we can prefix it.
    dirlist.sort()
    dirlist = ' '.join(dirlist)
    if commondir:
      self.output.subject = '%s: %s' % (commondir, dirlist)
    else:
      self.output.subject = '%s' % (dirlist)

    # The lock comment is the same for all paths, so we can just pull
    # the comment for the first path in the dirlist and cache it.
    self.lock = svn.fs.svn_fs_get_lock(self.repos.fs_ptr,
                                       self.dirlist[0], self.pool)

  def generate(self):
    """Write one message per (group, params) combination."""
    for (group, param_tuple), (params, paths) in self.groups.items():
      self.output.start(group, params)

      self.output.write('Author: %s\n'
                        '%s paths:\n' %
                        (self.author, self.do_lock and 'Locked' or 'Unlocked'))

      self.dirlist.sort()
      for dir in self.dirlist:
        # NOTE(review): the double '\n' leaves a blank line after every
        # path — looks deliberate, confirm before changing.
        self.output.write('   %s\n\n' % dir)

      if self.do_lock:
        self.output.write('Comment:\n%s\n' % (self.lock.comment or ''))

      self.output.finish()
+
+
class DiffSelections:
  """Which change kinds (add/copy/delete/modify) should have diffs shown.

  Parsed from the generate_diffs option; when that option is empty, all
  kinds default to on and the deprecated suppress_adds/suppress_deletes
  options are consulted instead.
  """

  def __init__(self, cfg, group, params):
    self.add = False
    self.copy = False
    self.delete = False
    self.modify = False

    gen_diffs = cfg.get('generate_diffs', group, params)

    ### Do a little dance for deprecated options. Note that even if you
    ### don't have an option anywhere in your configuration file, it
    ### still gets returned as non-None.
    if len(gen_diffs):
      # split() (rather than split(" ")) tolerates repeated whitespace;
      # unrecognized tokens (e.g. 'none') simply enable nothing.
      for item in gen_diffs.split():
        if item == 'add':
          self.add = True
        elif item == 'copy':
          self.copy = True
        elif item == 'delete':
          self.delete = True
        elif item == 'modify':
          self.modify = True
    else:
      self.add = True
      self.copy = True
      self.delete = True
      self.modify = True
      ### These options are deprecated
      if cfg.get('suppress_deletes', group, params) == 'yes':
        self.delete = False
      if cfg.get('suppress_adds', group, params) == 'yes':
        self.add = False
+
+
class DiffURLSelections:
  """Produce per-change URLs from the diff_{add,copy,delete,modify}_url
  configuration options."""

  def __init__(self, cfg, group, params):
    self.cfg = cfg
    self.group = group
    self.params = params

  def _get_url(self, action, repos_rev, change):
    # Extend the group's params with the change-specific substitution
    # variables; without them the option expansion could raise KeyError.
    params = self.params.copy()
    params['path'] = urllib_parse_quote(change.path) if change.path else None
    params['base_path'] = urllib_parse_quote(change.base_path) \
                          if change.base_path else None
    params['rev'] = repos_rev
    params['base_rev'] = change.base_rev

    return self.cfg.get("diff_%s_url" % action, self.group, params)

  def get_add_url(self, repos_rev, change):
    return self._get_url('add', repos_rev, change)

  def get_copy_url(self, repos_rev, change):
    return self._get_url('copy', repos_rev, change)

  def get_delete_url(self, repos_rev, change):
    return self._get_url('delete', repos_rev, change)

  def get_modify_url(self, repos_rev, change):
    return self._get_url('modify', repos_rev, change)
+
def generate_content(renderer, cfg, repos, changelist, group, params, paths,
                     pool):
  """Render one commit-message body for GROUP/PARAMS through RENDERER.

  CHANGELIST is the full (path, change) list for the revision; PATHS is
  the subset of paths that matched this group.  Changes outside PATHS
  are summarized (and optionally diffed) according to the
  show_nonmatching_paths option.
  """

  svndate = repos.get_rev_prop(svn.core.SVN_PROP_REVISION_DATE)
  ### pick a different date format?
  date = time.ctime(svn.core.secs_from_timestr(svndate, pool))

  diffsels = DiffSelections(cfg, group, params)
  diffurls = DiffURLSelections(cfg, group, params)

  show_nonmatching_paths = cfg.get('show_nonmatching_paths', group, params) \
                           or 'yes'

  # the commit_url option may reference %(rev)s, so add it to the params
  params_with_rev = params.copy()
  params_with_rev['rev'] = repos.rev
  commit_url = cfg.get('commit_url', group, params_with_rev)

  # figure out the lists of changes outside the selected path-space
  # (the chained assignment aliases one empty list four ways; that is
  # safe here because the defaults are only ever replaced, not mutated)
  other_added_data = other_replaced_data = other_deleted_data = \
    other_modified_data = [ ]
  if len(paths) != len(changelist) and show_nonmatching_paths != 'no':
    other_added_data = generate_list('A', changelist, paths, False)
    other_replaced_data = generate_list('R', changelist, paths, False)
    other_deleted_data = generate_list('D', changelist, paths, False)
    other_modified_data = generate_list('M', changelist, paths, False)

  if len(paths) != len(changelist) and show_nonmatching_paths == 'yes':
    other_diffs = DiffGenerator(changelist, paths, False, cfg, repos, date,
                                group, params, diffsels, diffurls, pool)
  else:
    other_diffs = None

  data = _data(
    author=repos.author,
    date=date,
    rev=repos.rev,
    log=repos.get_rev_prop(svn.core.SVN_PROP_REVISION_LOG) or '',
    commit_url=commit_url,
    added_data=generate_list('A', changelist, paths, True),
    replaced_data=generate_list('R', changelist, paths, True),
    deleted_data=generate_list('D', changelist, paths, True),
    modified_data=generate_list('M', changelist, paths, True),
    show_nonmatching_paths=show_nonmatching_paths,
    other_added_data=other_added_data,
    other_replaced_data=other_replaced_data,
    other_deleted_data=other_deleted_data,
    other_modified_data=other_modified_data,
    diffs=DiffGenerator(changelist, paths, True, cfg, repos, date, group,
                        params, diffsels, diffurls, pool),
    other_diffs=other_diffs,
    )
  renderer.render(data)
+
+
def generate_list(changekind, changelist, paths, in_paths):
  """Return _data records for every change of kind CHANGEKIND ('A', 'R',
  'D' or 'M') whose membership in PATHS equals IN_PATHS."""
  if changekind == 'A':
    wanted = svn.repos.CHANGE_ACTION_ADD
  elif changekind == 'R':
    wanted = svn.repos.CHANGE_ACTION_REPLACE
  elif changekind == 'D':
    wanted = svn.repos.CHANGE_ACTION_DELETE
  elif changekind == 'M':
    wanted = svn.repos.CHANGE_ACTION_MODIFY

  records = [ ]
  for path, change in changelist:
    if change.action != wanted:
      continue
    if (path in paths) != in_paths:
      continue
    # A change counts as copied when it was added/replaced with a
    # recorded copy source.
    was_copied = (change.action == svn.repos.CHANGE_ACTION_ADD \
                  or change.action == svn.repos.CHANGE_ACTION_REPLACE) \
                 and change.base_path
    records.append(_data(
      path=path,
      is_dir=change.item_kind == svn.core.svn_node_dir,
      props_changed=change.prop_changes,
      text_changed=change.text_changed,
      copied=was_copied,
      base_path=remove_leading_slashes(change.base_path),
      base_rev=change.base_rev,
      ))

  return records
+
+
class DiffGenerator:
  """This is a generator-like object returning DiffContent objects.

  Iteration uses the old __getitem__ protocol: each call yields one
  _data record describing a file's diff, and IndexError ends the
  sequence once the changelist is exhausted.
  """

  def __init__(self, changelist, paths, in_paths, cfg, repos, date, group,
               params, diffsels, diffurls, pool):
    # changelist: sequence of (path, change) pairs for this revision
    # paths/in_paths: generate diffs for paths inside (or outside) the
    #   group's matched-path set
    # diffsels: which change kinds (add/copy/modify/delete) get diffs
    # diffurls: per-change-kind URL generators (may replace inline diffs)
    self.changelist = changelist
    self.paths = paths
    self.in_paths = in_paths
    self.cfg = cfg
    self.repos = repos
    self.date = date
    self.group = group
    self.params = params
    self.diffsels = diffsels
    self.diffurls = diffurls
    self.pool = pool

    self.diff = self.diff_url = None

    # index of the next changelist entry to examine
    self.idx = 0

  def __nonzero__(self):
    # we always have some items
    return True

  def __getitem__(self, idx):
    # NOTE: the IDX argument is ignored; we advance through
    # self.changelist ourselves, skipping entries with no diff.
    while True:
      if self.idx == len(self.changelist):
        raise IndexError

      path, change = self.changelist[self.idx]
      self.idx = self.idx + 1

      diff = diff_url = None
      kind = None
      label1 = None
      label2 = None
      src_fname = None
      dst_fname = None
      binary = None
      singular = None
      content = None

      # just skip directories. they have no diffs.
      if change.item_kind == svn.core.svn_node_dir:
        continue

      # is this change in (or out of) the set of matched paths?
      if (path in self.paths) != self.in_paths:
        continue

      if change.base_rev != -1:
        svndate = self.repos.get_rev_prop(svn.core.SVN_PROP_REVISION_DATE,
                                          change.base_rev)
        ### pick a different date format?
        base_date = time.ctime(svn.core.secs_from_timestr(svndate, self.pool))
      else:
        # no base revision: nothing to date-stamp
        base_date = ''

      # figure out if/how to generate a diff

      base_path = remove_leading_slashes(change.base_path)
      if change.action == svn.repos.CHANGE_ACTION_DELETE:
        # it was delete.
        kind = 'D'

        # get the diff url, if any is specified
        diff_url = self.diffurls.get_delete_url(self.repos.rev, change)

        # show the diff?
        if self.diffsels.delete:
          diff = svn.fs.FileDiff(self.repos.get_root(change.base_rev),
                                 base_path, None, None, self.pool)

        label1 = '%s\t%s\t(r%s)' % (base_path, self.date, change.base_rev)
        label2 = '/dev/null\t00:00:00 1970\t(deleted)'
        singular = True

      elif change.action == svn.repos.CHANGE_ACTION_ADD \
          or change.action == svn.repos.CHANGE_ACTION_REPLACE:
        # a base path at a real revision means this add is a copy
        if base_path and (change.base_rev != -1):

          # any diff of interest?
          if change.text_changed:
            # this file was copied and modified.
            kind = 'W'

            # get the diff url, if any is specified
            diff_url = self.diffurls.get_copy_url(self.repos.rev, change)

            # show the diff?
            if self.diffsels.modify:
              diff = svn.fs.FileDiff(self.repos.get_root(change.base_rev),
                                     base_path,
                                     self.repos.root_this, change.path,
                                     self.pool)
              label1 = '%s\t%s\t(r%s, copy source)' \
                       % (base_path, base_date, change.base_rev)
              label2 = '%s\t%s\t(r%s)' \
                       % (change.path, self.date, self.repos.rev)
              singular = False
          else:
            # this file was copied.
            kind = 'C'
            if self.diffsels.copy:
              diff = svn.fs.FileDiff(None, None, self.repos.root_this,
                                     change.path, self.pool)
              label1 = '/dev/null\t00:00:00 1970\t' \
                       '(empty, because file is newly added)'
              label2 = '%s\t%s\t(r%s, copy of r%s, %s)' \
                       % (change.path, self.date, self.repos.rev, \
                          change.base_rev, base_path)
              singular = False
        else:
          # the file was added.
          kind = 'A'

          # get the diff url, if any is specified
          diff_url = self.diffurls.get_add_url(self.repos.rev, change)

          # show the diff?
          if self.diffsels.add:
            diff = svn.fs.FileDiff(None, None, self.repos.root_this,
                                   change.path, self.pool)
            label1 = '/dev/null\t00:00:00 1970\t' \
                     '(empty, because file is newly added)'
            label2 = '%s\t%s\t(r%s)' \
                     % (change.path, self.date, self.repos.rev)
            singular = True

      elif not change.text_changed:
        # the text didn't change, so nothing to show.
        continue
      else:
        # a simple modification.
        kind = 'M'

        # get the diff url, if any is specified
        diff_url = self.diffurls.get_modify_url(self.repos.rev, change)

        # show the diff?
        if self.diffsels.modify:
          diff = svn.fs.FileDiff(self.repos.get_root(change.base_rev),
                                 base_path,
                                 self.repos.root_this, change.path,
                                 self.pool)
          label1 = '%s\t%s\t(r%s)' \
                   % (base_path, base_date, change.base_rev)
          label2 = '%s\t%s\t(r%s)' \
                   % (change.path, self.date, self.repos.rev)
          singular = False

      if diff:
        binary = diff.either_binary()
        if binary:
          content = src_fname = dst_fname = None
        else:
          src_fname, dst_fname = diff.get_files()
          try:
            content = DiffContent(self.cfg.get_diff_cmd(self.group, {
              'label_from' : label1,
              'label_to' : label2,
              'from' : src_fname,
              'to' : dst_fname,
              }))
          except OSError:
            # diff command does not exist, try difflib.unified_diff()
            content = DifflibDiffContent(label1, label2, src_fname, dst_fname)

      # return a data item for this diff
      return _data(
        path=change.path,
        base_path=base_path,
        base_rev=change.base_rev,
        diff=diff,
        diff_url=diff_url,
        kind=kind,
        label_from=label1,
        label_to=label2,
        from_fname=src_fname,
        to_fname=dst_fname,
        binary=binary,
        singular=singular,
        content=content,
        )
+
+def _classify_diff_line(line, seen_change):
+ # classify the type of line.
+ first = line[:1]
+ ltype = ''
+ if first == '@':
+ seen_change = True
+ ltype = 'H'
+ elif first == '-':
+ if seen_change:
+ ltype = 'D'
+ else:
+ ltype = 'F'
+ elif first == '+':
+ if seen_change:
+ ltype = 'A'
+ else:
+ ltype = 'T'
+ elif first == ' ':
+ ltype = 'C'
+ else:
+ ltype = 'U'
+
+ if line[-2] == '\r':
+ line=line[0:-2] + '\n' # remove carriage return
+
+ return line, ltype, seen_change
+
+
class DiffContent:
  """This is a generator-like object returning annotated lines of a diff.

  Lines come from an external diff command run as a child process;
  iteration uses the __getitem__ protocol and ends with IndexError.
  """

  def __init__(self, cmd):
    # cmd: argv list for the external diff command (see
    # Config.get_diff_cmd); shell=False semantics via the list form
    self.seen_change = False

    # By default we choose to incorporate child stderr into the output
    self.pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                 stderr=subprocess.STDOUT,
                                 close_fds=sys.platform != "win32")

  def __nonzero__(self):
    # we always have some items
    return True

  def __getitem__(self, idx):
    # IDX is ignored; lines are consumed sequentially from the child.
    if self.pipe is None:
      raise IndexError

    line = self.pipe.stdout.readline()
    if not line:
      # wait on the child so we don't end up with a billion zombies
      self.pipe.wait()
      self.pipe = None
      raise IndexError

    # NOTE(review): on Python 3 this pipe yields bytes, not str --
    # confirm downstream consumers handle the encoding
    line, ltype, self.seen_change = _classify_diff_line(line, self.seen_change)
    return _data(
      raw=line,
      text=line[1:-1], # remove indicator and newline
      type=ltype,
      )
+
class DifflibDiffContent:
  """This is a generator-like object returning annotated lines of a diff.

  Fallback used when the configured external diff command cannot be
  executed: the diff is produced in-process with
  difflib.unified_diff().  LABEL_FROM/LABEL_TO become the ---/+++
  headers; iteration uses the __getitem__ protocol and ends with
  IndexError.
  """

  def __init__(self, label_from, label_to, from_file, to_file):
    import difflib
    self.seen_change = False
    # mode 'U' was removed in Python 3.11; plain text mode already
    # provides universal-newline translation on Python 3
    with open(from_file) as src:
      fromlines = src.readlines()
    with open(to_file) as dst:
      tolines = dst.readlines()
    self.diff = difflib.unified_diff(fromlines, tolines,
                                     label_from, label_to)

  def __nonzero__(self):
    # we always have some items (Python 2 truth hook; on Python 3 the
    # default object truthiness is also True, so behavior matches)
    return True

  def __getitem__(self, idx):
    # IDX is ignored; lines are consumed sequentially from the
    # unified_diff generator.
    try:
      # next() works on Python 2 and 3; the old .next() method call
      # was Python-2-only
      line = next(self.diff)
    except StopIteration:
      raise IndexError

    line, ltype, self.seen_change = _classify_diff_line(line, self.seen_change)
    return _data(
      raw=line,
      text=line[1:-1], # remove indicator and newline
      type=ltype,
      )
+
class TextCommitRenderer:
  """This class will render the commit mail in plain text.

  The exact output format is load-bearing: the mailer test suite
  compares it byte-for-byte against recorded expected output.
  """

  def __init__(self, output):
    # output: any object with a write() method (e.g. a mail pipe)
    self.output = output

  def render(self, data):
    "Render the commit defined by 'data'."

    w = self.output.write

    w('Author: %s\nDate: %s\nNew Revision: %s\n' % (data.author,
                                                    data.date,
                                                    data.rev))

    if data.commit_url:
      w('URL: %s\n\n' % data.commit_url)
    else:
      w('\n')

    w('Log:\n%s\n\n' % data.log.strip())

    # print summary sections
    self._render_list('Added', data.added_data)
    self._render_list('Replaced', data.replaced_data)
    self._render_list('Deleted', data.deleted_data)
    self._render_list('Modified', data.modified_data)

    if data.other_added_data or data.other_replaced_data \
           or data.other_deleted_data or data.other_modified_data:
      if data.show_nonmatching_paths:
        w('\nChanges in other areas also in this revision:\n')
        self._render_list('Added', data.other_added_data)
        self._render_list('Replaced', data.other_replaced_data)
        self._render_list('Deleted', data.other_deleted_data)
        self._render_list('Modified', data.other_modified_data)
      else:
        w('and changes in other areas\n')

    self._render_diffs(data.diffs, '')
    if data.other_diffs:
      self._render_diffs(data.other_diffs,
                         '\nDiffs of changes in other areas also'
                         ' in this revision:\n')

  def _render_list(self, header, data_list):
    # Write one summary section (e.g. 'Added:') listing each path with
    # property-change and copy annotations; no-op for an empty list.
    if not data_list:
      return

    w = self.output.write
    w(header + ':\n')
    for d in data_list:
      if d.is_dir:
        is_dir = '/'
      else:
        is_dir = ''
      if d.props_changed:
        if d.text_changed:
          props = ' (contents, props changed)'
        else:
          props = ' (props changed)'
      else:
        props = ''
      w('   %s%s%s\n' % (d.path, is_dir, props))
      # d.copied holds the copy-source base_path when set (see
      # generate_list), so this line only appears for copies
      if d.copied:
        if is_dir:
          text = ''
        elif d.text_changed:
          text = ', changed'
        else:
          text = ' unchanged'
        w('      - copied%s from r%d, %s%s\n'
          % (text, d.base_rev, d.base_path, is_dir))

  def _render_diffs(self, diffs, section_header):
    """Render diffs. Write the SECTION_HEADER if there are actually
    any diffs to render."""
    if not diffs:
      return
    w = self.output.write
    # defer the header until we know at least one diff will be shown
    section_header_printed = False

    for diff in diffs:
      if not diff.diff and not diff.diff_url:
        continue
      if not section_header_printed:
        w(section_header)
        section_header_printed = True
      if diff.kind == 'D':
        w('\nDeleted: %s\n' % diff.base_path)
      elif diff.kind == 'A':
        w('\nAdded: %s\n' % diff.path)
      elif diff.kind == 'C':
        w('\nCopied: %s (from r%d, %s)\n'
          % (diff.path, diff.base_rev, diff.base_path))
      elif diff.kind == 'W':
        w('\nCopied and modified: %s (from r%d, %s)\n'
          % (diff.path, diff.base_rev, diff.base_path))
      else:
        # kind == 'M'
        w('\nModified: %s\n' % diff.path)

      if diff.diff_url:
        w('URL: %s\n' % diff.diff_url)

      if not diff.diff:
        continue

      w(SEPARATOR + '\n')

      if diff.binary:
        if diff.singular:
          w('Binary file. No diff available.\n')
        else:
          w('Binary file (source and/or target). No diff available.\n')
        continue

      for line in diff.content:
        w(line.raw)
+
+
class Repository:
  "Hold roots and other information about the repository."

  def __init__(self, repos_dir, rev, pool):
    """Open the repository at REPOS_DIR and prime data for revision REV."""
    self.repos_dir = repos_dir
    self.rev = rev
    self.pool = pool

    self.repos_ptr = svn.repos.open(repos_dir, pool)
    self.fs_ptr = svn.repos.fs(self.repos_ptr)

    # cache of revision-root objects, keyed by revision number
    self.roots = { }

    self.root_this = self.get_root(rev)

    self.author = self.get_rev_prop(svn.core.SVN_PROP_REVISION_AUTHOR)

  def get_rev_prop(self, propname, rev = None):
    """Return revision property PROPNAME of REV (default: self.rev)."""
    # test with 'is None': revision 0 is a real, valid revision, and the
    # previous truthiness test ('if not rev') silently remapped it to
    # self.rev
    if rev is None:
      rev = self.rev
    return svn.fs.revision_prop(self.fs_ptr, rev, propname, self.pool)

  def get_root(self, rev):
    """Return the revision root for REV, creating and caching it on demand."""
    try:
      return self.roots[rev]
    except KeyError:
      pass
    root = self.roots[rev] = svn.fs.revision_root(self.fs_ptr, rev, self.pool)
    return root
+
+
class Config:
  """Parsed mailer.conf.

  Each config section becomes an attribute holding a _sub_section
  object whose attributes are the section's raw option values.
  [maps] options are rewritten into lookup callables, and group
  sections are matched against the repository/path via regexes.
  """

  # The predefined configuration sections. These are omitted from the
  # set of groups.
  _predefined = ('general', 'defaults', 'maps')

  def __init__(self, fname, repos, global_params):
    # fname: config file path; repos: Repository instance;
    # global_params: substitution params always available to groups
    cp = configparser.ConfigParser()
    cp.read(fname)

    # record the (non-default) groups that we find
    self._groups = [ ]

    for section in cp.sections():
      if not hasattr(self, section):
        section_ob = _sub_section()
        setattr(self, section, section_ob)
        if section not in self._predefined:
          self._groups.append(section)
      else:
        section_ob = getattr(self, section)
      for option in cp.options(section):
        # get the raw value -- we use the same format for *our* interpolation
        value = cp.get(section, option, raw=1)
        setattr(section_ob, option, value)

    # be compatible with old format config files
    if hasattr(self.general, 'diff') and not hasattr(self.defaults, 'diff'):
      self.defaults.diff = self.general.diff
    if not hasattr(self, 'maps'):
      self.maps = _sub_section()

    # these params are always available, although they may be overridden
    self._global_params = global_params.copy()

    # prepare maps. this may remove sections from consideration as a group.
    self._prep_maps()

    # process all the group sections.
    self._prep_groups(repos)

  def is_set(self, option):
    """Return None if the option is not set; otherwise, its value is returned.

    The option is specified as a dotted symbol, such as 'general.mail_command'
    """
    ob = self
    for part in option.split('.'):
      if not hasattr(ob, part):
        return None
      ob = getattr(ob, part)
    return ob

  def get(self, option, group, params):
    "Get a config value with appropriate substitutions and value mapping."

    # find the right value: group section first, then [defaults]
    value = None
    if group:
      sub = getattr(self, group)
      value = getattr(sub, option, None)
    if value is None:
      value = getattr(self.defaults, option, '')

    # parameterize it
    if params is not None:
      value = value % params

    # apply any mapper
    mapper = getattr(self.maps, option, None)
    if mapper is not None:
      value = mapper(value)

    # Apply any parameters that may now be available for
    # substitution that were not before the mapping.
    if value is not None and params is not None:
      value = value % params

    return value

  def get_diff_cmd(self, group, args):
    "Get a diff command as a list of argv elements."
    ### do some better splitting to enable quoting of spaces
    diff_cmd = self.get('diff', group, None).split()

    cmd = [ ]
    for part in diff_cmd:
      # each argv element may itself contain %()s substitutions
      cmd.append(part % args)
    return cmd

  def _prep_maps(self):
    "Rewrite the [maps] options into callables that look up values."

    mapsections = []

    for optname, mapvalue in vars(self.maps).items():
      if mapvalue[:1] == '[':
        # a section is acting as a mapping
        sectname = mapvalue[1:-1]
        if not hasattr(self, sectname):
          raise UnknownMappingSection(sectname)
        # construct a lambda to look up the given value as an option name,
        # and return the option's value. if the option is not present,
        # then just return the value unchanged.
        # (sect is bound as a default argument so each lambda captures
        # its own section object, not the loop variable)
        setattr(self.maps, optname,
                lambda value,
                       sect=getattr(self, sectname): getattr(sect,
                                                             value.lower(),
                                                             value))
        # mark for removal when all optnames are done
        if sectname not in mapsections:
          mapsections.append(sectname)

      # elif test for other mapper types. possible examples:
      #   dbm:filename.db
      #   file:two-column-file.txt
      #   ldap:some-query-spec
      # just craft a mapper function and insert it appropriately

      else:
        raise UnknownMappingSpec(mapvalue)

    # remove each mapping section from consideration as a group
    for sectname in mapsections:
      self._groups.remove(sectname)


  def _prep_groups(self, repos):
    # Precompile each group's matching machinery into self._group_re:
    # (group, for_paths_re, exclude_paths_re, params, search_logmsg_re)
    # tuples, with the default group appended last.
    self._group_re = [ ]

    repos_dir = os.path.abspath(repos.repos_dir)

    # compute the default repository-based parameters. start with some
    # basic parameters, then bring in the regex-based params.
    self._default_params = self._global_params

    try:
      match = re.match(self.defaults.for_repos, repos_dir)
      if match:
        self._default_params = self._default_params.copy()
        self._default_params.update(match.groupdict())
    except AttributeError:
      # there is no self.defaults.for_repos
      pass

    # select the groups that apply to this repository
    for group in self._groups:
      sub = getattr(self, group)
      params = self._default_params
      if hasattr(sub, 'for_repos'):
        match = re.match(sub.for_repos, repos_dir)
        if not match:
          continue
        params = params.copy()
        params.update(match.groupdict())

      # if a matching rule hasn't been given, then use the empty string
      # as it will match all paths
      for_paths = getattr(sub, 'for_paths', '')
      exclude_paths = getattr(sub, 'exclude_paths', None)
      if exclude_paths:
        exclude_paths_re = re.compile(exclude_paths)
      else:
        exclude_paths_re = None

      # check search_logmsg re
      search_logmsg = getattr(sub, 'search_logmsg', None)
      if search_logmsg is not None:
        search_logmsg_re = re.compile(search_logmsg)
      else:
        search_logmsg_re = None

      self._group_re.append((group,
                             re.compile(for_paths),
                             exclude_paths_re,
                             params,
                             search_logmsg_re))

    # after all the groups are done, add in the default group
    try:
      self._group_re.append((None,
                             re.compile(self.defaults.for_paths),
                             None,
                             self._default_params,
                             None))
    except AttributeError:
      # there is no self.defaults.for_paths
      pass

  def which_groups(self, path, logmsg):
    "Return the path's associated groups."
    groups = []
    for group, pattern, exclude_pattern, repos_params, search_logmsg_re in self._group_re:
      match = pattern.match(path)
      if match:
        if exclude_pattern and exclude_pattern.match(path):
          continue
        params = repos_params.copy()
        params.update(match.groupdict())

        if search_logmsg_re is None:
          groups.append((group, params))
        else:
          # a group with search_logmsg may match (and be mailed)
          # once per regex hit in the log message
          if logmsg is None:
            logmsg = ''

          for match in search_logmsg_re.finditer(logmsg):
            # Add captured variables to (a copy of) params
            msg_params = params.copy()
            msg_params.update(match.groupdict())
            groups.append((group, msg_params))

    # nothing matched: fall back to the default group
    if not groups:
      groups.append((None, self._default_params))

    return groups
+
+
class _sub_section:
  # Empty attribute container: Config attaches each config section's
  # options to an instance of this class as plain attributes.
  pass
+
+class _data:
+ "Helper class to define an attribute-based hunk o' data."
+ def __init__(self, **kw):
+ vars(self).update(kw)
+
class MissingConfig(Exception):
  # raised by the __main__ driver when no mailer.conf can be located
  pass
class UnknownMappingSection(Exception):
  # raised by Config._prep_maps when a [maps] value names a section
  # that does not exist
  pass
class UnknownMappingSpec(Exception):
  # raised by Config._prep_maps when a [maps] value is not of the
  # '[section]' form
  pass
class UnknownSubcommand(Exception):
  # NOTE(review): not raised anywhere in this hunk; presumably used by
  # code earlier in the file -- confirm before removing
  pass
+
+
if __name__ == '__main__':
  def usage():
    # print the usage text and exit non-zero; the per-subcommand
    # argument counts must stay in sync with cmd_list below
    scriptname = os.path.basename(sys.argv[0])
    sys.stderr.write(
"""USAGE: %s commit      REPOS REVISION [CONFIG-FILE]
       %s propchange  REPOS REVISION AUTHOR REVPROPNAME [CONFIG-FILE]
       %s propchange2 REPOS REVISION AUTHOR REVPROPNAME ACTION [CONFIG-FILE]
       %s lock        REPOS AUTHOR [CONFIG-FILE]
       %s unlock      REPOS AUTHOR [CONFIG-FILE]

If no CONFIG-FILE is provided, the script will first search for a mailer.conf
file in REPOS/conf/.  Failing that, it will search the directory in which
the script itself resides.

ACTION was added as a fifth argument to the post-revprop-change hook
in Subversion 1.2.0.  Its value is one of 'A', 'M' or 'D' to indicate
if the property was added, modified or deleted, respectively.

""" % (scriptname, scriptname, scriptname, scriptname, scriptname))
    sys.exit(1)

  # Command list: subcommand -> number of arguments expected (not including
  # the repository directory and config-file)
  cmd_list = {'commit'     : 1,
              'propchange' : 3,
              'propchange2': 4,
              'lock'       : 1,
              'unlock'     : 1,
              }

  config_fname = None
  argc = len(sys.argv)
  if argc < 3:
    usage()

  cmd = sys.argv[1]
  repos_dir = svn.core.svn_path_canonicalize(sys.argv[2])
  try:
    expected_args = cmd_list[cmd]
  except KeyError:
    usage()

  # enforce the exact argument count; one extra argument is the
  # optional CONFIG-FILE
  if argc < (expected_args + 3):
    usage()
  elif argc > expected_args + 4:
    usage()
  elif argc == (expected_args + 4):
    config_fname = sys.argv[expected_args + 3]

  # Settle on a config file location, and open it.
  if config_fname is None:
    # Default to REPOS-DIR/conf/mailer.conf.
    config_fname = os.path.join(repos_dir, 'conf', 'mailer.conf')
    if not os.path.exists(config_fname):
      # Okay. Look for 'mailer.conf' as a sibling of this script.
      config_fname = os.path.join(os.path.dirname(sys.argv[0]), 'mailer.conf')
      if not os.path.exists(config_fname):
        raise MissingConfig(config_fname)

  # 'main' is defined earlier in this file (outside this hunk)
  svn.core.run_app(main, cmd, config_fname, repos_dir,
                   sys.argv[3:3+expected_args])
+
+# ------------------------------------------------------------------------
+# TODO
+#
+# * add configuration options
+# - each group defines delivery info:
+# o whether to set Reply-To and/or Mail-Followup-To
+#     (btw: it is legal to set Reply-To since this is the originator of the
+# mail; i.e. different from MLMs that munge it)
+# - each group defines content construction:
+# o max size of diff before trimming
+# o max size of entire commit message before truncation
+# - per-repository configuration
+# o extra config living in repos
+# o optional, non-mail log file
+# o look up authors (username -> email; for the From: header) in a
+# file(s) or DBM
+# * get rid of global functions that should properly be class methods
diff --git a/tools/hook-scripts/mailer/tests/mailer-init.sh b/tools/hook-scripts/mailer/tests/mailer-init.sh
new file mode 100755
index 0000000..d0a4a79
--- /dev/null
+++ b/tools/hook-scripts/mailer/tests/mailer-init.sh
@@ -0,0 +1,116 @@
#!/bin/sh
#
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
#
#
#
# mailer-init.sh: create and initialize a repository for the mailer tests
#
# USAGE: ./mailer-init.sh
#

# resolve the directory this script lives in (absolute path)
scripts="`dirname $0`"
scripts="`cd $scripts && pwd`"

# the shell PID ($$) gives each run its own scratch directory
d=$scripts/mailer-init.$$
mkdir $d
cd $d
echo "test directory is: $d"

svnadmin create repos
svn co file://`pwd`/repos wc
cd wc

# create a bunch of dirs and files
mkdir dir1 dir2
echo file1 > file1
echo file2 > file2
echo file3 > dir1/file3
echo file4 > dir1/file4
echo file5 > dir2/file5
echo file6 > dir2/file6
svn add *
svn commit -m "initial load"

# make some changes and set some properties
svn ps prop1 propval1 file1
echo change C1 >> file2
svn ps svn:keywords Id file2
svn ps --force svn:new_svn_prop val file2
svn ps prop1 propval1 file2
svn ps prop3 propval3 dir1
echo change C2 >> dir2/file5
svn commit -m "two file changes.  Fixes Blah#123"

# copy a file and a dir and change property
svn cp file1 dir2/file7
svn cp dir1 dir3
svn ps prop3 propval4 dir3
svn commit -m "two copies"

# copy and modify a file
svn cp file1 dir3/file8
echo change C3 >> dir3/file8
svn commit -m "copied and changed"

# change and delete properties
svn ps svn:keywords Date file2
svn ps prop2 propval2 file2
svn pd prop1 file2
svn pd svn:new_svn_prop file2
svn ps prop3 propval4 dir1
svn pd prop3 dir3
svn up  # make sure our dirs are up to date
svn commit -m "changes and deletes of properties"

# add a file, add a dir, and make a change
echo file9 > file9
svn add file9
svn mkdir dir4
echo change C4 >> dir1/file3
svn commit -m "mixed addition and change.  Fixes Blaz#456 Blah#987"

# add a file, add a dir, delete a file, delete a dir, and make a change
echo file10 > dir1/file10
svn add dir1/file10
svn mkdir dir3/dir5
svn rm file2 dir2
echo change C5 >> dir3/file3
svn up  # make sure our dirs are up to date
svn commit -m "adds, deletes, and a change"

# copy a dir and change a file in it
svn cp dir3 dir6
echo change C6 >> dir6/file4
svn commit -m "copy dir, then make a change"

# add a binary file and set property to binary value
# NOTE(review): "octect-stream" is a typo for "octet-stream", but the
# recorded expected-output fixtures depend on this exact value -- do
# not "fix" it without regenerating them
printf "\x00\x01\x02\x03\x04\n" > file11
svn add file11
svn ps svn:mime-type application/octect-stream file11
svn ps prop2 -F file11 file9
svn commit -m "add binary file"

# change the binary file and set property to non binary value
printf "\x20\x01\x02\x20\n" > file11
svn ps prop2 propval2 file9
svn commit -m "change binary file"

# tweak the commit dates to known quantities
$scripts/mailer-tweak.py ../repos
diff --git a/tools/hook-scripts/mailer/tests/mailer-t1.output b/tools/hook-scripts/mailer/tests/mailer-t1.output
new file mode 100644
index 0000000..10466ff
--- /dev/null
+++ b/tools/hook-scripts/mailer/tests/mailer-t1.output
@@ -0,0 +1,751 @@
+Group: file
+Subject: r1 - dir1 dir2
+
+Author: mailer test
+Date: Sun Sep 9 01:46:40 2001
+New Revision: 1
+
+Log:
+initial load
+
+Added:
+ file1
+ file2
+
+Added: file1
+==============================================================================
+--- /dev/null 00:00:00 1970 (empty, because file is newly added)
++++ file1 Sun Sep 9 01:46:40 2001 (r1)
+@@ -0,0 +1 @@
++file1
+
+Added: file2
+==============================================================================
+--- /dev/null 00:00:00 1970 (empty, because file is newly added)
++++ file2 Sun Sep 9 01:46:40 2001 (r1)
+@@ -0,0 +1 @@
++file2
+Group: file plus other areas
+Subject: r1 - dir1 dir2
+
+Author: mailer test
+Date: Sun Sep 9 01:46:40 2001
+New Revision: 1
+
+Log:
+initial load
+
+Added:
+ file1
+ file2
+
+Changes in other areas also in this revision:
+Added:
+ dir1/
+ dir1/file3
+ dir1/file4
+ dir2/
+ dir2/file5
+ dir2/file6
+
+Added: file1
+==============================================================================
+--- /dev/null 00:00:00 1970 (empty, because file is newly added)
++++ file1 Sun Sep 9 01:46:40 2001 (r1)
+@@ -0,0 +1 @@
++file1
+
+Added: file2
+==============================================================================
+--- /dev/null 00:00:00 1970 (empty, because file is newly added)
++++ file2 Sun Sep 9 01:46:40 2001 (r1)
+@@ -0,0 +1 @@
++file2
+
+Diffs of changes in other areas also in this revision:
+
+Added: dir1/file3
+==============================================================================
+--- /dev/null 00:00:00 1970 (empty, because file is newly added)
++++ dir1/file3 Sun Sep 9 01:46:40 2001 (r1)
+@@ -0,0 +1 @@
++file3
+
+Added: dir1/file4
+==============================================================================
+--- /dev/null 00:00:00 1970 (empty, because file is newly added)
++++ dir1/file4 Sun Sep 9 01:46:40 2001 (r1)
+@@ -0,0 +1 @@
++file4
+
+Added: dir2/file5
+==============================================================================
+--- /dev/null 00:00:00 1970 (empty, because file is newly added)
++++ dir2/file5 Sun Sep 9 01:46:40 2001 (r1)
+@@ -0,0 +1 @@
++file5
+
+Added: dir2/file6
+==============================================================================
+--- /dev/null 00:00:00 1970 (empty, because file is newly added)
++++ dir2/file6 Sun Sep 9 01:46:40 2001 (r1)
+@@ -0,0 +1 @@
++file6
+Group: All
+Subject: r1 - dir1 dir2
+
+Author: mailer test
+Date: Sun Sep 9 01:46:40 2001
+New Revision: 1
+
+Log:
+initial load
+
+Added:
+ dir1/
+ dir1/file3
+ dir1/file4
+ dir2/
+ dir2/file5
+ dir2/file6
+ file1
+ file2
+
+Added: dir1/file3
+==============================================================================
+--- /dev/null 00:00:00 1970 (empty, because file is newly added)
++++ dir1/file3 Sun Sep 9 01:46:40 2001 (r1)
+@@ -0,0 +1 @@
++file3
+
+Added: dir1/file4
+==============================================================================
+--- /dev/null 00:00:00 1970 (empty, because file is newly added)
++++ dir1/file4 Sun Sep 9 01:46:40 2001 (r1)
+@@ -0,0 +1 @@
++file4
+
+Added: dir2/file5
+==============================================================================
+--- /dev/null 00:00:00 1970 (empty, because file is newly added)
++++ dir2/file5 Sun Sep 9 01:46:40 2001 (r1)
+@@ -0,0 +1 @@
++file5
+
+Added: dir2/file6
+==============================================================================
+--- /dev/null 00:00:00 1970 (empty, because file is newly added)
++++ dir2/file6 Sun Sep 9 01:46:40 2001 (r1)
+@@ -0,0 +1 @@
++file6
+
+Added: file1
+==============================================================================
+--- /dev/null 00:00:00 1970 (empty, because file is newly added)
++++ file1 Sun Sep 9 01:46:40 2001 (r1)
+@@ -0,0 +1 @@
++file1
+
+Added: file2
+==============================================================================
+--- /dev/null 00:00:00 1970 (empty, because file is newly added)
++++ file2 Sun Sep 9 01:46:40 2001 (r1)
+@@ -0,0 +1 @@
++file2
+Group: file
+Subject: r2 - dir1 dir2
+
+Author: mailer test
+Date: Sun Sep 9 04:33:20 2001
+New Revision: 2
+
+Log:
+two file changes. Fixes Blah#123
+
+Modified:
+ file1 (props changed)
+ file2 (contents, props changed)
+
+Modified: file2
+==============================================================================
+--- file2 Sun Sep 9 01:46:40 2001 (r1)
++++ file2 Sun Sep 9 04:33:20 2001 (r2)
+@@ -1 +1,2 @@
+ file2
++change C1
+Group: bugtracker
+Subject: Fix for Blah#123: r2 - dir1 dir2
+
+Author: mailer test
+Date: Sun Sep 9 04:33:20 2001
+New Revision: 2
+
+Log:
+two file changes. Fixes Blah#123
+
+Modified:
+ dir1/ (props changed)
+ dir2/file5
+ file1 (props changed)
+ file2 (contents, props changed)
+
+Modified: dir2/file5
+==============================================================================
+--- dir2/file5 Sun Sep 9 01:46:40 2001 (r1)
++++ dir2/file5 Sun Sep 9 04:33:20 2001 (r2)
+@@ -1 +1,2 @@
+ file5
++change C2
+
+Modified: file2
+==============================================================================
+--- file2 Sun Sep 9 01:46:40 2001 (r1)
++++ file2 Sun Sep 9 04:33:20 2001 (r2)
+@@ -1 +1,2 @@
+ file2
++change C1
+Group: All
+Subject: r2 - dir1 dir2
+
+Author: mailer test
+Date: Sun Sep 9 04:33:20 2001
+New Revision: 2
+
+Log:
+two file changes. Fixes Blah#123
+
+Modified:
+ dir1/ (props changed)
+ dir2/file5
+ file1 (props changed)
+ file2 (contents, props changed)
+
+Modified: dir2/file5
+==============================================================================
+--- dir2/file5 Sun Sep 9 01:46:40 2001 (r1)
++++ dir2/file5 Sun Sep 9 04:33:20 2001 (r2)
+@@ -1 +1,2 @@
+ file5
++change C2
+
+Modified: file2
+==============================================================================
+--- file2 Sun Sep 9 01:46:40 2001 (r1)
++++ file2 Sun Sep 9 04:33:20 2001 (r2)
+@@ -1 +1,2 @@
+ file2
++change C1
+Group: file plus other areas
+Subject: r2 - dir1 dir2
+
+Author: mailer test
+Date: Sun Sep 9 04:33:20 2001
+New Revision: 2
+
+Log:
+two file changes. Fixes Blah#123
+
+Modified:
+ file1 (props changed)
+ file2 (contents, props changed)
+
+Changes in other areas also in this revision:
+Modified:
+ dir1/ (props changed)
+ dir2/file5
+
+Modified: file2
+==============================================================================
+--- file2 Sun Sep 9 01:46:40 2001 (r1)
++++ file2 Sun Sep 9 04:33:20 2001 (r2)
+@@ -1 +1,2 @@
+ file2
++change C1
+
+Diffs of changes in other areas also in this revision:
+
+Modified: dir2/file5
+==============================================================================
+--- dir2/file5 Sun Sep 9 01:46:40 2001 (r1)
++++ dir2/file5 Sun Sep 9 04:33:20 2001 (r2)
+@@ -1 +1,2 @@
+ file5
++change C2
+Group: All
+Subject: r3 - dir2 dir3
+
+Author: mailer test
+Date: Sun Sep 9 07:20:00 2001
+New Revision: 3
+
+Log:
+two copies
+
+Added:
+ dir2/file7
+ - copied unchanged from r2, file1
+ dir3/ (props changed)
+ - copied from r2, dir1/
+
+Copied: dir2/file7 (from r2, file1)
+==============================================================================
+--- /dev/null 00:00:00 1970 (empty, because file is newly added)
++++ dir2/file7 Sun Sep 9 07:20:00 2001 (r3, copy of r2, file1)
+@@ -0,0 +1 @@
++file1
+Group: All
+Subject: r4 - dir3
+
+Author: mailer test
+Date: Sun Sep 9 10:06:40 2001
+New Revision: 4
+
+Log:
+copied and changed
+
+Added:
+ dir3/file8
+ - copied, changed from r2, file1
+
+Copied and modified: dir3/file8 (from r2, file1)
+==============================================================================
+--- file1 Sun Sep 9 04:33:20 2001 (r2, copy source)
++++ dir3/file8 Sun Sep 9 10:06:40 2001 (r4)
+@@ -1 +1,2 @@
+ file1
++change C3
+Group: file
+Subject: r5 - dir1 dir3
+
+Author: mailer test
+Date: Sun Sep 9 12:53:20 2001
+New Revision: 5
+
+Log:
+changes and deletes of properties
+
+Modified:
+ file2 (props changed)
+Group: file plus other areas
+Subject: r5 - dir1 dir3
+
+Author: mailer test
+Date: Sun Sep 9 12:53:20 2001
+New Revision: 5
+
+Log:
+changes and deletes of properties
+
+Modified:
+ file2 (props changed)
+
+Changes in other areas also in this revision:
+Modified:
+ dir1/ (props changed)
+ dir3/ (props changed)
+Group: All
+Subject: r5 - dir1 dir3
+
+Author: mailer test
+Date: Sun Sep 9 12:53:20 2001
+New Revision: 5
+
+Log:
+changes and deletes of properties
+
+Modified:
+ dir1/ (props changed)
+ dir3/ (props changed)
+ file2 (props changed)
+Group: file
+Subject: r6 - dir1 dir4
+
+Author: mailer test
+Date: Sun Sep 9 15:40:00 2001
+New Revision: 6
+
+Log:
+mixed addition and change. Fixes Blaz#456 Blah#987
+
+Added:
+ file9
+
+Added: file9
+==============================================================================
+--- /dev/null 00:00:00 1970 (empty, because file is newly added)
++++ file9 Sun Sep 9 15:40:00 2001 (r6)
+@@ -0,0 +1 @@
++file9
+Group: file plus other areas
+Subject: r6 - dir1 dir4
+
+Author: mailer test
+Date: Sun Sep 9 15:40:00 2001
+New Revision: 6
+
+Log:
+mixed addition and change. Fixes Blaz#456 Blah#987
+
+Added:
+ file9
+
+Changes in other areas also in this revision:
+Added:
+ dir4/
+Modified:
+ dir1/file3
+
+Added: file9
+==============================================================================
+--- /dev/null 00:00:00 1970 (empty, because file is newly added)
++++ file9 Sun Sep 9 15:40:00 2001 (r6)
+@@ -0,0 +1 @@
++file9
+
+Diffs of changes in other areas also in this revision:
+
+Modified: dir1/file3
+==============================================================================
+--- dir1/file3 Sun Sep 9 12:53:20 2001 (r5)
++++ dir1/file3 Sun Sep 9 15:40:00 2001 (r6)
+@@ -1 +1,2 @@
+ file3
++change C4
+Group: bugtracker
+Subject: Fix for Blaz#456: r6 - dir1 dir4
+
+Author: mailer test
+Date: Sun Sep 9 15:40:00 2001
+New Revision: 6
+
+Log:
+mixed addition and change. Fixes Blaz#456 Blah#987
+
+Added:
+ dir4/
+ file9
+Modified:
+ dir1/file3
+
+Modified: dir1/file3
+==============================================================================
+--- dir1/file3 Sun Sep 9 12:53:20 2001 (r5)
++++ dir1/file3 Sun Sep 9 15:40:00 2001 (r6)
+@@ -1 +1,2 @@
+ file3
++change C4
+
+Added: file9
+==============================================================================
+--- /dev/null 00:00:00 1970 (empty, because file is newly added)
++++ file9 Sun Sep 9 15:40:00 2001 (r6)
+@@ -0,0 +1 @@
++file9
+Group: bugtracker
+Subject: Fix for Blah#987: r6 - dir1 dir4
+
+Author: mailer test
+Date: Sun Sep 9 15:40:00 2001
+New Revision: 6
+
+Log:
+mixed addition and change. Fixes Blaz#456 Blah#987
+
+Added:
+ dir4/
+ file9
+Modified:
+ dir1/file3
+
+Modified: dir1/file3
+==============================================================================
+--- dir1/file3 Sun Sep 9 12:53:20 2001 (r5)
++++ dir1/file3 Sun Sep 9 15:40:00 2001 (r6)
+@@ -1 +1,2 @@
+ file3
++change C4
+
+Added: file9
+==============================================================================
+--- /dev/null 00:00:00 1970 (empty, because file is newly added)
++++ file9 Sun Sep 9 15:40:00 2001 (r6)
+@@ -0,0 +1 @@
++file9
+Group: All
+Subject: r6 - dir1 dir4
+
+Author: mailer test
+Date: Sun Sep 9 15:40:00 2001
+New Revision: 6
+
+Log:
+mixed addition and change. Fixes Blaz#456 Blah#987
+
+Added:
+ dir4/
+ file9
+Modified:
+ dir1/file3
+
+Modified: dir1/file3
+==============================================================================
+--- dir1/file3 Sun Sep 9 12:53:20 2001 (r5)
++++ dir1/file3 Sun Sep 9 15:40:00 2001 (r6)
+@@ -1 +1,2 @@
+ file3
++change C4
+
+Added: file9
+==============================================================================
+--- /dev/null 00:00:00 1970 (empty, because file is newly added)
++++ file9 Sun Sep 9 15:40:00 2001 (r6)
+@@ -0,0 +1 @@
++file9
+Group: file
+Subject: r7 - dir1 dir2 dir3 dir3/dir5
+
+Author: mailer test
+Date: Sun Sep 9 18:26:40 2001
+New Revision: 7
+
+Log:
+adds, deletes, and a change
+
+Deleted:
+ file2
+
+Deleted: file2
+==============================================================================
+--- file2 Sun Sep 9 18:26:40 2001 (r6)
++++ /dev/null 00:00:00 1970 (deleted)
+@@ -1,2 +0,0 @@
+-file2
+-change C1
+Group: file plus other areas
+Subject: r7 - dir1 dir2 dir3 dir3/dir5
+
+Author: mailer test
+Date: Sun Sep 9 18:26:40 2001
+New Revision: 7
+
+Log:
+adds, deletes, and a change
+
+Deleted:
+ file2
+
+Changes in other areas also in this revision:
+Added:
+ dir1/file10
+ dir3/dir5/
+Deleted:
+ dir2/
+Modified:
+ dir3/file3
+
+Deleted: file2
+==============================================================================
+--- file2 Sun Sep 9 18:26:40 2001 (r6)
++++ /dev/null 00:00:00 1970 (deleted)
+@@ -1,2 +0,0 @@
+-file2
+-change C1
+
+Diffs of changes in other areas also in this revision:
+
+Added: dir1/file10
+==============================================================================
+--- /dev/null 00:00:00 1970 (empty, because file is newly added)
++++ dir1/file10 Sun Sep 9 18:26:40 2001 (r7)
+@@ -0,0 +1 @@
++file10
+
+Modified: dir3/file3
+==============================================================================
+--- dir3/file3 Sun Sep 9 15:40:00 2001 (r6)
++++ dir3/file3 Sun Sep 9 18:26:40 2001 (r7)
+@@ -1 +1,2 @@
+ file3
++change C5
+Group: All
+Subject: r7 - dir1 dir2 dir3 dir3/dir5
+
+Author: mailer test
+Date: Sun Sep 9 18:26:40 2001
+New Revision: 7
+
+Log:
+adds, deletes, and a change
+
+Added:
+ dir1/file10
+ dir3/dir5/
+Deleted:
+ dir2/
+ file2
+Modified:
+ dir3/file3
+
+Added: dir1/file10
+==============================================================================
+--- /dev/null 00:00:00 1970 (empty, because file is newly added)
++++ dir1/file10 Sun Sep 9 18:26:40 2001 (r7)
+@@ -0,0 +1 @@
++file10
+
+Modified: dir3/file3
+==============================================================================
+--- dir3/file3 Sun Sep 9 15:40:00 2001 (r6)
++++ dir3/file3 Sun Sep 9 18:26:40 2001 (r7)
+@@ -1 +1,2 @@
+ file3
++change C5
+
+Deleted: file2
+==============================================================================
+--- file2 Sun Sep 9 18:26:40 2001 (r6)
++++ /dev/null 00:00:00 1970 (deleted)
+@@ -1,2 +0,0 @@
+-file2
+-change C1
+Group: All
+Subject: r8 - in dir6: . dir5
+
+Author: mailer test
+Date: Sun Sep 9 21:13:20 2001
+New Revision: 8
+
+Log:
+copy dir, then make a change
+
+Added:
+ dir6/
+ - copied from r6, dir3/
+ dir6/dir5/
+ - copied from r7, dir3/dir5/
+Replaced:
+ dir6/file3
+ - copied unchanged from r7, dir3/file3
+Modified:
+ dir6/file4
+
+Copied: dir6/file3 (from r7, dir3/file3)
+==============================================================================
+--- /dev/null 00:00:00 1970 (empty, because file is newly added)
++++ dir6/file3 Sun Sep 9 21:13:20 2001 (r8, copy of r7, dir3/file3)
+@@ -0,0 +1,2 @@
++file3
++change C5
+
+Modified: dir6/file4
+==============================================================================
+--- dir3/file4 Sun Sep 9 15:40:00 2001 (r6)
++++ dir6/file4 Sun Sep 9 21:13:20 2001 (r8)
+@@ -1 +1,2 @@
+ file4
++change C6
+Group: file
+Subject: r9 -
+
+Author: mailer test
+Date: Mon Sep 10 00:00:00 2001
+New Revision: 9
+
+Log:
+add binary file
+
+Added:
+ file11 (contents, props changed)
+Modified:
+ file9 (props changed)
+
+Added: file11
+==============================================================================
+Binary file. No diff available.
+Group: file plus other areas
+Subject: r9 -
+
+Author: mailer test
+Date: Mon Sep 10 00:00:00 2001
+New Revision: 9
+
+Log:
+add binary file
+
+Added:
+ file11 (contents, props changed)
+Modified:
+ file9 (props changed)
+
+Added: file11
+==============================================================================
+Binary file. No diff available.
+Group: All
+Subject: r9 -
+
+Author: mailer test
+Date: Mon Sep 10 00:00:00 2001
+New Revision: 9
+
+Log:
+add binary file
+
+Added:
+ file11 (contents, props changed)
+Modified:
+ file9 (props changed)
+
+Added: file11
+==============================================================================
+Binary file. No diff available.
+Group: file
+Subject: r10 -
+
+Author: mailer test
+Date: Mon Sep 10 02:46:40 2001
+New Revision: 10
+
+Log:
+change binary file
+
+Modified:
+ file11
+ file9 (props changed)
+
+Modified: file11
+==============================================================================
+Binary file (source and/or target). No diff available.
+Group: file plus other areas
+Subject: r10 -
+
+Author: mailer test
+Date: Mon Sep 10 02:46:40 2001
+New Revision: 10
+
+Log:
+change binary file
+
+Modified:
+ file11
+ file9 (props changed)
+
+Modified: file11
+==============================================================================
+Binary file (source and/or target). No diff available.
+Group: All
+Subject: r10 -
+
+Author: mailer test
+Date: Mon Sep 10 02:46:40 2001
+New Revision: 10
+
+Log:
+change binary file
+
+Modified:
+ file11
+ file9 (props changed)
+
+Modified: file11
+==============================================================================
+Binary file (source and/or target). No diff available.
diff --git a/tools/hook-scripts/mailer/tests/mailer-t1.sh b/tools/hook-scripts/mailer/tests/mailer-t1.sh
new file mode 100755
index 0000000..ff4b6ca
--- /dev/null
+++ b/tools/hook-scripts/mailer/tests/mailer-t1.sh
@@ -0,0 +1,60 @@
+#!/bin/sh
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# mailer-t1.sh: test #1 for the mailer.py script
+#
+# This test generates "email" for each revision in the repository,
+# concatenating them into one big blob, which is then compared against
+# a known output.
+#
+# Note: mailer-tweak.py must have been run to make the test outputs
+# consistent and reproducible
+#
+# USAGE: ./mailer-t1.sh REPOS MAILER-SCRIPT
+#
+
+if test "$#" != 2; then
+ echo "USAGE: ./mailer-t1.sh REPOS MAILER-SCRIPT"
+ exit 1
+fi
+
+scripts="`dirname $0`"
+scripts="`cd $scripts && pwd`"
+
+glom=$scripts/mailer-t1.current
+orig=$scripts/mailer-t1.output
+conf=$scripts/mailer.conf
+rm -f $glom
+
+export TZ=GST
+
+youngest="`svnlook youngest $1`"
+for rev in `python -c "print(\" \".join(map(str, range(1,$youngest+1))))"`; do
+ $2 commit $1 $rev $conf >> $glom
+done
+
+echo "current mailer.py output in: $glom"
+
+dos2unix $glom
+
+echo diff -q $orig $glom
+diff -q $orig $glom && echo "SUCCESS: no differences detected"
diff --git a/tools/hook-scripts/mailer/tests/mailer-tweak.py b/tools/hook-scripts/mailer/tests/mailer-tweak.py
new file mode 100755
index 0000000..0805980
--- /dev/null
+++ b/tools/hook-scripts/mailer/tests/mailer-tweak.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# mailer-tweak.py: tweak the svn:date and svn:author properties
+# on all revisions
+#
+# We need constant dates and authors for the revisions so that we can
+# consistently compare an output against a known quantity.
+#
+# USAGE: ./mailer-tweak.py REPOS
+#
+
+
+import sys
+import os
+import getopt
+
+from svn import fs, core
+
+DATE_BASE = 1000000000
+DATE_INCR = 10000
+
+
+def tweak_dates(pool, home='.'):
+ db_path = os.path.join(home, 'db')
+ if not os.path.exists(db_path):
+ db_path = home
+
+ fsob = fs.new(None, pool)
+ fs.open_berkeley(fsob, db_path)
+
+ for i in range(fs.youngest_rev(fsob, pool)):
+ # convert secs into microseconds, then a string
+ date = core.svn_time_to_cstring((DATE_BASE+i*DATE_INCR) * 1000000L, pool)
+ #print date
+ fs.change_rev_prop(fsob, i+1, core.SVN_PROP_REVISION_DATE, date, pool)
+ fs.change_rev_prop(fsob, i+1, core.SVN_PROP_REVISION_AUTHOR, 'mailer test', pool)
+
+def main():
+ if len(sys.argv) != 2:
+ print('USAGE: %s REPOS' % sys.argv[0])
+ sys.exit(1)
+
+ core.run_app(tweak_dates, sys.argv[1])
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/hook-scripts/mailer/tests/mailer.conf b/tools/hook-scripts/mailer/tests/mailer.conf
new file mode 100644
index 0000000..ac25f24
--- /dev/null
+++ b/tools/hook-scripts/mailer/tests/mailer.conf
@@ -0,0 +1,365 @@
+#
+# mailer.conf: example configuration file for mailer.py
+#
+# $Id: mailer.conf 1086097 2011-03-28 02:14:33Z gmcdonald $
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+[general]
+
+# The [general].diff option is now DEPRECATED.
+# Instead use [defaults].diff .
+
+#
+# One delivery method must be chosen. mailer.py will prefer using the
+# "mail_command" option. If that option is empty or commented out,
+# then it checks whether the "smtp_hostname" option has been
+# specified. If neither option is set, then the commit message is
+# delivered to stdout.
+#
+
+# This command will be invoked with destination addresses on the command
+# line, and the message piped into it.
+#mail_command = /usr/sbin/sendmail
+
+# This option specifies the hostname for delivery via SMTP.
+#smtp_hostname = localhost
+
+# Username and password for SMTP servers requiring authorisation.
+#smtp_username = example
+#smtp_password = example
+
+# --------------------------------------------------------------------------
+
+#
+# CONFIGURATION GROUPS
+#
+# Any sections other than [general], [defaults], [maps] and sections
+# referred to within [maps] are considered to be user-defined groups
+# which override values in the [defaults] section.
+# These groups are selected using the following two options:
+#
+# for_repos
+# for_paths
+#
+# Both options specify a regular expression. The former is matched against
+# the absolute path to the repository the mailer is operating against. The
+# second is matched against *every* path (files and dirs) that was modified
+# during the commit.
+#
+# The options specified in the [defaults] section are always selected. The
+# presence of a non-matching for_repos has no relevance. Note that you may
+# still use a for_repos value to extract useful information (more on this
+# later). Any user-defined groups without a for_repos, or which contains
+# a matching for_repos, will be selected for potential use.
+#
+# The subset of user-defined groups identified by the repository are further
+# refined based on the for_paths option. A group is selected if at least
+# one path(*) in the commit matches the for_paths regular expression. Note
+# that the paths are relative to the root of the repository and do not
+# have a leading slash.
+#
+# (*) Actually, each path will select just one group. Thus, it is possible
+# that one group will match against all paths, while another group matches
+# none of the paths, even though its for_paths would have selected some of
+# the paths in the commit.
+#
+# Groups are matched in no particular order. Do not depend upon their
+# order within this configuration file. The values from [defaults] will
+# be used if no group is matched or an option in a group does not override
+# the corresponding value from [defaults].
+#
+# Generally, a commit email is generated for each group that has been
+# selected. The script will try to minimize mails, so it may be possible
+# that a single message will be generated to multiple recipients. In
+# addition, it is possible for multiple messages per group to be generated,
+# based on the various substitutions that are performed (see the following
+# section).
+#
+#
+# SUBSTITUTIONS
+#
+# The regular expressions can use the "named group" syntax to extract
+# interesting pieces of the repository or commit path. These named values
+# can then be substituted in the option values during mail generation.
+#
+# For example, let's say that you have a repository with a top-level
+# directory named "clients", with several client projects underneath:
+#
+# REPOS/
+# clients/
+# gsvn/
+# rapidsvn/
+# winsvn/
+#
+# The client name can be extracted with a regular expression like:
+#
+# for_paths = clients/(?P<client>[^/]*)($|/)
+#
+# The substitution is performed using Python's dict-based string
+# interpolation syntax:
+#
+# to_addr = commits@%(client)s.tigris.org
+#
+# The %(NAME)s syntax will substitute whatever value for NAME was captured
+# in the for_repos and for_paths regular expressions. The set of names
+# available is obtained from the following set of regular expressions:
+#
+# [defaults].for_repos (if present)
+# [GROUP].for_repos (if present in the user-defined group "GROUP")
+# [GROUP].for_paths (if present in the user-defined group "GROUP")
+#
+# The names from the regexes later in the list override the earlier names.
+# If none of the groups match, but a for_paths is present in [defaults],
+# then its extracted names will be available.
+#
+# Note that each unique set of names for substitution will generate an
+# email. In the above example, if a commit modified files in all three
+# client subdirectories, then an email will be sent to all three commits@
+# mailing lists on tigris.org.
+#
+# The substitution variable "author" is provided by default, and is set
+# to the author name passed to mailer.py for revprop changes or the
+# author defined for a revision; if neither is available, then it is
+# set to "no_author". Thus, you might define a line like:
+#
+# from_addr = %(author)s@example.com
+#
+#
+# SUMMARY
+#
+# While mailer.py will work to minimize the number of mail messages
+# generated, a single commit can potentially generate a large number
+# of variants of a commit message. The criteria for generating messages
+# is based on:
+#
+# groups selected by for_repos
+# groups selected by for_paths
+# unique sets of parameters extracted by the above regular expressions
+#
+
+[defaults]
+
+# This is not passed to the shell, so do not use shell metacharacters.
+# The command is split around whitespace, so if you want to include
+# whitespace in the command, then you will need to use a wrapper script.
+diff = /usr/bin/diff -u -L %(label_from)s -L %(label_to)s %(from)s %(to)s
+
+# The default prefix for the Subject: header for commits.
+commit_subject_prefix =
+
+# The default prefix for the Subject: header for propchanges.
+propchange_subject_prefix =
+
+# The default prefix for the Subject: header for locks.
+lock_subject_prefix =
+
+# The default prefix for the Subject: header for unlocks.
+unlock_subject_prefix =
+
+
+# The default From: address for messages. If the from_addr is not
+# specified or it is specified but there is no text after the `=',
+# then the revision's author is used as the from address. If the
+# revision author is not specified, such as when a commit is done
+# without requiring authentication and authorization, then the string
+# 'no_author' is used. You can specify a default from_addr here and
+# if you want to have a particular for_repos group use the author as
+# the from address, you can use "from_addr =".
+from_addr = invalid@example.com
+
+# The default To: addresses for messages. One or more addresses,
+# separated by whitespace (no commas).
+# NOTE: If you want to use a different character for separating the
+# addresses put it in front of the addresses included in square
+# brackets '[ ]'.
+to_addr = invalid@example.com
+
+# If this is set, then a Reply-To: will be inserted into the message.
+reply_to =
+
+# Specify which types of repository changes mailer.py will create
+# diffs for. Valid options are any combination of
+# 'add copy modify delete', or 'none' to never create diffs.
+# If the generate_diffs option is empty, the selection is controlled
+# by the deprecated options suppress_deletes and suppress_adds.
+# Note that this only affects the display of diffs - all changes are
+# mentioned in the summary of changed paths at the top of the message,
+# regardless of this option's value.
+# Meaning of the possible values:
+# add: generates diffs for all added paths
+# copy: generates diffs for all copied paths
+# which were not changed after copying
+# modify: generates diffs for all modified paths, including paths that were
+# copied and modified afterwards (within the same commit)
+# delete: generates diffs for all removed paths
+generate_diffs = add copy modify delete
+
+# Commit URL construction. This adds a URL to the top of the message
+# that can lead the reader to a Trac, ViewVC or other view of the
+# commit as a whole.
+#
+# The available substitution variable is: rev
+#commit_url = http://diffs.server.com/trac/software/changeset/%(rev)s
+
+# Diff URL construction. For the configured diff URL types, the diff
+# section (which follows the message header) will include the URL
+# relevant to the change type, even if actual diff generation for that
+# change type is disabled (per the generate_diffs option).
+#
+# Available substitution variables are: path, base_path, rev, base_rev
+#diff_add_url =
+#diff_copy_url =
+#diff_modify_url = http://diffs.server.com/?p1=%(base_path)s&p2=%(path)s
+#diff_delete_url =
+
+# When set to "yes", the mailer will suppress the creation of a diff which
+# deletes all the lines in the file. If this is set to anything else, or
+# is simply commented out, then the diff will be inserted. Note that the
+# deletion is always mentioned in the message header, regardless of this
+# option's value.
+### DEPRECATED (if generate_diffs is not empty, this option is ignored)
+#suppress_deletes = yes
+
+# When set to "yes", the mailer will suppress the creation of a diff which
+# adds all the lines in the file. If this is set to anything else, or
+# is simply commented out, then the diff will be inserted. Note that the
+# addition is always mentioned in the message header, regardless of this
+# option's value.
+### DEPRECATED (if generate_diffs is not empty, this option is ignored)
+#suppress_adds = yes
+
+# A revision is reported on if any of its changed paths match the
+# for_paths option. If only some of the changed paths of a revision
+# match, this variable controls the behaviour for the non-matching
+# paths. Possible values are:
+#
+# yes: (Default) Show in both summary and diffs.
+# summary: Show the changed paths in the summary, but omit the diffs.
+# no: Show nothing more than a note saying "and changes in other areas"
+#
+show_nonmatching_paths = yes
+
+# Subject line length limit. The generated subject line will be truncated
+# and terminated with "...", to remain within the specified maximum length.
+# Set to 0 to turn off.
+#truncate_subject = 200
+
+# --------------------------------------------------------------------------
+
+[maps]
+
+#
+# This section can be used to define rewrite mappings for option values. It
+# is typically used for computing from/to addresses, but can actually be
+# used to remap values for any option in this file.
+#
+# The mappings are global for the entire configuration file. There is
+# no group-specific mapping capability. For each mapping that you want
+# to perform, you will provide the name of the option (e.g. from_addr)
+# and a specification of how to perform those mappings. These declarations
+# are made here in the [maps] section.
+#
+# When an option is accessed, the value is loaded from the configuration
+# file and all %(NAME)s substitutions are performed. The resulting value
+# is then passed through the map. If a map entry is not available for
+# the value, then it will be used unchanged.
+#
+# NOTES: - Avoid using map substitution names which differ only in case.
+# Unexpected results may occur.
+# - A colon ':' is also considered as separator between option and
+# value (keep this in mind when trying to map a file path under
+# windows).
+#
+# The format to declare a map is:
+#
+# option_name_to_remap = mapping_specification
+#
+# At the moment, there is only one type of mapping specification:
+#
+# mapping_specification = '[' sectionname ']'
+#
+# This will use the given section to map values. The option names in
+# the section are the input values, and the option values are the result.
+#
+
+#
+# EXAMPLE:
+#
+# We have two projects using two repositories. The name of the repos
+# does not easily map to their commit mailing lists, so we will use
+# a mapping to go from a project name (extracted from the repository
+# path) to their commit list. The committers also need a special
+# mapping to derive their email address from their repository username.
+#
+# [projects]
+# for_repos = .*/(?P<project>.*)
+# from_addr = %(author)s
+# to_addr = %(project)s
+#
+# [maps]
+# from_addr = [authors]
+# to_addr = [mailing-lists]
+#
+# [authors]
+# john = jconnor@example.com
+# sarah = sconnor@example.com
+#
+# [mailing-lists]
+# t600 = spottable-commits@example.com
+# tx = hotness-commits@example.com
+#
+
+# --------------------------------------------------------------------------
+
+#
+# [example-group]
+# # send notifications if any web pages are changed
+# for_paths = .*\.html
+# # set a custom prefix
+# commit_subject_prefix = [commit]
+# propchange_subject_prefix = [propchange]
+# # override the default, sending these elsewhere
+# to_addr = www-commits@example.com
+# # use the revision author as the from address
+# from_addr =
+# # use a custom diff program for this group
+# diff = /usr/bin/my-diff -u -L %(label_from)s -L %(label_to)s %(from)s %(to)s
+#
+# [another-example]
+# # commits to personal repositories should go to that person
+# for_repos = /home/(?P<who>[^/]*)/repos
+# to_addr = %(who)s@example.com
+#
+
+[All]
+
+[file plus other areas]
+for_paths = file.*
+
+[file]
+for_paths = file.*
+show_nonmatching_paths = no
+
+[bugtracker]
+search_logmsg = (?P<bugid>(Blaz|Blah)#\d+)
+to_addr = issue-tracker@example.com
+commit_subject_prefix = Fix for %(bugid)s:
+
diff --git a/tools/hook-scripts/persist-ephemeral-txnprops.py b/tools/hook-scripts/persist-ephemeral-txnprops.py
new file mode 100755
index 0000000..6e5697a
--- /dev/null
+++ b/tools/hook-scripts/persist-ephemeral-txnprops.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+import sys
+import os
+from svn import repos, fs, core
+
+def duplicate_ephemeral_txnprops(repos_path, txn_name):
+ fs_ptr = repos.fs(repos.open(repos_path))
+ txn_t = fs.open_txn(fs_ptr, txn_name)
+ for name, value in fs.txn_proplist(txn_t).items():
+ if name.startswith(core.SVN_PROP_TXN_PREFIX):
+ name = core.SVN_PROP_REVISION_PREFIX + \
+ name[len(core.SVN_PROP_TXN_PREFIX):]
+ fs.change_txn_prop(txn_t, name, value)
+
+def usage_and_exit(errmsg=None):
+ stream = errmsg and sys.stderr or sys.stdout
+ stream.write("""\
+Usage:
+
+ persist-ephemeral-txnprops.py REPOS_PATH TXN_NAME
+
+Duplicate ephemeral transaction properties so that the information
+they carry may persist as properties of the revision created once the
+transaction is committed. This is intended to be used as a Subversion
+pre-commit hook script.
+
+REPOS_PATH is the on-disk path of the repository whose transaction
+properties are being examined/modified. TXN_NAME is the name of the
+transaction.
+
+Ephemeral transaction properties, whose names all begin with the
+prefix "%s", will be copied to new properties which use the
+prefix "%s" instead.
+
+""" % (core.SVN_PROP_TXN_PREFIX, core.SVN_PROP_REVISION_PREFIX))
+ if errmsg:
+ stream.write("ERROR: " + errmsg + "\n")
+ sys.exit(errmsg and 1 or 0)
+
+def main():
+ argc = len(sys.argv)
+ if argc != 3:
+ usage_and_exit("Incorrect number of arguments.")
+ repos_path = sys.argv[1]
+ txn_name = sys.argv[2]
+ duplicate_ephemeral_txnprops(repos_path, txn_name)
+
+if __name__ == "__main__":
+ main()
diff --git a/tools/hook-scripts/reject-detected-sha1-collisions.sh b/tools/hook-scripts/reject-detected-sha1-collisions.sh
new file mode 100755
index 0000000..b2e14a9
--- /dev/null
+++ b/tools/hook-scripts/reject-detected-sha1-collisions.sh
@@ -0,0 +1,50 @@
+#!/bin/sh
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+# $Id: reject-detected-sha1-collisions.sh 1794454 2017-05-08 20:34:56Z astieger $
+#
+# Prevents detected SHA-1 collisions from being committed.
+# Uses sha1dcsum of sha1collisiondetection to detect
+# cryptanalytic collision attacks against SHA-1. The
+# detection works on a single side of the collision.
+# https://github.com/cr-marcstevens/sha1collisiondetection
+# commit 5ee29e5 or later
+
+REPOS="$1"
+TXN="$2"
+SVNLOOK=/usr/bin/svnlook
+GREP=/usr/bin/grep
+SED=/usr/bin/sed
+HEAD=/usr/bin/head
+SHA1DCSUM=/usr/bin/sha1dcsum
+
+$SVNLOOK changed -t "$TXN" "$REPOS"
+if [ $? -ne 0 ]; then
+ echo "svnlook failed, possible SHA-1 collision" >&2
+ exit 2
+fi
+
+$SVNLOOK changed -t "$TXN" "$REPOS" | $GREP -Ev '^D ' | $SED -e 's/^. //' | $GREP -v '/$' | while IFS= read -r FILE; do
+ $SVNLOOK cat -t "$TXN" "$REPOS" "$FILE" | $SHA1DCSUM - | $GREP -qv " \*coll\* "
+ if [ $? -ne 0 ]; then
+ echo "detected SHA-1 collision rejected" >&2
+ exit 3
+ fi
+done
diff --git a/tools/hook-scripts/reject-known-sha1-collisions.sh b/tools/hook-scripts/reject-known-sha1-collisions.sh
new file mode 100755
index 0000000..d816db8
--- /dev/null
+++ b/tools/hook-scripts/reject-known-sha1-collisions.sh
@@ -0,0 +1,50 @@
#!/bin/sh
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#
# $Id: reject-known-sha1-collisions.sh 1784763 2017-02-28 15:40:00Z stsp $
#
# Subversion pre-commit hook that prevents some known SHA-1 collisions
# from being committed. Tests for the 320 byte prefix found on
# https://shattered.io/
# If the files are committed in the same transaction, svnlook
# will error out itself due to the apparent corruption in the
# candidate revision.

# Standard pre-commit hook arguments: repository path and transaction id.
REPOS="$1"
TXN="$2"
# Absolute tool paths so the hook does not depend on the server's PATH.
SVNLOOK=/usr/bin/svnlook
GREP=/usr/bin/grep
SED=/usr/bin/sed
# GNU coreutils versions of these tools are required:
SHA1SUM=/usr/bin/sha1sum
HEAD=/usr/bin/head

# If svnlook cannot even list the transaction, treat that as a possible
# collision already inside the candidate revision and reject (status 2).
$SVNLOOK changed -t "$TXN" "$REPOS"
if [ $? -ne 0 ]; then
  echo "svnlook failed, possible SHA-1 collision" >&2
  exit 2
fi

# List changed paths, drop deletions ("D "), strip the status column and
# directories, then compare the SHA-1 of each file's first 320 bytes
# against the known shattered.io collision-prefix digest.
# NOTE(review): "cut" is invoked by bare name, unlike every other tool
# here which uses an absolute path — confirm this is intentional.
$SVNLOOK changed -t "$TXN" "$REPOS" | $GREP -Ev '^D ' | $SED -e 's/^. //' | $GREP -v '/$' | while IFS= read -r FILE; do
  PREFIX=`$SVNLOOK cat -t "$TXN" "$REPOS" "$FILE" | $HEAD -c320 | $SHA1SUM | cut -c-40`
  if [ x"$PREFIX" = x'f92d74e3874587aaf443d1db961d4e26dde13e9c' ]; then
    echo "known SHA-1 collision rejected" >&2
    exit 3
  fi
done
diff --git a/tools/hook-scripts/svn2feed.py b/tools/hook-scripts/svn2feed.py
new file mode 100755
index 0000000..c3abe8c
--- /dev/null
+++ b/tools/hook-scripts/svn2feed.py
@@ -0,0 +1,466 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+
+"""Usage: svn2feed.py [OPTION...] REPOS-PATH
+
+Generate an RSS 2.0 or Atom 1.0 feed file containing commit
+information for the Subversion repository located at REPOS-PATH. Once
+the maximum number of items is reached, older elements are removed.
+The item title is the revision number, and the item description
+contains the author, date, log messages and changed paths.
+
+Options:
+
+ -h, --help Show this help message.
+
+ -F, --format=FORMAT Required option. FORMAT must be one of:
+ 'rss' (RSS 2.0)
+ 'atom' (Atom 1.0)
+ to select the appropriate feed format.
+
+ -f, --feed-file=PATH Store the feed in the file located at PATH, which will
+ be created if it does not exist, or overwritten if it
+ does. If not provided, the script will store the feed
+ in the current working directory, in a file named
+ REPOS_NAME.rss or REPOS_NAME.atom (where REPOS_NAME is
+ the basename of the REPOS_PATH command-line argument,
+ and the file extension depends on the selected
+ format).
+
+ -r, --revision=X[:Y] Subversion revision (or revision range) to generate
+ info for. If not provided, info for the single
+ youngest revision in the repository will be generated.
+
+ -m, --max-items=N Keep only N items in the feed file. By default,
+ 20 items are kept.
+
+ -u, --item-url=URL Use URL as the basis for generating feed item links.
+ This value is appended with '?rev=REV_NUMBER' to form
+ the actual item links.
+
+ -U, --feed-url=URL Use URL as the global link associated with the feed.
+
+ -P, --svn-path=DIR Look in DIR for the svnlook binary. If not provided,
+ svnlook must be on the PATH.
+"""
+
+# TODO:
+# --item-url should support arbitrary formatting of the revision number,
+# to be useful with web viewers other than ViewVC.
+# Rather more than intended is being cached in the pickle file. Instead of
+# only old items being drawn from the pickle, all the global feed metadata
+# is actually set only on initial feed creation, and thereafter simply
+# re-used from the pickle each time.
+
+# $HeadURL: https://svn.apache.org/repos/asf/subversion/branches/1.10.x/tools/hook-scripts/svn2feed.py $
+# $LastChangedDate: 2016-04-30 08:16:53 +0000 (Sat, 30 Apr 2016) $
+# $LastChangedBy: stefan2 $
+# $LastChangedRevision: 1741723 $
+
+import sys
+
+# Python 2.4 is required for subprocess
+if sys.version_info < (2, 4):
+ sys.stderr.write("Error: Python 2.4 or higher required.\n")
+ sys.stderr.flush()
+ sys.exit(1)
+
+import getopt
+import os
+import subprocess
+try:
+ # Python <3.0
+ import cPickle as pickle
+except ImportError:
+ # Python >=3.0
+ import pickle
+import datetime
+import time
+
def usage_and_exit(errmsg=None):
    """Print the module usage message and terminate the process.

    With no ERRMSG the usage goes to stdout and the exit status is 0.
    With an ERRMSG the usage and the error go to stderr and the exit
    status is 2.
    """
    stream = sys.stdout if errmsg is None else sys.stderr
    stream.write("%s\n" % __doc__)
    stream.flush()
    if errmsg:
        stream.write("\nError: %s\n" % errmsg)
        stream.flush()
        sys.exit(2)
    sys.exit(0)
+
def check_url(url, opt):
    """Exit with a usage error unless URL uses a supported scheme
    (http, https or file); return None when URL looks valid."""
    if not url.startswith(("https://", "http://", "file://")):
        usage_and_exit("svn2feed.py: Invalid url '%s' is specified for " \
                       "'%s' option" % (url, opt))
+
+
+class Svn2Feed:
+ def __init__(self, svn_path, repos_path, item_url, feed_file,
+ max_items, feed_url):
+ self.repos_path = repos_path
+ self.item_url = item_url
+ self.feed_file = feed_file
+ self.max_items = max_items
+ self.feed_url = feed_url
+ self.svnlook_cmd = 'svnlook'
+ if svn_path is not None:
+ self.svnlook_cmd = os.path.join(svn_path, 'svnlook')
+ self.feed_title = ("%s's Subversion Commits Feed"
+ % (os.path.basename(os.path.abspath(self.repos_path))))
+ self.feed_desc = "The latest Subversion commits"
+
+ def _get_item_dict(self, revision):
+ revision = str(revision)
+
+ cmd = [self.svnlook_cmd, 'info', '-r', revision, self.repos_path]
+ proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+ proc.wait()
+ info_lines = proc.stdout.readlines()
+
+ cmd = [self.svnlook_cmd, 'changed', '-r', revision, self.repos_path]
+ proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+ proc.wait()
+ changed_data = proc.stdout.readlines()
+
+ desc = ("\nRevision: %s\nLog: %sModified: \n%s"
+ % (revision, info_lines[3], changed_data))
+
+ item_dict = {
+ 'author': info_lines[0].strip('\n'),
+ 'title': "Revision %s" % revision,
+ 'link': self.item_url and "%s?rev=%s" % (self.item_url, revision),
+ 'date': self._format_updated_ts(info_lines[1]),
+ 'description': "<pre>" + desc + "</pre>",
+ }
+
+ return item_dict
+
+ def _format_updated_ts(self, revision_ts):
+
+ # Get "2006-08-10 20:17:08" from
+ # "2006-07-28 20:17:18 +0530 (Fri, 28 Jul 2006)
+ date = revision_ts[0:19]
+ epoch = time.mktime(time.strptime(date, "%Y-%m-%d %H:%M:%S"))
+ return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime(epoch))
+
+
class Svn2RSS(Svn2Feed):
    """Feed generator that writes RSS 2.0 via the PyRSS2Gen module.

    Feed state (the RSS2 object, including items from previous runs) is
    persisted in a pickle file alongside the feed file.
    """
    def __init__(self, svn_path, repos_path, item_url, feed_file,
                 max_items, feed_url):
        Svn2Feed.__init__(self, svn_path, repos_path, item_url, feed_file,
                          max_items, feed_url)
        try:
            import PyRSS2Gen
        except ImportError:
            sys.stderr.write("""
Error: Required PyRSS2Gen module not found. You can download the PyRSS2Gen
module from:

 http://www.dalkescientific.com/Python/PyRSS2Gen.html

""")
            sys.exit(1)
        self.PyRSS2Gen = PyRSS2Gen

        (file, ext) = os.path.splitext(self.feed_file)
        self.pickle_file = file + ".pickle"
        if os.path.exists(self.pickle_file):
            # Fix: pickles are binary data; the file must be opened in
            # binary mode ("r" breaks under Python 3).
            self.rss = pickle.load(open(self.pickle_file, "rb"))
        else:
            self.rss = self.PyRSS2Gen.RSS2(
                title = self.feed_title,
                link = self.feed_url,
                description = self.feed_desc,
                lastBuildDate = datetime.datetime.now(),
                items = [])

    @staticmethod
    def get_default_file_extension():
        """Extension used when the feed file name is derived from the
        repository name."""
        return ".rss"

    def add_revision_item(self, revision):
        """Prepend an item for REVISION, trimming the list to max_items."""
        rss_item = self._make_rss_item(revision)
        self.rss.items.insert(0, rss_item)
        if len(self.rss.items) > self.max_items:
            del self.rss.items[self.max_items:]

    def write_output(self):
        """Persist feed state to the pickle file and write the RSS XML."""
        # Fix: pickle.dumps returns bytes under Python 3; write the pickle
        # in binary mode (text mode fails outright there).
        s = pickle.dumps(self.rss)
        f = open(self.pickle_file, "wb")
        f.write(s)
        f.close()

        f = open(self.feed_file, "w")
        self.rss.write_xml(f)
        f.close()

    def _make_rss_item(self, revision):
        """Build a PyRSS2Gen.RSSItem from svnlook data for REVISION."""
        info = self._get_item_dict(revision)

        rss_item = self.PyRSS2Gen.RSSItem(
            author = info['author'],
            title = info['title'],
            link = info['link'],
            description = info['description'],
            guid = self.PyRSS2Gen.Guid(info['link']),
            pubDate = info['date'])
        return rss_item
+
+
class Svn2Atom(Svn2Feed):
    """Feed generator that writes Atom 1.0 using xml.dom.

    The whole DOM document is persisted in a pickle file next to the
    feed file so that entries from previous runs are preserved.
    """
    def __init__(self, svn_path, repos_path, item_url, feed_file,
                 max_items, feed_url):
        Svn2Feed.__init__(self, svn_path, repos_path, item_url, feed_file,
                          max_items, feed_url)
        from xml.dom import getDOMImplementation
        self.dom_impl = getDOMImplementation()

        self.pickle_file = self.feed_file + ".pickle"
        if os.path.exists(self.pickle_file):
            # Fix: pickles are binary data; the file must be opened in
            # binary mode ("r" breaks under Python 3).
            self.document = pickle.load(open(self.pickle_file, "rb"))
            self.feed = self.document.getElementsByTagName('feed')[0]
        else:
            self._init_atom_document()

    @staticmethod
    def get_default_file_extension():
        """Extension used when the feed file name is derived from the
        repository name."""
        return ".atom"

    def add_revision_item(self, revision):
        """Insert a new <entry> for REVISION before the first existing
        entry and drop entries beyond max_items."""
        item = self._make_atom_item(revision)

        total = 0
        for childNode in self.feed.childNodes:
            if childNode.nodeName == 'entry':
                if total == 0:
                    # Newest entry goes in front of the first existing one.
                    self.feed.insertBefore(item, childNode)
                    total += 1
                total += 1
                if total > self.max_items:
                    self.feed.removeChild(childNode)
        if total == 0:
            # Feed had no entries yet: just append ours.
            self.feed.appendChild(item)

    def write_output(self):
        """Persist the DOM to the pickle file and write the Atom XML."""
        # Fix: pickle.dumps returns bytes under Python 3; write the pickle
        # in binary mode (text mode fails outright there).
        s = pickle.dumps(self.document)
        f = open(self.pickle_file, "wb")
        f.write(s)
        f.close()

        f = open(self.feed_file, "w")
        f.write(self.document.toxml())
        f.close()

    def _make_atom_item(self, revision):
        """Build an <entry> element from svnlook data for REVISION."""
        info = self._get_item_dict(revision)

        doc = self.document
        entry = doc.createElement("entry")

        id = doc.createElement("id")
        entry.appendChild(id)
        id.appendChild(doc.createTextNode(info['link']))

        title = doc.createElement("title")
        entry.appendChild(title)
        title.appendChild(doc.createTextNode(info['title']))

        updated = doc.createElement("updated")
        entry.appendChild(updated)
        updated.appendChild(doc.createTextNode(info['date']))

        link = doc.createElement("link")
        entry.appendChild(link)
        link.setAttribute("href", info['link'])

        summary = doc.createElement("summary")
        entry.appendChild(summary)
        summary.appendChild(doc.createTextNode(info['description']))

        author = doc.createElement("author")
        entry.appendChild(author)
        aname = doc.createElement("name")
        author.appendChild(aname)
        aname.appendChild(doc.createTextNode(info['author']))

        return entry

    def _init_atom_document(self):
        """Create a fresh Atom <feed> skeleton with the global metadata."""
        doc = self.document = self.dom_impl.createDocument(None, None, None)
        feed = self.feed = doc.createElement("feed")
        doc.appendChild(feed)

        feed.setAttribute("xmlns", "http://www.w3.org/2005/Atom")

        title = doc.createElement("title")
        feed.appendChild(title)
        title.appendChild(doc.createTextNode(self.feed_title))

        id = doc.createElement("id")
        feed.appendChild(id)
        id.appendChild(doc.createTextNode(self.feed_url))

        updated = doc.createElement("updated")
        feed.appendChild(updated)
        now = datetime.datetime.now()
        updated.appendChild(doc.createTextNode(self._format_date(now)))

        link = doc.createElement("link")
        feed.appendChild(link)
        link.setAttribute("href", self.feed_url)

        author = doc.createElement("author")
        feed.appendChild(author)
        aname = doc.createElement("name")
        author.appendChild(aname)
        aname.appendChild(doc.createTextNode("subversion"))

    def _format_date(self, dt):
        """ input date must be in GMT """
        # Fix: microseconds must be zero-padded to six digits ("%06d");
        # the original "%02d" rendered e.g. 5000us as ".5000" (half a
        # second) in the Atom timestamp.
        return ("%04d-%02d-%02dT%02d:%02d:%02d.%06dZ"
                % (dt.year, dt.month, dt.day, dt.hour, dt.minute,
                   dt.second, dt.microsecond))
+
+
def main():
    """Command-line entry point: parse options, determine the revision
    range, then (re)generate the feed file."""
    # Parse the command-line options and arguments.
    try:
        opts, args = getopt.gnu_getopt(sys.argv[1:], "hP:r:u:f:m:U:F:",
                                       ["help",
                                        "svn-path=",
                                        "revision=",
                                        "item-url=",
                                        "feed-file=",
                                        "max-items=",
                                        "feed-url=",
                                        "format=",
                                        ])
    except getopt.GetoptError as msg:
        usage_and_exit(msg)

    # Make sure required arguments are present.
    if len(args) != 1:
        usage_and_exit("You must specify a repository path.")
    repos_path = os.path.abspath(args[0])

    # Now deal with the options.
    max_items = 20
    commit_rev = svn_path = None
    item_url = feed_url = None
    feed_file = None
    feedcls = None
    # --format selects which of the two generator classes is used.
    feed_classes = { 'rss': Svn2RSS, 'atom': Svn2Atom }

    for opt, arg in opts:
        if opt in ("-h", "--help"):
            usage_and_exit()
        elif opt in ("-P", "--svn-path"):
            svn_path = arg
        elif opt in ("-r", "--revision"):
            commit_rev = arg
        elif opt in ("-u", "--item-url"):
            item_url = arg
            check_url(item_url, opt)
        elif opt in ("-f", "--feed-file"):
            feed_file = arg
        elif opt in ("-m", "--max-items"):
            try:
                max_items = int(arg)
            except ValueError as msg:
                usage_and_exit("Invalid value '%s' for --max-items." % (arg))
            if max_items < 1:
                usage_and_exit("Value for --max-items must be a positive "
                               "integer.")
        elif opt in ("-U", "--feed-url"):
            feed_url = arg
            check_url(feed_url, opt)
        elif opt in ("-F", "--format"):
            try:
                feedcls = feed_classes[arg]
            except KeyError:
                usage_and_exit("Invalid value '%s' for --format." % arg)

    # --format, --item-url and --feed-url are mandatory.
    if feedcls is None:
        usage_and_exit("Option -F [--format] is required.")

    if item_url is None:
        usage_and_exit("Option -u [--item-url] is required.")

    if feed_url is None:
        usage_and_exit("Option -U [--feed-url] is required.")

    if commit_rev is None:
        # No revision given: ask svnlook for the repository's youngest.
        svnlook_cmd = 'svnlook'
        if svn_path is not None:
            svnlook_cmd = os.path.join(svn_path, 'svnlook')
        cmd = [svnlook_cmd, 'youngest', repos_path]
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        proc.wait()
        cmd_out = proc.stdout.readlines()
        # NOTE(review): under Python 3 proc.stdout yields bytes and
        # int(bytes) raises TypeError, which the IndexError handler below
        # does not catch -- verify against the supported Python versions.
        try:
            revisions = [int(cmd_out[0])]
        except IndexError as msg:
            usage_and_exit("svn2feed.py: Invalid value '%s' for " \
                           "REPOS-PATH" % (repos_path))
    else:
        # A single revision ("N") or a range ("N:M", in either order).
        try:
            rev_range = commit_rev.split(':')
            len_rev_range = len(rev_range)
            if len_rev_range == 1:
                revisions = [int(commit_rev)]
            elif len_rev_range == 2:
                start, end = rev_range
                start = int(start)
                end = int(end)
                if (start > end):
                    tmp = start
                    start = end
                    end = tmp
                # Keep at most max_items revisions (the youngest ones).
                revisions = list(range(start, end + 1)[-max_items:])
            else:
                raise ValueError()
        except ValueError as msg:
            usage_and_exit("svn2feed.py: Invalid value '%s' for --revision." \
                           % (commit_rev))

    # Default feed file: repository basename + format extension, in cwd.
    if feed_file is None:
        feed_file = (os.path.basename(repos_path) +
                     feedcls.get_default_file_extension())

    feed = feedcls(svn_path, repos_path, item_url, feed_file, max_items,
                   feed_url)
    for revision in revisions:
        feed.add_revision_item(revision)
    feed.write_output()


if __name__ == "__main__":
    main()
diff --git a/tools/hook-scripts/svnperms.conf.example b/tools/hook-scripts/svnperms.conf.example
new file mode 100644
index 0000000..29c895a
--- /dev/null
+++ b/tools/hook-scripts/svnperms.conf.example
@@ -0,0 +1,98 @@
+## Example configuration file for svnperms.py. ##
+
+# $Id: svnperms.conf.example 1028289 2010-10-28 13:20:45Z cmpilato $
+#
+# Multiple global [groups] sections are accepted, but be aware
+# that it's the same as concatenating them all in a single entry.
+# You can also create section specific groups, using a syntax
+# like [sectionname groups].
+#
+[groups]
+group1 = user1 user2 user3
+group2 = user4 user5
+supergroup = @group1 @group2 user6
+
+#
+# Example repository control, showing allowed syntax.
+#
+# - the latest match is what counts
+# - groups are prefixed by "@"
+# - you can use groups and users in the same definition
+# - all permissions may be revoked with ()
+# - line breaks are accepted
+#
+[example1 groups]
+group3 = user9 user10
+
+[example1]
+trunk/.* = *(add,remove,update) @group1,user4,user5(update)
+ user6,user7()
+trunk/.* = user8(add,update)
+tags/[^/]+/ = @group3(add)
+branches/[^/]+/.* = *(add,remove,update)
+
+#
+# One of the most used repository structures, for a single project.
+#
+[example2]
+trunk/.* = *(add,remove,update)
+tags/[^/]+/ = *(add)
+branches/[^/]+/.* = *(add,remove,update)
+
+#
+# Another common structure, expecting a project name inside the repository
+# (like trunk/myproject/ and tags/myproject/). In this example, only admins
+# are allowed to create projects, and there are project specific access
+# lists.
+#
+[example3 groups]
+admins = john
+project1 = user1 user2
+project2 = user3 user4
+
+[example3]
+trunk/[^/]+/ = @admins(add,remove)
+trunk/project1/.+ = @project1(add,remove,update)
+trunk/project2/.+ = @project2(add,remove,update)
+tags/[^/]+/ = @admins(add,remove)
+tags/project1/[^/]+/ = @project1(add,remove)
+tags/project2/[^/]+/ = @project2(add,remove)
+branches/[^/]+/ = @admins(add,remove)
+branches/project1/[^/]+/.* = @project1(add,remove,update)
+branches/project2/[^/]+/.* = @project2(add,remove,update)
+
+#
+# A more complex structure, as defined in the following URL:
+#
+# http://moin.conectiva.com.br/RepositorySystem
+#
+[example4 groups]
+admins = user1 user2
+updaters = user3
+
+[example4]
+snapshot/[^/]+/(current/(SPECS/|SOURCES/)?)? = *(add)
+snapshot/[^/]+/ = @admins(add,remove)
+snapshot/[^/]+/current/SPECS/[^/]+\.spec = *(add,remove,update)
+snapshot/[^/]+/current/SOURCES/[^/]+ = *(add,remove,update)
+snapshot/[^/]+/releases/[^/]+/([^/]+/)? = autouser(add)
+snapshot/[^/]+/pristine/ = autouser(add,remove)
+branches/[^/]+/.* = *(add,remove,update)
+releases/[^/]+/ = @admins(add)
+tags/[^/]+/ = *(add,remove)
+updates/[^/]+/[^/]+/(current/(SPECS/|SOURCES/)?)? = @updaters,autouser(add)
+updates/[^/]+/[^/]+/current/SPECS/[^/]+\.spec = @updaters,autouser(add,update)
+updates/[^/]+/[^/]+/current/SOURCES/[^/]+ = @updaters,autouser(add,remove,update)
+updates/[^/]+/[^/]+/releases/.* = autouser(add)
+updates/[^/]+/[^/]+/pristine/ = autouser(add,remove)
+
+#
+# Sections can inherit settings from previously defined sections, using
+# the "extends" keyword in the section declaration. In this example,
+# the [example5] section inherits all the settings from [example2], and
+# adds a new setting for a releases directory which behaves like the
+# tags directory.
+#
+[example5 extends example2]
+releases/[^/]+/ = *(add)
+
diff --git a/tools/hook-scripts/svnperms.py b/tools/hook-scripts/svnperms.py
new file mode 100755
index 0000000..6f059fa
--- /dev/null
+++ b/tools/hook-scripts/svnperms.py
@@ -0,0 +1,363 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+# $HeadURL: https://svn.apache.org/repos/asf/subversion/branches/1.10.x/tools/hook-scripts/svnperms.py $
+# $LastChangedDate: 2016-04-30 08:16:53 +0000 (Sat, 30 Apr 2016) $
+# $LastChangedBy: stefan2 $
+# $LastChangedRevision: 1741723 $
+
+import sys, os
+import getopt
+import shlex
+
+try:
+ # Python >=3.0
+ from subprocess import getstatusoutput as subprocess_getstatusoutput
+except ImportError:
+ # Python <3.0
+ from commands import getstatusoutput as subprocess_getstatusoutput
+try:
+ my_getopt = getopt.gnu_getopt
+except AttributeError:
+ my_getopt = getopt.getopt
+import re
+
__author__ = "Gustavo Niemeyer <gustavo@niemeyer.net>"

class Error(Exception): pass

# "[name]" or "[name extends parent]" section header.
SECTION = re.compile(r'\[([^]]+?)(?:\s+extends\s+([^]]+))?\]')
# "option = value" assignment.
OPTION = re.compile(r'(\S+)\s*=\s*(.*)$')

class Config:
    """Minimal INI-style parser used for svnperms.conf.

    Unlike ConfigParser it preserves option definition order (see
    walk()) and supports "[child extends parent]" section inheritance.
    """
    def __init__(self, filename):
        # Options are stored in __sections_list like this:
        # [(sectname, [(optname, optval), ...]), ...]
        self._sections_list = []
        self._sections_dict = {}
        self._read(filename)

    def _read(self, filename):
        """Parse FILENAME, raising Error with file:line on bad syntax."""
        # Use the same logic as in ConfigParser.__read().
        # Fix: close the file deterministically instead of leaking the
        # handle until garbage collection.
        with open(filename) as file:
            cursectdict = None
            optname = None
            lineno = 0
            for line in file:
                lineno = lineno + 1
                if line.isspace() or line[0] == '#':
                    continue
                if line[0].isspace() and cursectdict is not None and optname:
                    # Continuation line: append to the previous option.
                    value = line.strip()
                    cursectdict[optname] = \
                        "%s %s" % (cursectdict[optname], value)
                    cursectlist[-1][1] = \
                        "%s %s" % (cursectlist[-1][1], value)
                else:
                    m = SECTION.match(line)
                    if m:
                        sectname = m.group(1)
                        parentsectname = m.group(2)
                        if parentsectname is None:
                            # No parent section defined, start a new one.
                            cursectdict = self._sections_dict.setdefault \
                                          (sectname, {})
                            cursectlist = []
                        else:
                            # Copy the parent section into the new section.
                            parentsectdict = self._sections_dict.get \
                                             (parentsectname, {})
                            cursectdict = self._sections_dict.setdefault \
                                          (sectname, parentsectdict.copy())
                            cursectlist = self.walk(parentsectname)
                        self._sections_list.append((sectname, cursectlist))
                        optname = None
                    elif cursectdict is None:
                        raise Error("%s:%d: no section header" % \
                                    (filename, lineno))
                    else:
                        m = OPTION.match(line)
                        if m:
                            optname, optval = m.groups()
                            optval = optval.strip()
                            cursectdict[optname] = optval
                            cursectlist.append([optname, optval])
                        else:
                            raise Error("%s:%d: parsing error" % \
                                        (filename, lineno))

    def sections(self):
        """Return the list of section names."""
        return list(self._sections_dict.keys())

    def options(self, section):
        """Return the option names defined in SECTION."""
        return list(self._sections_dict.get(section, {}).keys())

    def get(self, section, option, default=None):
        """Return OPTION's value within SECTION, or DEFAULT if unset.

        Fix: the original looked OPTION up directly in the dict of
        sections (ignoring SECTION entirely), so it could never return
        a real option value.
        """
        return self._sections_dict.get(section, {}).get(option, default)

    def walk(self, section, option=None):
        """Return [(optname, value), ...] for SECTION in definition
        order, optionally restricted to a single OPTION."""
        ret = []
        for sectname, options in self._sections_list:
            if sectname == section:
                for optname, value in options:
                    if not option or optname == option:
                        ret.append((optname, value))
        return ret
+
+
class Permission:
    """Evaluates which permissions a user holds on repository paths."""

    def __init__(self):
        self._group = {}      # group name -> list of member user names
        self._permlist = []   # (compiled path regex, users, perms) rules

    def parse_groups(self, groupsiter):
        """Load (name, value) group definitions.

        Values are shell-like token lists; "@name" tokens expand
        previously defined groups in place (no forward declarations).
        Raises Error for a reference to an unknown group.
        """
        for option, value in groupsiter:
            groupusers = []
            for token in shlex.split(value):
                # expand nested groups in place; no forward decls
                if token[0] == "@":
                    try:
                        groupusers.extend(self._group[token[1:]])
                    except KeyError:
                        # Fix: was "raise Error, msg", the Python 2-only
                        # raise syntax -- a SyntaxError under Python 3.
                        raise Error("group '%s' not found" % token[1:])
                else:
                    groupusers.append(token)
            self._group[option] = groupusers

    def parse_perms(self, permsiter):
        """Load (pattern, value) permission rules.

        Each value is a space-separated list of "users(perm,perm)"
        entries, where users may include "@group" references and "*".
        """
        for option, value in permsiter:
            # Paths never start with /, so remove it if provided
            if option[0] == "/":
                option = option[1:]
            pattern = re.compile("^%s$" % option)
            for entry in value.split():
                openpar, closepar = entry.find("("), entry.find(")")
                groupsusers = entry[:openpar].split(",")
                perms = entry[openpar+1:closepar].split(",")
                users = []
                for groupuser in groupsusers:
                    if groupuser[0] == "@":
                        try:
                            users.extend(self._group[groupuser[1:]])
                        except KeyError:
                            raise Error("group '%s' not found" % \
                                        groupuser[1:])
                    else:
                        users.append(groupuser)
                self._permlist.append((pattern, users, perms))

    def get(self, user, path):
        """Return the permission list for USER on PATH.

        Rules are evaluated in definition order; the LAST matching rule
        wins, and "*" in a rule's user list matches any user.
        """
        ret = []
        for pattern, users, perms in self._permlist:
            if pattern.match(path) and (user in users or "*" in users):
                ret = perms
        return ret
+
class SVNLook:
    """Thin wrapper around the svnlook command-line tool."""

    def __init__(self, repospath, txn=None, rev=None):
        self.repospath = repospath
        self.txn = txn   # default transaction for queries, if any
        self.rev = rev   # default revision for queries, if any

    def _execcmd(self, *cmd, **kwargs):
        """Run CMD (joined into one shell string); raise Error on a
        non-zero exit status, echoing the command and output to stderr."""
        cmdstr = " ".join(cmd)
        status, output = subprocess_getstatusoutput(cmdstr)
        if status == 0:
            return status, output
        sys.stderr.write(cmdstr)
        sys.stderr.write("\n")
        sys.stderr.write(output)
        raise Error("command failed: %s\n%s" % (cmdstr, output))

    def _execsvnlook(self, cmd, *args, **kwargs):
        """Run "svnlook CMD REPOS [-t TXN | -r REV] ARGS...", forwarding
        only the recognized keyword arguments."""
        argv = ["svnlook", cmd, self.repospath]
        self._add_txnrev(argv, kwargs)
        argv += list(args)
        passthrough = {}
        for key in ("show", "noerror"):
            if key in kwargs:
                passthrough[key] = kwargs[key]
        return self._execcmd(*argv, **passthrough)

    def _add_txnrev(self, cmd_args, received_kwargs):
        """Append -t/-r flags; an explicit per-call value (even None)
        overrides the instance default."""
        txn = received_kwargs["txn"] if "txn" in received_kwargs else self.txn
        if txn is not None:
            cmd_args += ["-t", txn]
        rev = received_kwargs["rev"] if "rev" in received_kwargs else self.rev
        if rev is not None:
            cmd_args += ["-r", rev]

    def changed(self, **kwargs):
        """Return [(changedata, changeprop, path), ...] parsed from
        "svnlook changed", or None if the command failed."""
        status, output = self._execsvnlook("changed", **kwargs)
        if status != 0:
            return None
        changes = []
        for line in output.splitlines():
            line = line.rstrip()
            if not line:
                continue
            # Column 0 is the content change, column 1 the property
            # change; "_" / " " mean "no change" respectively.
            changedata = line[0] if line[0] != "_" else None
            changeprop = line[1] if line[1] != " " else None
            changes.append((changedata, changeprop, line[4:]))
        return changes

    def author(self, **kwargs):
        """Return the txn/rev author, or None if svnlook failed."""
        status, output = self._execsvnlook("author", **kwargs)
        return output.strip() if status == 0 else None
+
+
def check_perms(filename, section, repos, txn=None, rev=None, author=None):
    """Verify AUTHOR's permissions for every change in TXN/REV of REPOS.

    Raises Error (listing every violated path) when the commit must be
    rejected; returns None when all changes are allowed.
    """
    svnlook = SVNLook(repos, txn=txn, rev=rev)
    if author is None:
        author = svnlook.author()
    changes = svnlook.changed()
    try:
        config = Config(filename)
    except IOError:
        raise Error("can't read config file "+filename)
    if section not in config.sections():
        raise Error("section '%s' not found in config file" % section)
    perm = Permission()
    # Global groups first, then section-specific "[SECTION groups]",
    # which may reference (and thereby extend) the global ones.
    perm.parse_groups(config.walk("groups"))
    perm.parse_groups(config.walk(section+" groups"))
    perm.parse_perms(config.walk(section))
    failures = []
    for changedata, changeprop, path in changes:
        pathperms = perm.get(author, path)
        if changedata == "A" and "add" not in pathperms:
            failures.append("you can't add "+path)
        elif changedata == "U" and "update" not in pathperms:
            failures.append("you can't update "+path)
        elif changedata == "D" and "remove" not in pathperms:
            failures.append("you can't remove "+path)
        elif changeprop == "U" and "update" not in pathperms:
            failures.append("you can't update properties of "+path)
    if failures:
        failures.insert(0, "you don't have enough permissions for "
                           "this transaction:")
        raise Error("\n".join(failures))
+
+
# Command:

# Usage text shown by -h and on argument errors; module-level so that
# main() can print it alongside a MissingArgumentsException.
USAGE = """\
Usage: svnperms.py OPTIONS

Options:
    -r PATH    Use repository at PATH to check transactions
    -t TXN     Query transaction TXN for commit information
    -f PATH    Use PATH as configuration file (default is repository
               path + /conf/svnperms.conf)
    -s NAME    Use section NAME as permission section (default is
               repository name, extracted from repository path)
    -R REV     Query revision REV for commit information (for tests)
    -A AUTHOR  Check commit as if AUTHOR had committed it (for tests)
    -h         Show this message
"""

class MissingArgumentsException(Exception):
    "Thrown when required arguments are missing."
    pass
+
def parse_options():
    """Parse sys.argv into an options object.

    Raises Error on getopt failure or a bad repository path, and
    MissingArgumentsException when mandatory options are absent."""
    try:
        opts, args = my_getopt(sys.argv[1:], "f:s:r:t:R:A:h", ["help"])
    except getopt.GetoptError as e:
        raise Error(e.msg)
    # Simple attribute bag for the parsed values.
    class Options: pass
    obj = Options()
    obj.filename = None
    obj.section = None
    obj.repository = None
    obj.transaction = None
    obj.revision = None
    obj.author = None
    for opt, val in opts:
        if opt == "-f":
            obj.filename = val
        elif opt == "-s":
            obj.section = val
        elif opt == "-r":
            obj.repository = val
        elif opt == "-t":
            obj.transaction = val
        elif opt == "-R":
            obj.revision = val
        elif opt == "-A":
            obj.author = val
        elif opt in ["-h", "--help"]:
            sys.stdout.write(USAGE)
            sys.exit(0)
    # A repository plus either a transaction or a revision is mandatory.
    missingopts = []
    if not obj.repository:
        missingopts.append("repository")
    if not (obj.transaction or obj.revision):
        missingopts.append("either transaction or a revision")
    if missingopts:
        raise MissingArgumentsException("missing required option(s): " + ", ".join(missingopts))
    obj.repository = os.path.abspath(obj.repository)
    # Defaults derived from the repository location.
    if obj.filename is None:
        obj.filename = os.path.join(obj.repository, "conf", "svnperms.conf")
    if obj.section is None:
        obj.section = os.path.basename(obj.repository)
    # Sanity check: the path must contain db/, hooks/ and a format file
    # to look like a repository root.
    if not (os.path.isdir(obj.repository) and
            os.path.isdir(os.path.join(obj.repository, "db")) and
            os.path.isdir(os.path.join(obj.repository, "hooks")) and
            os.path.isfile(os.path.join(obj.repository, "format"))):
        raise Error("path '%s' doesn't look like a repository" % \
                    obj.repository)

    return obj
+
def main():
    """CLI entry point: parse the options and run the permission check,
    mapping failures to stderr messages and exit status 1."""
    try:
        options = parse_options()
        check_perms(options.filename, options.section, options.repository,
                    options.transaction, options.revision, options.author)
    except MissingArgumentsException as err:
        sys.stderr.write("%s\n" % str(err))
        sys.stderr.write(USAGE)
        sys.exit(1)
    except Error as err:
        sys.stderr.write("error: %s\n" % str(err))
        sys.exit(1)

if __name__ == "__main__":
    main()

# vim:et:ts=4:sw=4
diff --git a/tools/hook-scripts/validate-extensions.py b/tools/hook-scripts/validate-extensions.py
new file mode 100755
index 0000000..ed0283d
--- /dev/null
+++ b/tools/hook-scripts/validate-extensions.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+"""\
+Check that any files pending commit into a Subversion repository have
+suitable file extensions, printing an error and exiting with an
+errorful value if any files fail validation. This is intended to be
+used as a Subversion pre-commit hook script.
+
+Syntax 1:
+
+ validate-extensions.py REPOS_PATH TXN_NAME deny EXT [...]
+
+ Ensure that any newly added files do *not* have one of the provided
+ file extensions.
+
+
+Syntax 2:
+
+ validate-extensions.py REPOS_PATH TXN_NAME allow EXT [...]
+
+ Ensure that any newly added files *do* have one of the provided
+ file extensions. (Extension-less files are disallowed.)
+
+"""
+
+import sys
+import os
+from svn import repos, fs, core
+
def validate_added_extensions(repos_path, txn_name, extensions, action):
    """Reject the transaction if a newly added file has a bad extension.

    EXTENSIONS is a list of lower-cased extensions (without the dot);
    ACTION is 'deny' (listed extensions are rejected) or 'allow' (only
    listed extensions are accepted, so extension-less files are rejected).
    Writes an error to stderr and exits(1) on the first offending path.
    """
    # Open the repository and transaction.
    fs_ptr = repos.fs(repos.open(repos_path))
    txn_t = fs.open_txn(fs_ptr, txn_name)
    txn_root = fs.txn_root(txn_t)

    # Fetch the changes made in this transaction.
    changes = fs.svn_fs_paths_changed(txn_root)
    paths = changes.keys()

    # Check the changes.
    for path in paths:
        change = changes[path]

        # Always allow deletions.
        if change.change_kind == fs.path_change_delete:
            continue

        # Always allow non-files.
        kind = fs.check_path(txn_root, path)
        if kind != core.svn_node_file:
            continue

        # If this was a newly added (without history) file ...
        if ((change.change_kind == fs.path_change_replace) \
            or (change.change_kind == fs.path_change_add)):
            copyfrom_rev, copyfrom_path = fs.copied_from(txn_root, path)
            if copyfrom_rev == core.SVN_INVALID_REVNUM:

                # ... then check it for a valid extension.
                base, ext = os.path.splitext(path)
                if ext:
                    # splitext keeps the leading dot; strip and lower-case
                    # so the comparison matches the normalized EXTENSIONS.
                    ext = ext[1:].lower()
                if ((ext in extensions) and (action == 'deny')) \
                   or ((ext not in extensions) and (action == 'allow')):
                    sys.stderr.write("Path '%s' has an extension disallowed by server "
                                     "configuration.\n" % (path))
                    sys.exit(1)
+
def usage_and_exit(errmsg=None):
    """Print the module docstring (and ERRMSG, if given), then exit.

    With ERRMSG: writes to stderr and exits with status 1.
    Without: writes to stdout and exits with status 0.
    """
    stream = sys.stderr if errmsg else sys.stdout
    stream.write(__doc__)
    if errmsg:
        stream.write("ERROR: " + errmsg + "\n")
    sys.exit(1 if errmsg else 0)
+
def main():
    """Parse REPOS_PATH TXN_NAME ACTION EXT... from argv and run the check."""
    if len(sys.argv) < 5:
        usage_and_exit("Not enough arguments.")
    repos_path, txn_name, action = sys.argv[1:4]
    if action not in ("allow", "deny"):
        usage_and_exit("Invalid action '%s'. Expected either 'allow' or 'deny'."
                       % (action))
    # Normalize extensions the same way validate_added_extensions does.
    extensions = [x.lower() for x in sys.argv[4:]]
    validate_added_extensions(repos_path, txn_name, extensions, action)

if __name__ == "__main__":
    main()
diff --git a/tools/hook-scripts/validate-files.conf.example b/tools/hook-scripts/validate-files.conf.example
new file mode 100644
index 0000000..f37981f
--- /dev/null
+++ b/tools/hook-scripts/validate-files.conf.example
@@ -0,0 +1,69 @@
+# DEFAULT section can be used to place options that can be referenced in
+# other section values with the %(option)s syntax. Note that the svnlook
+# value below is required as it is used by the script to determine the path
+# to the svnlook command in order to determine the changes. Feel free
+# to create additional values here that you can reuse in other options,
+# especially the command options to make it easier to maintain.
+[DEFAULT]
+svnlook = /usr/local/bin/svnlook
+#svnauthz = /usr/local/bin/svn-tools/svnauthz
+#xmllint = /usr/bin/xmllint
+
+# The repositories section has key value pairs where the key is a pattern
+# to match on the repository path and the value is a space separated list of
+# rules to apply to that repository. Multiple patterns can match and all
+# unique rules will be applied. The pattern is a Unix shell-style wildcard.
+# As seen below all repositories will have the svnauthz-validate and xmllint
+# rules applied and repositories in /repos or below will have admin-rw-authz
+# applied.
+[repositories]
+#* = svnauthz-validate xmllint
+#/repos/* = admin-rw-authz
+
+# Rules allow you to define a pattern to match against which files in the
+# repository to run a command against. Rules are defined by creating a
+# section name starting with 'rule:' as seen below.
+#
+# The pattern option is a Unix shell-style wildcard match against the
+# files in the repo that the rule will be run for. A leading / in your
+# pattern will be ignored. Path segments are / separated regardless of
+# platform.
+#
+# The command option is the command to run, this command will be run via
+# the shell of your platform. The following environment variables will
+# be defined for you:
+# REPO = the path of the repository for the commit.
+# TXN = the transaction id of the commit.
+# FILE = the name of the file that matched the pattern.
+#
+# IMPORTANT: AS A CONSEQUENCE OF THE USE OF THE SHELL IT IS IMPORTANT TO
+# QUOTE THE ARGUMENTS OF YOUR COMMANDS. THE FILE VARIABLE DOES CONTAIN
+# USER GENERATED DATA AND SHELL METACHARACTERS ARE NOT ESCAPED FOR YOU!
+#
+# The following examples assume a POSIX shell, if your platform has a
+# different shell you may need to adjust them. For example on Windows
+# cmd.exe uses %VARIABLENAME% instead of $VARIABLENAME to expand environment
+# variables.
+#
+# The following rule runs the svnauthz command's validate subcommand
+# for file named authz in the conf subdir if it is present in the commit.
+# This is a simple way to ensure that invalid authz files are not allowed
+# to be committed.
+#[rule:svnauthz-validate]
+#pattern = conf/authz
+#command = '%(svnauthz)s' validate -t "$TXN" "$REPO" "$FILE"
+
+# The following rule runs the svnauthz command's accessof subcommand
+# for any file ending in .authz for the conf subdir and checks that the admin
+# user has rw rights to the same file. This can be used to prevent an
+# authz file being committed that would remove access for the admin user.
+# Note that accessof also validates the validity of the file as well as
+# checking the permissions, so it's unnecessary to run validate and accessof.
+#[rule:admin-rw-authz]
+#pattern = /conf/*.authz
+#command = '%(svnauthz)s' accessof --username admin --path "$FILE" --is rw -t "$TXN" "$REPO" "$FILE"
+
+# Use the xmllint command to validate all files ending in .xml
+#[rule:xmllint]
+#pattern = *.xml
+#command = '%(svnlook)s' cat -t "$TXN" "$REPO" "$FILE" | '%(xmllint)s' --noout -
diff --git a/tools/hook-scripts/validate-files.py b/tools/hook-scripts/validate-files.py
new file mode 100755
index 0000000..7169251
--- /dev/null
+++ b/tools/hook-scripts/validate-files.py
@@ -0,0 +1,159 @@
+#!/usr/bin/env python
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Subversion pre-commit hook script that runs user configured commands
+to validate files in the commit and reject the commit if the commands
+exit with a non-zero exit code. The script expects a validate-files.conf
+file placed in the conf dir under the repo the commit is for."""
+
+import sys
+import os
+import subprocess
+import fnmatch
+
+# Deal with the rename of ConfigParser to configparser in Python3
+try:
+ # Python >= 3.0
+ import configparser
+except ImportError:
+ # Python < 3.0
+ import ConfigParser as configparser
+
# configparser.SafeConfigParser was deprecated in Python 3.2 and removed in
# Python 3.12, which made this class definition fail at import time on
# modern interpreters.  On Python 3, ConfigParser is its direct equivalent.
try:
    _ConfigParserBase = configparser.SafeConfigParser
except AttributeError:
    _ConfigParserBase = configparser.ConfigParser

class Config(_ConfigParserBase):
    """ConfigParser subclass with some customizations
    for this script"""

    def optionxform(self, option):
        """Redefine optionxform so option names are case sensitive
        (the inherited implementation lower-cases them)."""
        return option

    def getlist(self, section, option):
        """Returns value of option as a list using whitespace to
        split entries, or None when the value is empty."""
        value = self.get(section, option)
        if value:
            return value.split()
        else:
            return None

    def get_matching_rules(self, repo):
        """Return list of unique rule names that apply to a given repo.

        Every pattern key of the [repositories] section is fnmatch'ed
        against REPO; the rule lists of all matching patterns are merged.
        """
        rules = {}
        for option in self.options('repositories'):
            if fnmatch.fnmatch(repo, option):
                for rule in self.getlist('repositories', option):
                    rules[rule] = True
        return rules.keys()

    def get_rule_section_name(self, rule):
        """Given a rule name provide the section name it is defined in."""
        return 'rule:%s' % (rule)
+
class Commands:
    """Runs the external commands this hook depends on."""

    def __init__(self, config):
        self.config = config

    def svnlook_changed(self, repo, txn):
        """Return the list of files whose contents changed in TXN of REPO.

        Runs 'svnlook changed'; directory entries (which svnlook marks
        with a trailing '/') are skipped, as is anything whose change
        letter is neither 'A' (added) nor 'U' (updated).  Exits(2) and
        forwards svnlook's stderr if the command fails.
        """
        svnlook = self.config.get('DEFAULT', 'svnlook')
        cmd = "'%s' changed -t '%s' '%s'" % (svnlook, txn, repo)
        proc = subprocess.Popen(cmd, shell=True,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE)

        changed = []
        while True:
            raw = proc.stdout.readline()
            if not raw:
                break
            entry = raw.decode().strip()
            flag = entry[0:1]
            # Keep only content changes to files (add or update).
            if entry[-1] != "/" and flag in ("A", "U"):
                changed.append(entry[4:])

        # Wait for the command to finish so returncode/stderr are valid.
        out_err = proc.communicate()
        if proc.returncode != 0:
            sys.stderr.write(out_err[1].decode())
            sys.exit(2)

        return changed

    def user_command(self, section, repo, txn, fn):
        """Run the 'command' option configured in SECTION via the shell.

        The command sees the commit context through the environment
        variables REPO, TXN and FILE.

        Returns a tuple of the exit code and the decoded stderr output."""
        command = self.config.get(section, 'command')
        env = os.environ.copy()
        env['REPO'] = repo
        env['TXN'] = txn
        env['FILE'] = fn
        proc = subprocess.Popen(command, shell=True, env=env,
                                stderr=subprocess.PIPE)
        _, err = proc.communicate()
        return (proc.returncode, err.decode())
+
def main(repo, txn):
    """Validate the files changed in TXN of REPO against configured rules.

    Reads conf/validate-files.conf from the repository, collects the
    rules matching the repository path, and runs each rule's command on
    every changed file matching the rule's pattern.  Returns 0 when all
    commands succeed, 1 when any fails.
    """
    config = Config()
    config.read(os.path.join(repo, 'conf', 'validate-files.conf'))
    commands = Commands(config)

    rules = config.get_matching_rules(repo)
    if len(rules) == 0:
        # No matching rules, so nothing to do.
        sys.exit(0)

    changed = commands.svnlook_changed(repo, txn)
    if len(changed) == 0:
        # A commit with no content changes shouldn't ever happen.
        sys.exit(0)

    exitcode = 0
    for rule in rules:
        section = config.get_rule_section_name(rule)
        pattern = config.get(section, 'pattern')
        if pattern[0] == '/':
            # Leading slashes in patterns are documented as ignored.
            pattern = pattern[1:]

        for fn in fnmatch.filter(changed, pattern):
            returncode, err_mesg = commands.user_command(section, repo,
                                                         txn, fn)
            if returncode != 0:
                sys.stderr.write(
                    "\nError validating file '%s' with rule '%s' " \
                    "(exit code %d):\n" % (fn, rule, returncode))
                sys.stderr.write(err_mesg)
                exitcode = 1

    return exitcode

if __name__ == "__main__":
    if len(sys.argv) != 3:
        sys.stderr.write("invalid args\n")
        sys.exit(0)

    try:
        sys.exit(main(sys.argv[1], sys.argv[2]))
    except configparser.Error as e:
        sys.stderr.write("Error with the validate-files.conf: %s\n" % e)
        sys.exit(2)
diff --git a/tools/hook-scripts/verify-po.py b/tools/hook-scripts/verify-po.py
new file mode 100755
index 0000000..b860901
--- /dev/null
+++ b/tools/hook-scripts/verify-po.py
@@ -0,0 +1,128 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+"""This is a pre-commit hook that checks whether the contents of PO files
+committed to the repository are encoded in UTF-8.
+"""
+
+import codecs
+import string
+import sys
+import subprocess
+from svn import core, fs, delta, repos
+
# Set to the path of the 'msgfmt' executable to use msgfmt to check
# the syntax of the po file
USE_MSGFMT = None

if USE_MSGFMT is not None:
    class MsgFmtChecker:
        """Feeds PO data through 'msgfmt -c' to syntax-check it."""

        def __init__(self):
            # NOTE(review): close_fds is skipped on win32, presumably
            # because it was unsupported with redirected handles there.
            self.pipe = subprocess.Popen(
                [USE_MSGFMT, "-c", "-o", "/dev/null", "-"],
                stdin=subprocess.PIPE,
                close_fds=sys.platform != "win32")
            self.io_error = 0

        def write(self, data):
            """Forward DATA to msgfmt, remembering any pipe failure."""
            if self.io_error:
                return
            try:
                self.pipe.stdin.write(data)
            except IOError:
                self.io_error = 1

        def close(self):
            """Finish the check; true iff msgfmt exited 0 with no I/O errors."""
            try:
                self.pipe.stdin.close()
            except IOError:
                self.io_error = 1
            return self.pipe.wait() == 0 and not self.io_error
else:
    class MsgFmtChecker:
        """No-op stand-in used when msgfmt checking is disabled."""

        def write(self, data):
            pass

        def close(self):
            # Always report success.
            return 1
+
+
class ChangeReceiver(delta.Editor):
    """Delta editor that, as the commit transaction is replayed, checks
    every changed .po file for valid UTF-8 (and optionally feeds it to
    msgfmt via MsgFmtChecker), exiting non-zero on the first failure."""

    def __init__(self, txn_root, base_root, pool):
        self.txn_root = txn_root
        self.base_root = base_root
        self.pool = pool

    def add_file(self, path, parent_baton,
                 copyfrom_path, copyfrom_revision, file_pool):
        # File baton is [text_changed_flag, path].
        return [0, path]

    def open_file(self, path, parent_baton, base_revision, file_pool):
        return [0, path]

    def apply_textdelta(self, file_baton, base_checksum):
        # A text delta means the file's contents changed; the delta
        # itself is not needed, only the flag.
        file_baton[0] = 1
        # no handler
        return None

    def close_file(self, file_baton, text_checksum):
        changed, path = file_baton
        if len(path) < 3 or path[-3:] != '.po' or not changed:
            # This is not a .po file, or it hasn't changed
            return

        try:
            # Read the file contents through a validating UTF-8 decoder
            subpool = core.svn_pool_create(self.pool)
            checker = MsgFmtChecker()
            try:
                stream = core.Stream(fs.file_contents(self.txn_root, path, subpool))
                reader = codecs.getreader('UTF-8')(stream, 'strict')
                writer = codecs.getwriter('UTF-8')(checker, 'strict')
                while True:
                    data = reader.read(core.SVN_STREAM_CHUNK_SIZE)
                    if not data:
                        break
                    writer.write(data)
                if not checker.close():
                    sys.exit("PO format check failed for '" + path + "'")
            except UnicodeError:
                sys.exit("PO file is not in UTF-8: '" + path + "'")
        finally:
            core.svn_pool_destroy(subpool)
+
+
def check_po(pool, repos_path, txn):
    """Replay transaction TXN of the repository at REPOS_PATH through a
    ChangeReceiver, which exits non-zero if any changed .po file fails
    its checks."""

    def authz_cb(root, path, pool):
        # Grant read access everywhere: the hook inspects the whole txn.
        return 1

    fs_ptr = repos.fs(repos.open(repos_path, pool))
    txn_ptr = fs.open_txn(fs_ptr, txn, pool)
    txn_root = fs.txn_root(txn_ptr, pool)
    base_root = fs.revision_root(fs_ptr, fs.txn_base_revision(txn_ptr), pool)
    editor = ChangeReceiver(txn_root, base_root, pool)
    e_ptr, e_baton = delta.make_editor(editor, pool)
    # Drive the editor with the differences between the txn and its base
    # revision; close_file() does the actual validation per file.
    repos.dir_delta(base_root, '', '', txn_root, '',
                    e_ptr, e_baton, authz_cb, 0, 1, 0, 0, pool)


if __name__ == '__main__':
    # Pre-commit hooks are invoked as: hook REPOS_PATH TXN_NAME
    assert len(sys.argv) == 3
    core.run_app(check_po, sys.argv[1], sys.argv[2])
diff --git a/tools/po/l10n-report.py b/tools/po/l10n-report.py
new file mode 100755
index 0000000..a3e0fd6
--- /dev/null
+++ b/tools/po/l10n-report.py
@@ -0,0 +1,246 @@
+#!/usr/bin/env python
+#
+# $Id: l10n-report.py 1741723 2016-04-30 08:16:53Z stefan2 $
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+"""Usage: l10n-report.py [OPTION...]
+
+Send the l10n translation status report to an email address. If the
+email address is not specified, print in stdout.
+
+Options:
+
+ -h, --help Show this help message.
+
+ -m, --to-email-id Send the l10n translation status report to this
+ email address.
+"""
+
+import sys
+import getopt
+import os
+import re
+import subprocess
+
+FROM_ADDRESS = "Subversion Translation Status <noreply@subversion.apache.org>"
+LIST_ADDRESS = "dev@subversion.apache.org"
+SUBJECT_TEMPLATE = "[l10n] Translation status report for %s r%s"
+MAIL_THREAD_ID = '<translation_status_report_for_%s@subversion.apache.org>'
+
+def _rev():
+ dollar = "$Revision: 1741723 $"
+ return int(re.findall('[0-9]+', dollar)[0]);
+
def usage_and_exit(errmsg=None):
    """Print a usage message, plus an ERRMSG (if provided), then exit.
    If ERRMSG is provided, the usage message is printed to stderr and
    the script exits with a non-zero error code.  Otherwise, the usage
    message goes to stdout, and the script exits with a zero
    errorcode."""
    # Note: the stream is chosen on 'is None' while the exit path tests
    # truthiness, exactly mirroring the original control flow.
    stream = sys.stdout if errmsg is None else sys.stderr
    stream.write("%s\n" % __doc__)
    stream.flush()
    if not errmsg:
        sys.exit(0)
    stream.write("\nError: %s\n" % errmsg)
    stream.flush()
    sys.exit(2)
+
+
class l10nReport:
    """Helpers for the translation status report: runs external commands,
    matches regexes, and gathers per-.po-file message counts."""

    def __init__(self, to_email_id=""):
        self.to_email_id = to_email_id
        self.from_email_id = "<%s>" % LIST_ADDRESS

    def safe_command(self, cmd_and_args, cmd_in=""):
        """Run CMD_AND_ARGS (an argv list, no shell) feeding CMD_IN to
        stdin.  Returns (stdout, stderr) exactly as Popen.communicate()
        produces them (bytes on Python 3)."""
        [stdout, stderr] = subprocess.Popen(cmd_and_args, \
                                            stdin=subprocess.PIPE, \
                                            stdout=subprocess.PIPE, \
                                            stderr=subprocess.PIPE).communicate(input=cmd_in)
        return stdout, stderr

    def match(self, pattern, string):
        """Search STRING with PATTERN (a regex string or an already
        compiled pattern) and return the first capture group, or None if
        there is no match or the pattern has no groups."""
        # re.compile() returns an already-compiled pattern unchanged, so
        # the old isinstance() pre-check -- which used the Python-2-only
        # name 'basestring' and raised NameError on Python 3 -- was both
        # broken and redundant.
        match = re.compile(pattern).search(string)
        if match and match.groups():
            return match.group(1)
        else:
            return None

    def _count(self, wc_output):
        """Convert 'wc -l' output to an int.  communicate() yields bytes
        on Python 3 and int() rejects bytes, so decode first."""
        if isinstance(wc_output, bytes):
            wc_output = wc_output.decode()
        return int(wc_output)

    def get_msgattribs(self, file):
        """Return (translated, untranslated, fuzzy, obsolete) message
        counts for FILE, using the msgattrib/grep/sed/wc tool chain."""
        # The first msgid of a .po file is the header; 'sed 1d' drops it.
        msgout = self.safe_command(['msgattrib', '--translated', file])[0]
        grepout = self.safe_command(['grep', '-E', '^msgid *"'], msgout)[0]
        sedout = self.safe_command(['sed', '1d'], grepout)[0]
        trans = self.safe_command(['wc', '-l'], sedout)[0]

        msgout = self.safe_command(['msgattrib', '--untranslated', file])[0]
        grepout = self.safe_command(['grep', '-E', '^msgid *"'], msgout)[0]
        sedout = self.safe_command(['sed', '1d'], grepout)[0]
        untrans = self.safe_command(['wc', '-l'], sedout)[0]

        msgout = self.safe_command(['msgattrib', '--only-fuzzy', file])[0]
        grepout = self.safe_command(['grep', '-E', '^msgid *"'], msgout)[0]
        sedout = self.safe_command(['sed', '1d'], grepout)[0]
        fuzzy = self.safe_command(['wc', '-l'], sedout)[0]

        # Obsolete messages are commented out with '#~', header included,
        # so no 'sed 1d' is needed here.
        msgout = self.safe_command(['msgattrib', '--only-obsolete', file])[0]
        grepout = self.safe_command(['grep', '-E', '^#~ msgid *"'], msgout)[0]
        obsolete = self.safe_command(['wc', '-l'], grepout)[0]

        return (self._count(trans), self._count(untrans),
                self._count(fuzzy), self._count(obsolete))

    def pre_l10n_report(self):
        """Bring the working copy up to date: revert subversion/po, run
        'svn update', then regenerate the .po files via po-update.sh.
        Exits if svn reports anything on stderr."""
        # svn revert --recursive subversion/po
        cmd = ['svn', 'revert', '--recursive', 'subversion/po']
        stderr = self.safe_command(cmd)[1]
        if stderr:
            sys.stderr.write("\nError: %s\n" % stderr)
            sys.stderr.flush()
            sys.exit(0)

        # svn update
        cmd = ['svn', 'update']
        stderr = self.safe_command(cmd)[1]
        if stderr:
            sys.stderr.write("\nError: %s\n" % stderr)
            sys.stderr.flush()
            sys.exit(0)

        # tools/po/po-update.sh
        cmd = ['sh', 'tools/po/po-update.sh']
        self.safe_command(cmd)
+
+
def bar_graph(nominal_length, trans, untrans, fuzzy, obsolete):
    """Format the given four counts into a bar graph string in which the
    total length of the bars representing the TRANS, UNTRANS and FUZZY
    counts is NOMINAL_LENGTH characters, and the bar representing the
    OBSOLETE count extends beyond that.

    Returns the empty string when all of TRANS/UNTRANS/FUZZY are zero."""

    total_count = trans + untrans + fuzzy # don't include 'obsolete'
    if not total_count:
        # Guard against ZeroDivisionError for an empty .po file.
        return ''
    accum_bar = 0
    accum_count = 0
    s = ''
    for count, letter in [(trans, '+'), (untrans, 'U'), (fuzzy, '~'),
                          (obsolete, 'o')]:
        accum_count += count
        # Floor division: Python 3 true division would make new_bar_end a
        # float, and 'letter * float' raises TypeError.  (Python 2's '/'
        # on ints already floored, so behavior there is unchanged.)
        new_bar_end = nominal_length * accum_count // total_count
        s += letter * (new_bar_end - accum_bar)
        accum_bar = new_bar_end
    return s
+
+
def main():
    """Generate the translation status report and print it; if an email
    address was given with -m, also send it to the dev list thread."""
    # Parse the command-line options and arguments.
    try:
        opts, args = getopt.gnu_getopt(sys.argv[1:], "hm:",
                                       ["help",
                                        "to-email-id=",
                                        ])
    except getopt.GetoptError as msg:
        usage_and_exit(msg)

    to_email_id = None
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            usage_and_exit()
        elif opt in ("-m", "--to-email-id"):
            to_email_id = arg

    l10n = l10nReport()
    # Work from the root of the Subversion source tree (two levels above
    # this script) so the relative paths below resolve.
    os.chdir("%s/../.." % os.path.dirname(os.path.abspath(sys.argv[0])))
    l10n.pre_l10n_report()
    [info_out, info_err] = l10n.safe_command(['svn', 'info'])
    if info_err:
        sys.stderr.write("\nError: %s\n" % info_err)
        sys.stderr.flush()
        sys.exit(0)

    po_dir = 'subversion/po'
    # E.g. 'trunk' or 'branches/1.9.x', taken from the working copy URL.
    branch_name = l10n.match('URL:.*/asf/subversion/(\S+)', info_out)
    [info_out, info_err] = l10n.safe_command(['svnversion', po_dir])
    if info_err:
        sys.stderr.write("\nError: %s\n" % info_err)
        sys.stderr.flush()
        sys.exit(0)

    # Strip svnversion's 'M' (modified) / 'S' (switched) markers.
    wc_version = re.sub('[MS]', '', info_out.strip())
    title = "Translation status report for %s@r%s" % \
            (branch_name, wc_version)

    os.chdir(po_dir)
    files = sorted(os.listdir('.'))
    format_head = "\n%6s %7s %7s %7s %7s" % ("lang", "trans", "untrans",
                                             "fuzzy", "obs")
    format_line = "--------------------------------------"
    print("\n%s\n%s\n%s" % (title, format_head, format_line))

    body = ""
    po_pattern = re.compile('(.*).po$')
    for file in files:
        # Skip anything that isn't a .po file; the capture is the locale.
        lang = l10n.match(po_pattern, file)
        if not lang:
            continue
        [trans, untrans, fuzzy, obsolete] = l10n.get_msgattribs(file)
        po_format = "%6s %7d %7d %7d %7d" %\
                    (lang, trans, untrans, fuzzy, obsolete)
        po_format += " " + bar_graph(30, trans, untrans, fuzzy, obsolete)
        body += "%s\n" % po_format
        print(po_format)

    if to_email_id:
        import smtplib
        # Ensure compatibility of the email module all the way to Python 2.3
        try:
            from email.message import Message
        except ImportError:
            from email.Message import Message

        msg = Message()
        msg["From"] = FROM_ADDRESS
        msg["To"] = to_email_id
        msg["Subject"] = SUBJECT_TEMPLATE % (branch_name, wc_version)
        msg["X-Mailer"] = "l10n-report.py r%s" % _rev()
        msg["Reply-To"] = LIST_ADDRESS
        msg["Mail-Followup-To"] = LIST_ADDRESS
        # Thread all reports for a branch under one stable message id.
        msg["In-Reply-To"] = MAIL_THREAD_ID % (branch_name.replace('/', '_'))
        msg["References"] = msg["In-Reply-To"]

        # http://www.iana.org/assignments/auto-submitted-keywords/auto-submitted-keywords.xhtml
        msg["Auto-Submitted"] = 'auto-generated'

        msg.set_type("text/plain")
        msg.set_payload("\n".join((title, format_head, format_line, body)))

        server = smtplib.SMTP('localhost')
        server.sendmail("From: " + FROM_ADDRESS,
                        "To: " + to_email_id,
                        msg.as_string())
        print("The report is sent to '%s' email id." % to_email_id)
    else:
        print("\nYou have not passed '-m' option, so email is not sent.")

if __name__ == "__main__":
    main()
diff --git a/tools/po/po-update.sh b/tools/po/po-update.sh
new file mode 100755
index 0000000..2aca523
--- /dev/null
+++ b/tools/po/po-update.sh
@@ -0,0 +1,123 @@
+#!/bin/sh
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+#
+# Usage:
+# ./po-update.sh pot
+# - to generate just the pot file
+# ./po-update.sh
+# - to update all locales
+# ./po-update.sh LL
+# - to update one the LL locale
+
set -e

# Allow overriding the gettext tools from the environment.
XGETTEXT=${XGETTEXT:-xgettext}
MSGMERGE=${MSGMERGE:-msgmerge}

# Locate the root of the Subversion source tree: the script may be run
# from the root itself, from tools/, or from tools/po/.
svn_base=
for i in . .. ../..; do
  if [ -d "$i/subversion/po" ]; then
    svn_base="$i"
    break
  fi
done
if [ -z "$svn_base" ]; then
  echo "E: You must run po-update.sh from within a Subversion source tree." >&2
  exit 1
fi

# Guard so make_pot only builds subversion.pot once per run.
pot_done=
+
# Extract all translatable strings from the C sources into
# subversion/po/subversion.pot.  Safe to call repeatedly: the $pot_done
# guard makes it a no-op after the first invocation.
make_pot()
{
  if [ -z "$pot_done" ]; then
    echo "Building subversion.pot..."
    (cd $svn_base/subversion/po && \
     find .. \
         -name .svn -prune -or \
         -name tests -prune -or \
         -name bindings -prune -or \
         -name "*.c" -print -or \
         -name "svn_error_codes.h" -print -or \
         -name "svn_fs_util.h" -print | \
     $XGETTEXT --sort-by-file -k_ -kN_ -kQ_:1,2 -kSVN_ERRDEF:3 \
               --flag=_:1:pass-c-format \
               --flag=N_:1:pass-c-format \
               --flag=Q_:1:pass-c-format \
               --flag=Q_:2:pass-c-format \
               --flag=svn_cmdline_printf:2:c-format \
               --flag=svn_cmdline_fprintf:3:c-format \
               --flag=svn_error_createf:3:c-format \
               --flag=svn_error_wrap_apr:2:c-format \
               --flag=svn_stream_printf:3:c-format \
               --flag=svn_stream_printf_from_utf8:4:c-format \
               --flag=svn_string_createf:2:c-format \
               --flag=svn_string_createv:2:c-format \
               --flag=svn_stringbuf_createf:2:c-format \
               --flag=svn_stringbuf_createv:2:c-format \
               --flag=svn_fs_bdb__dberrf:3:c-format \
               --flag=file_printf_from_utf8:3:c-format \
               --flag=do_io_file_wrapper_cleanup:3:c-format \
               --flag=do_io_file_wrapper_cleanup:4:c-format \
               --msgid-bugs-address=dev@subversion.apache.org \
               --add-comments --files-from=- -o subversion.pot )
    pot_done=1
  fi
}
+
# Merge subversion.pot into the .po file(s) selected by $1, a glob
# (without the .po suffix) expanded inside subversion/po.
update_po()
{
  (cd $svn_base/subversion/po &&
   for i in $1.po; do
     echo "Updating $i..."
     # In a display of truly bizarre behaviour, msgmerge (at least, the
     # GNU gettext-tools 0.14.6 implementation) inverts the order of obsolete
     # messages every time it is run. Therefore, run it twice, to invert and
     # then re-invert, to minimize spurious diffs.
     $MSGMERGE --sort-by-file --no-wrap --update $i subversion.pot
     $MSGMERGE --sort-by-file --no-wrap --update $i subversion.pot
   done )
}
+
# With no arguments, rebuild the pot and update every locale; with only
# 'pot', just build the pot; otherwise update only the named locales.
if [ $# -eq 0 ]; then
  make_pot
  update_po \*
else
  langs=
  while [ $# -ge 1 ]; do
    case $1 in
      pot) ;;  # the pot is always rebuilt below; nothing to record
      *)
        if [ -e $svn_base/subversion/po/$1.po ]; then
          langs="$langs $1"
        else
          echo "E: No such .po file '$1.po'" >&2
          exit 1
        fi
    esac
    shift
  done
  make_pot
  for lang in $langs; do
    update_po $lang
  done
fi
diff --git a/tools/server-side/fsfs-reshard.py b/tools/server-side/fsfs-reshard.py
new file mode 100755
index 0000000..d9937ad
--- /dev/null
+++ b/tools/server-side/fsfs-reshard.py
@@ -0,0 +1,399 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# fsfs-reshard.py REPOS_PATH MAX_FILES_PER_SHARD
+#
+# Perform an offline conversion of an FSFS repository between linear (format
+# 2, usable by Subversion 1.4+) and sharded (format 3, usable by Subversion
+# 1.5+) layouts.
+#
+# The MAX_FILES_PER_SHARD argument specifies the maximum number of files
+# that will be stored in each shard (directory), or zero to specify a linear
+# layout. Subversion 1.5 uses a default value of 1000 files per shard.
+#
+# As the repository will not be valid while the conversion is in progress,
+# the repository administrator must ensure that access to the repository is
+# blocked for the duration of the conversion.
+#
+# In the event that the conversion is interrupted, the repository will be in
+# an inconsistent state. The repository administrator should then re-run
+# this tool to completion.
+#
+#
+# Note that, currently, resharding from one sharded layout to another is
+# likely to be an extremely slow process. To reshard, we convert from a
+# sharded to linear layout and then to the new sharded layout. The problem
+# is that the initial conversion to the linear layout triggers exactly the
+# same 'large number of files in a directory' problem that sharding is
+# intended to solve.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+#
+# $HeadURL: https://svn.apache.org/repos/asf/subversion/branches/1.10.x/tools/server-side/fsfs-reshard.py $
+# $LastChangedDate: 2016-04-30 08:16:53 +0000 (Sat, 30 Apr 2016) $
+# $LastChangedBy: stefan2 $
+# $LastChangedRevision: 1741723 $
+
+import os, stat, sys
+
+from errno import EEXIST
+
+def usage():
+ """Print a usage message and exit."""
+ print("""usage: %s REPOS_PATH MAX_FILES_PER_SHARD [START END]
+
+Perform an offline conversion of an FSFS repository between linear
+(readable by Subversion 1.4 or later) and sharded (readable by
+Subversion 1.5 or later) layouts.
+
+The MAX_FILES_PER_SHARD argument specifies the maximum number of
+files that will be stored in each shard (directory), or zero to
+specify a linear layout. Subversion 1.5 uses a default value of
+1000 files per shard.
+
+Convert revisions START through END inclusive if specified, or all
+revisions if unspecified.
+""" % sys.argv[0])
+ sys.exit(1)
+
+def incompatible_repos_format(repos_path, format):
+ """Print an error saying that REPOS_PATH is a repository with an
+ incompatible repository format FORMAT, then exit."""
+ sys.stderr.write("""error: unable to convert repository '%s'.
+
+This repository is not compatible with this tool. Valid
+repository formats are '3' or '5'; this repository is
+format '%s'.
+
+""" % (repos_path, format))
+ sys.stderr.flush()
+ sys.exit(1)
+
+def incompatible_fs_format(repos_path, format):
+ """Print an error saying that REPOS_PATH is a repository with an
+ incompatible filesystem format FORMAT, then exit."""
+ sys.stderr.write("""error: unable to convert repository '%s'.
+
+This repository contains a filesystem that is not compatible with
+this tool. Valid filesystem formats are '1', '2', or '3'; this
+repository contains a filesystem with format '%s'.
+
+""" % (repos_path, format))
+ sys.stderr.flush()
+ sys.exit(1)
+
+def unexpected_fs_format_options(repos_path):
+ """Print an error saying that REPOS_PATH is a repository with
+ unexpected filesystem format options, then exit."""
+ sys.stderr.write("""error: unable to convert repository '%s'.
+
+This repository contains a filesystem that appears to be invalid -
+there is unexpected data after the filesystem format number.
+
+""" % repos_path)
+ sys.stderr.flush()
+ sys.exit(1)
+
+def incompatible_fs_format_option(repos_path, option):
+ """Print an error saying that REPOS_PATH is a repository with an
+ incompatible filesystem format option OPTION, then exit."""
+ sys.stderr.write("""error: unable to convert repository '%s'.
+
+This repository contains a filesystem that is not compatible with
+this tool. This tool recognises the 'layout' option but the
+filesystem uses the '%s' option.
+
+""" % (repos_path, option))
+ sys.stderr.flush()
+ sys.exit(1)
+
+def warn_about_fs_format_1(repos_path, format_path):
+ """Print a warning saying that REPOS_PATH contains a format 1 FSFS
+ filesystem that we can't reconstruct, then exit."""
+ sys.stderr.write("""warning: conversion of '%s' will be one-way.
+
+This repository is currently readable by Subversion 1.1 or later.
+This tool can convert this repository to one that is readable by
+either Subversion 1.4 (or later) or Subversion 1.5 (or later),
+but it is not able to convert it back to the original format - a
+separate dump/load step would be required.
+
+If you would like to upgrade this repository anyway, delete the
+file '%s' and re-run this tool.
+
+""" % (repos_path, format_path))
+ sys.stderr.flush()
+ sys.exit(1)
+
+def check_repos_format(repos_path):
+ """Check that REPOS_PATH contains a repository with a suitable format;
+ print a message and exit if not."""
+ format_path = os.path.join(repos_path, 'format')
+ try:
+ format_file = open(format_path)
+ format = format_file.readline()
+ if not format.endswith('\n'):
+ incompatible_repos_format(repos_path, format + ' <missing newline>')
+ format = format.rstrip('\n')
+ if format == '3' or format == '5':
+ pass
+ else:
+ incompatible_repos_format(repos_path, format)
+ except IOError:
+ # In all likelihood, the file doesn't exist.
+ incompatible_repos_format(repos_path, '<unreadable>')
+
+def check_fs_format(repos_path):
+ """Check that REPOS_PATH contains a filesystem with a suitable format,
+ or that it contains no format file; print a message and exit if neither
+ is true. Return bool whether the filesystem is sharded."""
+ sharded = False
+ db_path = os.path.join(repos_path, 'db')
+ format_path = os.path.join(db_path, 'format')
+ try:
+ format_file = open(format_path)
+ format = format_file.readline()
+ if not format.endswith('\n'):
+ incompatible_fs_format(repos_path, format + ' <missing newline>')
+ format = format.rstrip('\n')
+ if format == '1':
+ # This is a format 1 (svndiff0 only) filesystem. We can upgrade it,
+ # but we can't downgrade again (since we can't uncompress any of the
+ # svndiff1 deltas that may have been written). Warn the user and exit.
+ warn_about_fs_format_1(repos_path, format_path)
+ if format == '2':
+ pass
+ elif format == '3':
+ pass
+ else:
+ incompatible_fs_format(repos_path, format)
+
+ for line in format_file:
+ if format == '2':
+ unexpected_fs_format_options(repos_path)
+
+ line = line.rstrip('\n')
+ if line == 'layout linear':
+ pass
+ elif line.startswith('layout sharded '):
+ sharded = True
+ else:
+ incompatible_fs_format_option(repos_path, line)
+
+ format_file.close()
+ except IOError:
+ # The format file might not exist if we've previously been interrupted,
+ # or if the user is following our advice about upgrading a format 1
+ # repository. In both cases, we'll just assume the format was
+ # compatible.
+ pass
+
+ return sharded
+
+def current_file(repos_path):
+ """Return triple of (revision, next_node_id, next_copy_id) from
+ REPOS_PATH/db/current ."""
+ return open(os.path.join(repos_path, 'db', 'current')).readline().split()
+
+def remove_fs_format(repos_path):
+ """Remove the filesystem format file for repository REPOS_PATH.
+ Do not raise an error if the file is already missing."""
+ format_path = os.path.join(repos_path, 'db', 'format')
+ try:
+ statinfo = os.stat(format_path)
+ except OSError:
+ # The file probably doesn't exist.
+ return
+
+ # On Windows, we need to ensure the file is writable before we can
+ # remove it.
+ os.chmod(format_path, statinfo.st_mode | stat.S_IWUSR)
+ os.remove(format_path)
+
+def write_fs_format(repos_path, contents):
+ """Write a new filesystem format file for repository REPOS_PATH containing
+ CONTENTS."""
+ format_path = os.path.join(repos_path, 'db', 'format')
+ f = open(format_path, 'wb')
+ f.write(contents)
+ f.close()
+ os.chmod(format_path, stat.S_IRUSR | stat.S_IRGRP)
+
+def linearise(path):
+ """Move all the files in subdirectories of PATH into PATH, and remove the
+ subdirectories. Handle conflicts between subdirectory names and files
+ contained in subdirectories by ensuring subdirectories have a '.shard'
+ suffix prior to moving (the files are assumed not to have this suffix).
+ Abort if a subdirectory is found to contain another subdirectory."""
+ # First enumerate all subdirectories of DIR and rename where necessary
+ # to include a .shard suffix.
+ for name in os.listdir(path):
+ if name.endswith('.shard'):
+ continue
+ subdir_path = os.path.join(path, name)
+ if not os.path.isdir(subdir_path):
+ continue
+ os.rename(subdir_path, subdir_path + '.shard')
+
+ # Now move all the subdirectory contents into the parent and remove
+ # the subdirectories.
+ for root_path, dirnames, filenames in os.walk(path):
+ if root_path == path:
+ continue
+ if len(dirnames) > 0:
+ sys.stderr.write("error: directory '%s' contains other unexpected directories.\n" \
+ % root_path)
+ sys.stderr.flush()
+ sys.exit(1)
+ for name in filenames:
+ from_path = os.path.join(root_path, name)
+ to_path = os.path.join(path, name)
+ os.rename(from_path, to_path)
+ os.rmdir(root_path)
+
+def shard(path, max_files_per_shard, start, end):
+ """Move the files for revisions START to END inclusive in PATH into
+ subdirectories of PATH, such that each shard directory holds at most
+ MAX_FILES_PER_SHARD files: shard '0' holds revisions [0, MAX_FILES_PER_SHARD).
+ Abort if PATH is found to contain any entries with non-numeric names."""
+
+ tmp = path + '.reshard'
+ try:
+ os.mkdir(tmp)
+ except OSError as e:
+ if e.errno != EEXIST:
+ raise
+
+ # Move all entries into shards named N.shard.
+ for rev in range(start, end + 1):
+ name = str(rev)
+ shard = rev // max_files_per_shard
+ shard_name = str(shard) + '.shard'
+
+ from_path = os.path.join(path, name)
+ to_path = os.path.join(tmp, shard_name, name)
+ try:
+ os.rename(from_path, to_path)
+ except OSError:
+ # The most likely explanation is that the shard directory doesn't
+ # exist. Let's create it and retry the rename.
+ os.mkdir(os.path.join(tmp, shard_name))
+ os.rename(from_path, to_path)
+
+ # Now rename all the shards to remove the suffix.
+ skipped = 0
+ for name in os.listdir(tmp):
+ if not name.endswith('.shard'):
+ sys.stderr.write("warning: ignoring unexpected subdirectory '%s'.\n" \
+ % os.path.join(tmp, name))
+ sys.stderr.flush()
+ skipped += 1
+ continue
+ from_path = os.path.join(tmp, name)
+ to_path = os.path.join(path, os.path.basename(from_path)[:-6])
+ os.rename(from_path, to_path)
+ if skipped == 0: os.rmdir(tmp)
+
+def main():
+ if len(sys.argv) < 3:
+ usage()
+
+ repos_path = sys.argv[1]
+ max_files_per_shard = sys.argv[2]
+ try:
+ start = int(sys.argv[3])
+ end = int(sys.argv[4])
+ except IndexError:
+ start = 0
+ end = int(current_file(repos_path)[0])
+
+ # Validate the command-line arguments.
+ db_path = os.path.join(repos_path, 'db')
+ current_path = os.path.join(db_path, 'current')
+ if not os.path.exists(current_path):
+ sys.stderr.write("error: '%s' doesn't appear to be a Subversion FSFS repository.\n" \
+ % repos_path)
+ sys.stderr.flush()
+ sys.exit(1)
+
+ try:
+ max_files_per_shard = int(max_files_per_shard)
+ except (ValueError, OverflowError):
+ sys.stderr.write("error: maximum files per shard ('%s') is not a valid number.\n" \
+ % max_files_per_shard)
+ sys.stderr.flush()
+ sys.exit(1)
+
+ if max_files_per_shard < 0:
+ sys.stderr.write("error: maximum files per shard ('%d') must not be negative.\n" \
+ % max_files_per_shard)
+ sys.stderr.flush()
+ sys.exit(1)
+
+ # Check the format of the repository.
+ check_repos_format(repos_path)
+ sharded = check_fs_format(repos_path)
+
+ # Let the user know what's going on.
+ if max_files_per_shard > 0:
+ print("Converting '%s' to a sharded structure with %d files per directory" \
+ % (repos_path, max_files_per_shard))
+ if sharded:
+ print('(will convert to a linear structure first)')
+ else:
+ print("Converting '%s' to a linear structure" % repos_path)
+
+ # Prevent access to the repository for the duration of the conversion.
+ # There's no clean way to do this, but since the format of the repository
+ # is indeterminate, let's remove the format file while we're converting.
+ print('- marking the repository as invalid')
+ remove_fs_format(repos_path)
+
+ # First, convert to a linear scheme (this makes recovery easier because
+ # it's easier to reason about the behaviour on restart).
+ if sharded:
+ print('- linearising db/revs')
+ linearise(os.path.join(repos_path, 'db', 'revs'))
+ print('- linearising db/revprops')
+ linearise(os.path.join(repos_path, 'db', 'revprops'))
+
+ if max_files_per_shard == 0:
+ # We're done. Stamp the filesystem with a format 2 db/format file.
+ print('- marking the repository as a valid linear repository')
+ write_fs_format(repos_path, '2\n')
+ else:
+ print('- sharding db/revs')
+ shard(os.path.join(repos_path, 'db', 'revs'), max_files_per_shard,
+ start, end)
+ print('- sharding db/revprops')
+ shard(os.path.join(repos_path, 'db', 'revprops'), max_files_per_shard,
+ start, end)
+
+ # We're done. Stamp the filesystem with a format 3 db/format file.
+ print('- marking the repository as a valid sharded repository')
+ write_fs_format(repos_path, '3\nlayout sharded %d\n' % max_files_per_shard)
+
+ print('- done.')
+ sys.exit(0)
+
+if __name__ == '__main__':
+ raise Exception("""This script is unfinished and not ready to be used on live data.
+ Trust us.""")
+ main()
diff --git a/tools/server-side/mod_dontdothat/README b/tools/server-side/mod_dontdothat/README
new file mode 100644
index 0000000..7d4fe36
--- /dev/null
+++ b/tools/server-side/mod_dontdothat/README
@@ -0,0 +1,53 @@
+mod_dontdothat is an Apache module that allows you to block specific types
+of Subversion requests. Specifically, it's designed to keep users from doing
+things that are particularly hard on the server, like checking out the root
+of the tree, or the tags or branches directories. It works by sticking an
+input filter in front of all REPORT requests and looking for dangerous types
+of requests. If it finds any, it returns a 403 Forbidden error.
+
+You can compile and install it via apxs:
+
+$ apxs -c \
+ -I$PREFIX/include/subversion-1 \
+ -L$PREFIX/lib -lsvn_subr-1 \
+ mod_dontdothat.c
+
+$ apxs -i -n dontdothat mod_dontdothat.la
+
+It is enabled via a single httpd.conf directive, DontDoThatConfigFile:
+
+<Location /svn>
+ DAV svn
+ SVNParentPath /path/to/repositories
+ DontDoThatConfigFile /path/to/config.file
+ DontDoThatDisallowReplay off
+</Location>
+
+The file you give to DontDoThatConfigFile is a Subversion configuration file
+that contains the following sections.
+
+[recursive-actions]
+/*/trunk = allow
+/ = deny
+/* = deny
+/*/tags = deny
+/*/branches = deny
+/*/* = deny
+/*/*/tags = deny
+/*/*/branches = deny
+
+As you might guess, this defines a set of patterns that control what the
+user is not allowed to do. Anything with a 'deny' after it is denied, and
+as a fallback mechanism anything with an 'allow' after it is special cased
+to be allowed, even if it matches something that is denied.
+
+Note that the wildcard portions of a rule only swallow a single directory,
+so /* will match /foo, but not /foo/bar. They also must be at the end of
+a directory segment, so /foo* or /* are valid, but /*foo is not.
+
+These rules are applied to any recursive action, which basically means any
+Subversion command that goes through the update-report, like update, diff,
+checkout, merge, etc.
+
+The DontDoThatDisallowReplay option controls whether mod_dontdothat
+disallows replay requests; this option is on by default.
diff --git a/tools/server-side/mod_dontdothat/mod_dontdothat.c b/tools/server-side/mod_dontdothat/mod_dontdothat.c
new file mode 100644
index 0000000..dc63c0a
--- /dev/null
+++ b/tools/server-side/mod_dontdothat/mod_dontdothat.c
@@ -0,0 +1,711 @@
+/*
+ * mod_dontdothat.c: an Apache filter that allows you to return arbitrary
+ * errors for various types of Subversion requests.
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include <httpd.h>
+#include <http_config.h>
+#include <http_protocol.h>
+#include <http_request.h>
+#include <http_log.h>
+#include <util_filter.h>
+#include <ap_config.h>
+#include <apr_strings.h>
+#include <apr_uri.h>
+
+#include "mod_dav_svn.h"
+#include "svn_string.h"
+#include "svn_config.h"
+#include "svn_path.h"
+#include "svn_xml.h"
+#include "private/svn_fspath.h"
+
+extern module AP_MODULE_DECLARE_DATA dontdothat_module;
+
+typedef struct dontdothat_config_rec {
+ const char *config_file;
+ const char *base_path;
+ int no_replay;
+} dontdothat_config_rec;
+
+static void *create_dontdothat_dir_config(apr_pool_t *pool, char *dir)
+{
+ dontdothat_config_rec *cfg = apr_pcalloc(pool, sizeof(*cfg));
+
+ cfg->base_path = dir;
+ cfg->no_replay = 1;
+
+ return cfg;
+}
+
+static const command_rec dontdothat_cmds[] =
+{
+ AP_INIT_TAKE1("DontDoThatConfigFile", ap_set_file_slot,
+ (void *) APR_OFFSETOF(dontdothat_config_rec, config_file),
+ OR_ALL,
+ "Text file containing actions to take for specific requests"),
+ AP_INIT_FLAG("DontDoThatDisallowReplay", ap_set_flag_slot,
+ (void *) APR_OFFSETOF(dontdothat_config_rec, no_replay),
+ OR_ALL, "Disallow replay requests as if they are other recursive requests."),
+ { NULL }
+};
+
+typedef enum parse_state_t {
+ STATE_BEGINNING,
+ STATE_IN_UPDATE,
+ STATE_IN_SRC_PATH,
+ STATE_IN_DST_PATH,
+ STATE_IN_RECURSIVE
+} parse_state_t;
+
+typedef struct dontdothat_filter_ctx {
+ /* Set to TRUE when we determine that the request is safe and should be
+ * allowed to continue. */
+ svn_boolean_t let_it_go;
+
+ /* Set to TRUE when we determine that the request is unsafe and should be
+ * stopped in its tracks. */
+ svn_boolean_t no_soup_for_you;
+
+ svn_xml_parser_t *xmlp;
+
+ /* The current location in the REPORT body. */
+ parse_state_t state;
+
+ /* A buffer to hold CDATA we encounter. */
+ svn_stringbuf_t *buffer;
+
+ dontdothat_config_rec *cfg;
+
+ /* An array of wildcards that are special cased to be allowed. */
+ apr_array_header_t *allow_recursive_ops;
+
+ /* An array of wildcards where recursive operations are not allowed. */
+ apr_array_header_t *no_recursive_ops;
+
+ /* TRUE if a path has failed a test already. */
+ svn_boolean_t path_failed;
+
+ /* An error for when we're using this as a baton while parsing config
+ * files. */
+ svn_error_t *err;
+
+ /* The current request. */
+ request_rec *r;
+} dontdothat_filter_ctx;
+
+/* Return TRUE if wildcard WC matches path P, FALSE otherwise. */
+static svn_boolean_t
+matches(const char *wc, const char *p)
+{
+ for (;;)
+ {
+ switch (*wc)
+ {
+ case '*':
+ if (wc[1] != '/' && wc[1] != '\0')
+ abort(); /* This was checked for during parsing of the config. */
+
+ /* It's a wild card, so eat up until the next / in p. */
+ while (*p && p[1] != '/')
+ ++p;
+
+ /* If we ran out of p and we're out of wc then it matched. */
+ if (! *p)
+ {
+ if (wc[1] == '\0')
+ return TRUE;
+ else
+ return FALSE;
+ }
+ break;
+
+ case '\0':
+ if (*p != '\0')
+ /* This means we hit the end of wc without running out of p. */
+ return FALSE;
+ else
+ /* Or they were exactly the same length, so it's not lower. */
+ return TRUE;
+
+ default:
+ if (*wc != *p)
+ return FALSE; /* If we don't match, then move on to the next
+ * case. */
+ else
+ break;
+ }
+
+ ++wc;
+ ++p;
+
+ if (! *p && *wc)
+ return FALSE;
+ }
+}
+
+/* duplicate of dav_svn__log_err() from mod_dav_svn/util.c */
+static void
+log_dav_err(request_rec *r,
+ dav_error *err,
+ int level)
+{
+ dav_error *errscan;
+
+ /* Log the errors */
+ /* ### should have a directive to log the first or all */
+ for (errscan = err; errscan != NULL; errscan = errscan->prev) {
+ apr_status_t status;
+
+ if (errscan->desc == NULL)
+ continue;
+
+#if AP_MODULE_MAGIC_AT_LEAST(20091119,0)
+ status = errscan->aprerr;
+#else
+ status = errscan->save_errno;
+#endif
+
+ ap_log_rerror(APLOG_MARK, level, status, r,
+ "%s [%d, #%d]",
+ errscan->desc, errscan->status, errscan->error_id);
+ }
+}
+
+static svn_boolean_t
+is_this_legal(dontdothat_filter_ctx *ctx, const char *uri)
+{
+ const char *relative_path;
+ const char *cleaned_uri;
+ const char *repos_name;
+ const char *uri_path;
+ int trailing_slash;
+ dav_error *derr;
+
+ /* uri can be an absolute uri or just a path, we only want the path to match
+ * against */
+ if (uri && svn_path_is_url(uri))
+ {
+ apr_uri_t parsed_uri;
+ apr_status_t rv = apr_uri_parse(ctx->r->pool, uri, &parsed_uri);
+ if (APR_SUCCESS != rv)
+ {
+ /* Error parsing the URI, log and reject request. */
+ ap_log_rerror(APLOG_MARK, APLOG_ERR, rv, ctx->r,
+ "mod_dontdothat: blocked request after failing "
+ "to parse uri: '%s'", uri);
+ return FALSE;
+ }
+ uri_path = parsed_uri.path;
+ }
+ else
+ {
+ uri_path = uri;
+ }
+
+ if (uri_path)
+ {
+ const char *repos_path;
+
+ derr = dav_svn_split_uri(ctx->r,
+ uri_path,
+ ctx->cfg->base_path,
+ &cleaned_uri,
+ &trailing_slash,
+ &repos_name,
+ &relative_path,
+ &repos_path);
+ if (! derr)
+ {
+ int idx;
+
+ if (! repos_path)
+ repos_path = "";
+
+ repos_path = svn_fspath__canonicalize(repos_path, ctx->r->pool);
+
+ /* First check the special cases that are always legal... */
+ for (idx = 0; idx < ctx->allow_recursive_ops->nelts; ++idx)
+ {
+ const char *wc = APR_ARRAY_IDX(ctx->allow_recursive_ops,
+ idx,
+ const char *);
+
+ if (matches(wc, repos_path))
+ {
+ ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, ctx->r,
+ "mod_dontdothat: rule %s allows %s",
+ wc, repos_path);
+ return TRUE;
+ }
+ }
+
+ /* Then look for stuff we explicitly don't allow. */
+ for (idx = 0; idx < ctx->no_recursive_ops->nelts; ++idx)
+ {
+ const char *wc = APR_ARRAY_IDX(ctx->no_recursive_ops,
+ idx,
+ const char *);
+
+ if (matches(wc, repos_path))
+ {
+ ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, ctx->r,
+ "mod_dontdothat: rule %s forbids %s",
+ wc, repos_path);
+ return FALSE;
+ }
+ }
+ }
+ else
+ {
+ log_dav_err(ctx->r, derr, APLOG_ERR);
+ return FALSE;
+ }
+
+ }
+ else
+ {
+ ap_log_rerror(APLOG_MARK, APLOG_ERR, 0, ctx->r,
+ "mod_dontdothat: empty uri passed to is_this_legal(), "
+ "module bug?");
+ return FALSE;
+ }
+
+ return TRUE;
+}
+
+static apr_status_t
+dontdothat_filter(ap_filter_t *f,
+ apr_bucket_brigade *bb,
+ ap_input_mode_t mode,
+ apr_read_type_e block,
+ apr_off_t readbytes)
+{
+ dontdothat_filter_ctx *ctx = f->ctx;
+ apr_status_t rv;
+ apr_bucket *e;
+
+ if (mode != AP_MODE_READBYTES)
+ return ap_get_brigade(f->next, bb, mode, block, readbytes);
+
+ rv = ap_get_brigade(f->next, bb, mode, block, readbytes);
+ if (rv)
+ return rv;
+
+ for (e = APR_BRIGADE_FIRST(bb);
+ e != APR_BRIGADE_SENTINEL(bb);
+ e = APR_BUCKET_NEXT(e))
+ {
+ svn_boolean_t last = APR_BUCKET_IS_EOS(e);
+ const char *str;
+ apr_size_t len;
+ svn_error_t *err;
+
+ if (last)
+ {
+ str = "";
+ len = 0;
+ }
+ else
+ {
+ rv = apr_bucket_read(e, &str, &len, APR_BLOCK_READ);
+ if (rv)
+ return rv;
+ }
+
+ err = svn_xml_parse(ctx->xmlp, str, len, last);
+ if (err)
+ {
+ /* let_it_go so we clean up our parser, no_soup_for_you so that we
+ * bail out before bothering to parse this stuff a second time. */
+ ctx->let_it_go = TRUE;
+ ctx->no_soup_for_you = TRUE;
+ svn_error_clear(err);
+ }
+
+ /* If we found something that isn't allowed, set the correct status
+ * and return an error so it'll bail out before it gets anywhere it
+ * can do real damage. */
+ if (ctx->no_soup_for_you)
+ {
+ /* XXX maybe set up the SVN-ACTION env var so that it'll show up
+ * in the Subversion operational logs? */
+
+ ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, f->r,
+ "mod_dontdothat: client broke the rules, "
+ "returning error");
+
+ /* Ok, pass an error bucket and an eos bucket back to the client.
+ *
+ * NOTE: The custom error string passed here doesn't seem to be
+ * used anywhere by httpd. This is quite possibly a bug.
+ *
+ * TODO: Try and pass back a custom document body containing a
+ * serialized svn_error_t so the client displays a better
+ * error message. */
+ bb = apr_brigade_create(f->r->pool, f->c->bucket_alloc);
+ e = ap_bucket_error_create(403, "No Soup For You!",
+ f->r->pool, f->c->bucket_alloc);
+ APR_BRIGADE_INSERT_TAIL(bb, e);
+ e = apr_bucket_eos_create(f->c->bucket_alloc);
+ APR_BRIGADE_INSERT_TAIL(bb, e);
+
+ /* Don't forget to remove us, otherwise recursion blows the stack. */
+ ap_remove_input_filter(f);
+
+ return ap_pass_brigade(f->r->output_filters, bb);
+ }
+ else if (ctx->let_it_go || last)
+ {
+ ap_remove_input_filter(f);
+
+ ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, f->r,
+ "mod_dontdothat: letting request go through");
+
+ return rv;
+ }
+ }
+
+ return rv;
+}
+
+/* Implements svn_xml_char_data callback */
+static void
+cdata(void *baton, const char *data, apr_size_t len)
+{
+ dontdothat_filter_ctx *ctx = baton;
+
+ if (ctx->no_soup_for_you || ctx->let_it_go)
+ return;
+
+ switch (ctx->state)
+ {
+ case STATE_IN_SRC_PATH:
+ /* FALLTHROUGH */
+
+ case STATE_IN_DST_PATH:
+ /* FALLTHROUGH */
+
+ case STATE_IN_RECURSIVE:
+ if (! ctx->buffer)
+ ctx->buffer = svn_stringbuf_ncreate(data, len, ctx->r->pool);
+ else
+ svn_stringbuf_appendbytes(ctx->buffer, data, len);
+ break;
+
+ default:
+ break;
+ }
+}
+
+/* Implements svn_xml_start_elem callback */
+static void
+start_element(void *baton, const char *name, const char **attrs)
+{
+ dontdothat_filter_ctx *ctx = baton;
+ const char *sep;
+
+ if (ctx->no_soup_for_you || ctx->let_it_go)
+ return;
+
+ /* XXX Hack. We should be doing real namespace support, but for now we
+ * just skip ahead of any namespace prefix. If someone's sending us
+ * an update-report element outside of the SVN namespace they'll get
+ * what they deserve... */
+ sep = ap_strchr_c(name, ':');
+ if (sep)
+ name = sep + 1;
+
+ switch (ctx->state)
+ {
+ case STATE_BEGINNING:
+ if (strcmp(name, "update-report") == 0)
+ ctx->state = STATE_IN_UPDATE;
+ else if (strcmp(name, "replay-report") == 0 && ctx->cfg->no_replay)
+ {
+ /* XXX it would be useful if there was a way to override this
+ * on a per-user basis... */
+ if (! is_this_legal(ctx, ctx->r->unparsed_uri))
+ ctx->no_soup_for_you = TRUE;
+ else
+ ctx->let_it_go = TRUE;
+ }
+ else
+ ctx->let_it_go = TRUE;
+ break;
+
+ case STATE_IN_UPDATE:
+ if (strcmp(name, "src-path") == 0)
+ {
+ ctx->state = STATE_IN_SRC_PATH;
+ if (ctx->buffer)
+ ctx->buffer->len = 0;
+ }
+ else if (strcmp(name, "dst-path") == 0)
+ {
+ ctx->state = STATE_IN_DST_PATH;
+ if (ctx->buffer)
+ ctx->buffer->len = 0;
+ }
+ else if (strcmp(name, "recursive") == 0)
+ {
+ ctx->state = STATE_IN_RECURSIVE;
+ if (ctx->buffer)
+ ctx->buffer->len = 0;
+ }
+ else
+ ; /* XXX Figure out what else we need to deal with... Switch
+ * has that link-path thing we probably need to look out
+ * for... */
+ break;
+
+ default:
+ break;
+ }
+}
+
+/* Implements svn_xml_end_elem callback */
+static void
+end_element(void *baton, const char *name)
+{
+ dontdothat_filter_ctx *ctx = baton;
+ const char *sep;
+
+ if (ctx->no_soup_for_you || ctx->let_it_go)
+ return;
+
+ /* XXX Hack. We should be doing real namespace support, but for now we
+ * just skip ahead of any namespace prefix. If someone's sending us
+ * an update-report element outside of the SVN namespace they'll get
+ * what they deserve... */
+ sep = ap_strchr_c(name, ':');
+ if (sep)
+ name = sep + 1;
+
+ switch (ctx->state)
+ {
+ case STATE_IN_SRC_PATH:
+ ctx->state = STATE_IN_UPDATE;
+
+ svn_stringbuf_strip_whitespace(ctx->buffer);
+
+ if (! ctx->path_failed && ! is_this_legal(ctx, ctx->buffer->data))
+ ctx->path_failed = TRUE;
+ break;
+
+ case STATE_IN_DST_PATH:
+ ctx->state = STATE_IN_UPDATE;
+
+ svn_stringbuf_strip_whitespace(ctx->buffer);
+
+ if (! ctx->path_failed && ! is_this_legal(ctx, ctx->buffer->data))
+ ctx->path_failed = TRUE;
+ break;
+
+ case STATE_IN_RECURSIVE:
+ ctx->state = STATE_IN_UPDATE;
+
+ svn_stringbuf_strip_whitespace(ctx->buffer);
+
+ /* If this isn't recursive we let it go. */
+ if (strcmp(ctx->buffer->data, "no") == 0)
+ {
+ ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, ctx->r,
+ "mod_dontdothat: letting nonrecursive request go");
+ ctx->let_it_go = TRUE;
+ }
+ break;
+
+ case STATE_IN_UPDATE:
+ if (strcmp(name, "update-report") == 0)
+ {
+ /* If we made it here without figuring out that this is
+ * nonrecursive, then the path check is our final word
+ * on the subject. */
+
+ if (ctx->path_failed)
+ ctx->no_soup_for_you = TRUE;
+ else
+ ctx->let_it_go = TRUE;
+ }
+ else
+ ; /* XXX Is there other stuff we care about? */
+ break;
+
+ default:
+ abort();
+ }
+}
+
+static svn_boolean_t
+is_valid_wildcard(const char *wc)
+{
+ while (*wc)
+ {
+ if (*wc == '*')
+ {
+ if (wc[1] && wc[1] != '/')
+ return FALSE;
+ }
+
+ ++wc;
+ }
+
+ return TRUE;
+}
+
+static svn_boolean_t
+config_enumerator(const char *wildcard,
+ const char *action,
+ void *baton,
+ apr_pool_t *pool)
+{
+ dontdothat_filter_ctx *ctx = baton;
+
+ if (strcmp(action, "deny") == 0)
+ {
+ if (is_valid_wildcard(wildcard))
+ APR_ARRAY_PUSH(ctx->no_recursive_ops, const char *) = wildcard;
+ else
+ ctx->err = svn_error_createf(APR_EINVAL,
+ NULL,
+ "'%s' is an invalid wildcard",
+ wildcard);
+ }
+ else if (strcmp(action, "allow") == 0)
+ {
+ if (is_valid_wildcard(wildcard))
+ APR_ARRAY_PUSH(ctx->allow_recursive_ops, const char *) = wildcard;
+ else
+ ctx->err = svn_error_createf(APR_EINVAL,
+ NULL,
+ "'%s' is an invalid wildcard",
+ wildcard);
+ }
+ else
+ {
+ ctx->err = svn_error_createf(APR_EINVAL,
+ NULL,
+ "'%s' is not a valid action",
+ action);
+ }
+
+ if (ctx->err)
+ return FALSE;
+ else
+ return TRUE;
+}
+
+static void
+dontdothat_insert_filters(request_rec *r)
+{
+ dontdothat_config_rec *cfg = ap_get_module_config(r->per_dir_config,
+ &dontdothat_module);
+
+ if (! cfg->config_file)
+ return;
+
+ if (strcmp("REPORT", r->method) == 0)
+ {
+ dontdothat_filter_ctx *ctx = apr_pcalloc(r->pool, sizeof(*ctx));
+ svn_config_t *config;
+ svn_error_t *err;
+
+ ctx->r = r;
+
+ ctx->cfg = cfg;
+
+ ctx->allow_recursive_ops = apr_array_make(r->pool, 5, sizeof(char *));
+
+ ctx->no_recursive_ops = apr_array_make(r->pool, 5, sizeof(char *));
+
+ /* XXX is there a way to error out from this point? Would be nice... */
+
+ err = svn_config_read3(&config, cfg->config_file, TRUE,
+ FALSE, TRUE, r->pool);
+ if (err)
+ {
+ char buff[256];
+
+ ap_log_rerror(APLOG_MARK, APLOG_ERR,
+ ((err->apr_err >= APR_OS_START_USERERR &&
+ err->apr_err < APR_OS_START_CANONERR) ?
+ 0 : err->apr_err),
+ r, "Failed to load DontDoThatConfigFile: %s",
+ svn_err_best_message(err, buff, sizeof(buff)));
+
+ svn_error_clear(err);
+
+ return;
+ }
+
+ svn_config_enumerate2(config,
+ "recursive-actions",
+ config_enumerator,
+ ctx,
+ r->pool);
+ if (ctx->err)
+ {
+ char buff[256];
+
+ ap_log_rerror(APLOG_MARK, APLOG_ERR,
+ ((ctx->err->apr_err >= APR_OS_START_USERERR &&
+ ctx->err->apr_err < APR_OS_START_CANONERR) ?
+ 0 : ctx->err->apr_err),
+ r, "Failed to parse DontDoThatConfigFile: %s",
+ svn_err_best_message(ctx->err, buff, sizeof(buff)));
+
+ svn_error_clear(ctx->err);
+
+ return;
+ }
+
+ ctx->state = STATE_BEGINNING;
+
+ ctx->xmlp = svn_xml_make_parser(ctx, start_element, end_element,
+ cdata, r->pool);
+
+ ap_add_input_filter("DONTDOTHAT_FILTER", ctx, r, r->connection);
+ }
+}
+
+static void
+dontdothat_register_hooks(apr_pool_t *pool)
+{
+ ap_hook_insert_filter(dontdothat_insert_filters, NULL, NULL, APR_HOOK_FIRST);
+
+ ap_register_input_filter("DONTDOTHAT_FILTER",
+ dontdothat_filter,
+ NULL,
+ AP_FTYPE_RESOURCE);
+}
+
+module AP_MODULE_DECLARE_DATA dontdothat_module =
+{
+ STANDARD20_MODULE_STUFF,
+ create_dontdothat_dir_config,
+ NULL,
+ NULL,
+ NULL,
+ dontdothat_cmds,
+ dontdothat_register_hooks
+};
diff --git a/tools/server-side/svn-backup-dumps.py b/tools/server-side/svn-backup-dumps.py
new file mode 100755
index 0000000..2f3a231
--- /dev/null
+++ b/tools/server-side/svn-backup-dumps.py
@@ -0,0 +1,692 @@
+#!/usr/bin/env python
+#
+# svn-backup-dumps.py -- Create dumpfiles to backup a subversion repository.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+#
+# This script creates dump files from a subversion repository.
+# It is intended for use in cron jobs and post-commit hooks.
+#
+# The basic operation modes are:
+# 1. Create a full dump (revisions 0 to HEAD).
+# 2. Create incremental dumps containing at most N revisions.
+# 3. Create incremental single revision dumps (for use in post-commit).
+# 4. Create incremental dumps containing everything since last dump.
+#
+# All dump files are prefixed with the basename of the repository. All
+# examples below assume that the repository '/srv/svn/repos/src' is
+# dumped so all dumpfiles start with 'src'.
+#
+# Optional functionality:
+# 5. Create gzipped dump files.
+# 6. Create bzipped dump files.
+# 7. Transfer the dumpfile to another host using ftp.
+# 8. Transfer the dumpfile to another host using smb.
+#
+# See also 'svn-backup-dumps.py -h'.
+#
+#
+# 1. Create a full dump (revisions 0 to HEAD).
+#
+# svn-backup-dumps.py <repos> <dumpdir>
+#
+# <repos> Path to the repository.
+# <dumpdir> Directory for storing the dump file.
+#
+# This creates a dump file named 'src.000000-NNNNNN.svndmp.gz'
+# where NNNNNN is the revision number of HEAD.
+#
+#
+# 2. Create incremental dumps containing at most N revisions.
+#
+# svn-backup-dumps.py -c <count> <repos> <dumpdir>
+#
+# <count> Count of revisions per dump file.
+# <repos> Path to the repository.
+# <dumpdir> Directory for storing the dump file.
+#
+# When started the first time with a count of 1000 and if HEAD is
+# at 2923 it creates the following files:
+#
+# src.000000-000999.svndmp.gz
+# src.001000-001999.svndmp.gz
+# src.002000-002923.svndmp.gz
+#
+# Say the next time HEAD is at 3045 it creates these two files:
+#
+# src.002000-002999.svndmp.gz
+# src.003000-003045.svndmp.gz
+#
+#
+# 3. Create incremental single revision dumps (for use in post-commit).
+#
+# svn-backup-dumps.py -r <revnr> <repos> <dumpdir>
+#
+# <revnr> A revision number.
+# <repos> Path to the repository.
+# <dumpdir> Directory for storing the dump file.
+#
+# This creates a dump file named 'src.NNNNNN.svndmp.gz' where
+# NNNNNN is the given revision number.
+#
+#
+# 4. Create incremental dumps relative to last dump
+#
+# svn-backup-dumps.py -i <repos> <dumpdir>
+#
+# <repos> Path to the repository.
+# <dumpdir> Directory for storing the dump file.
+#
+# If dumps were last performed when HEAD was 2923,
+# then when HEAD is 3045 it creates these files:
+#
+# src.000000-002923.svndmp.gz
+# src.002924-003045.svndmp.gz
+#
+#
+# 5. Create gzipped dump files.
+#
+# svn-backup-dumps.py -z ...
+#
+# ... More options, see 1-4, 7, 8.
+#
+#
+# 6. Create bzipped dump files.
+#
+# svn-backup-dumps.py -b ...
+#
+# ... More options, see 1-4, 7, 8.
+#
+#
+# 7. Transfer the dumpfile to another host using ftp.
+#
+# svn-backup-dumps.py -t ftp:<host>:<user>:<password>:<path> ...
+#
+# <host> Name of the FTP host.
+# <user> Username on the remote host.
+# <password> Password for the user.
+# <path> Subdirectory on the remote host.
+# ... More options, see 1-6.
+#
+# If <path> contains the string '%r' it is replaced by the
+# repository name (basename of the repository path).
+#
+#
+# 8. Transfer the dumpfile to another host using smb.
+#
+# svn-backup-dumps.py -t smb:<share>:<user>:<password>:<path> ...
+#
+# <share> Name of an SMB share in the form '//host/share'.
+# <user> Username on the remote host.
+# <password> Password for the user.
+# <path> Subdirectory of the share.
+# ... More options, see 1-6.
+#
+# If <path> contains the string '%r' it is replaced by the
+# repository name (basename of the repository path).
+#
+#
+#
+# TODO:
+# - find out how to report smbclient errors
+# - improve documentation
+#
+
+__version = "0.6"
+
+import sys
+import os
+if os.name != "nt":
+ import fcntl
+ import select
+import gzip
+import os.path
+import re
+from optparse import OptionParser
+from ftplib import FTP
+from subprocess import Popen, PIPE
+
+try:
+ import bz2
+ have_bz2 = True
+except ImportError:
+ have_bz2 = False
+
+
class SvnBackupOutput:
    """Base class for dump-file writers.

    Stores the output file name (relative and absolute); the actual
    open/write/close life cycle is implemented by the subclasses below.
    """

    def __init__(self, abspath, filename):
        # abspath: destination directory; filename: bare file name.
        self.__filename = filename
        self.__absfilename = os.path.join(abspath, filename)

    def open(self):
        """Open the output; overridden by subclasses."""
        pass

    def write(self, data):
        """Write a chunk of dump data; overridden by subclasses."""
        pass

    def close(self):
        """Finish and close the output; overridden by subclasses."""
        pass

    def get_filename(self):
        """Return the bare output file name (including any extension)."""
        return self.__filename

    def get_absfilename(self):
        """Return the absolute path of the output file."""
        return self.__absfilename
+
+
class SvnBackupOutputPlain(SvnBackupOutput):
    """Write the dump uncompressed, straight to the file."""

    def __init__(self, abspath, filename):
        SvnBackupOutput.__init__(self, abspath, filename)

    def open(self):
        self.__ofd = open(self.get_absfilename(), "wb")

    def write(self, data):
        self.__ofd.write(data)

    def close(self):
        self.__ofd.close()
+
+
class SvnBackupOutputGzip(SvnBackupOutput):
    """Write the dump gzip-compressed via the gzip module (adds '.gz')."""

    def __init__(self, abspath, filename):
        SvnBackupOutput.__init__(self, abspath, filename + ".gz")

    def open(self):
        self.__compressor = gzip.GzipFile(filename=self.get_absfilename(),
                                          mode="wb")

    def write(self, data):
        self.__compressor.write(data)

    def close(self):
        # Flush before close so all compressed data reaches the file.
        self.__compressor.flush()
        self.__compressor.close()
+
+
class SvnBackupOutputBzip2(SvnBackupOutput):
    """Write the dump bzip2-compressed in-process (adds '.bz2')."""

    def __init__(self, abspath, filename):
        SvnBackupOutput.__init__(self, abspath, filename + ".bz2")

    def open(self):
        self.__compressor = bz2.BZ2Compressor()
        self.__ofd = open(self.get_absfilename(), "wb")

    def write(self, data):
        self.__ofd.write(self.__compressor.compress(data))

    def close(self):
        # flush() returns the compressor's remaining buffered output.
        self.__ofd.write(self.__compressor.flush())
        self.__ofd.close()
+
class SvnBackupOutputCommand(SvnBackupOutput):
    """Write the dump through an external compression command.

    The command is spawned with its stdout connected directly to the dump
    file; dump data is fed to it on stdin.
    """

    def __init__(self, abspath, filename, file_extension, cmd_path,
                 cmd_options):
        # cmd_path: executable to run; cmd_options: its single option
        # string, e.g. "-cz" for bzip2.
        SvnBackupOutput.__init__(self, abspath, filename + file_extension)
        self.__cmd_path = cmd_path
        self.__cmd_options = cmd_options

    def open(self):
        """Spawn the compressor with stdout redirected into the dump file."""
        cmd = [ self.__cmd_path, self.__cmd_options ]

        self.__ofd = open(self.get_absfilename(), "wb")
        try:
            proc = Popen(cmd, stdin=PIPE, stdout=self.__ofd, shell=False)
        except Exception:
            # Close the half-opened dump file before bailing out (it used
            # to leak here), then report and exit as before.
            self.__ofd.close()
            print((256, "", "Popen failed (%s ...):\n %s" % (cmd[0],
                str(sys.exc_info()[1]))))
            sys.exit(256)
        self.__proc = proc
        self.__stdin = proc.stdin

    def write(self, data):
        self.__stdin.write(data)

    def close(self):
        # Closing stdin lets the compressor see EOF; then reap it.
        self.__stdin.close()
        rc = self.__proc.wait()
        self.__ofd.close()
        # The exit status used to be silently discarded; report a failing
        # compressor so truncated/broken dumps don't go unnoticed.
        if rc != 0:
            print("compression command '%s' failed with exit code %d"
                  % (self.__cmd_path, rc))
+
class SvnBackupException(Exception):
    """User-level error; carries its message in the 'errortext' attribute."""

    def __init__(self, errortext):
        # Initialize the base Exception too so args, repr() and pickling
        # behave normally (the original left e.args empty).
        Exception.__init__(self, errortext)
        self.errortext = errortext

    def __str__(self):
        return self.errortext
+
class SvnBackup:
    """Create (optionally compressed) dump files for a Subversion repository.

    The operation mode (full dump, fixed-size incremental dumps, single
    revision, or incremental relative to the last dump) is selected by the
    command line options passed to __init__; execute() dispatches on them.
    """

    def __init__(self, options, args):
        """Validate the command line and store the configuration.

        args is the raw argument vector: progname, repospath, dumpdir.
        Raises SvnBackupException for any invalid argument combination.
        """
        # need 3 args: progname, repospath, dumpdir
        if len(args) != 3:
            if len(args) < 3:
                raise SvnBackupException("too few arguments, specify"
                                         " repospath and dumpdir.\nuse -h or"
                                         " --help option to see help.")
            else:
                raise SvnBackupException("too many arguments, specify"
                                         " repospath and dumpdir only.\nuse"
                                         " -h or --help option to see help.")
        self.__repospath = args[1]
        self.__dumpdir = args[2]
        # check repospath; drop a trailing path separator if present
        rpathparts = os.path.split(self.__repospath)
        if len(rpathparts[1]) == 0:
            self.__repospath = rpathparts[0]
        if not os.path.exists(self.__repospath):
            raise SvnBackupException("repos '%s' does not exist." % self.__repospath)
        if not os.path.isdir(self.__repospath):
            raise SvnBackupException("repos '%s' is not a directory." % self.__repospath)
        # sanity check: a repository contains these subdirectories
        for subdir in ["db", "conf", "hooks"]:
            if not os.path.isdir(os.path.join(self.__repospath, subdir)):
                raise SvnBackupException("repos '%s' is not a repository." % self.__repospath)
        rpathparts = os.path.split(self.__repospath)
        self.__reposname = rpathparts[1]
        if self.__reposname in ["", ".", ".."]:
            raise SvnBackupException("couldn't extract repos name from '%s'." % self.__repospath)
        # check dumpdir
        if not os.path.exists(self.__dumpdir):
            raise SvnBackupException("dumpdir '%s' does not exist." % self.__dumpdir)
        elif not os.path.isdir(self.__dumpdir):
            raise SvnBackupException("dumpdir '%s' is not a directory." % self.__dumpdir)
        # set options
        self.__rev_nr = options.rev
        self.__count = options.cnt
        self.__quiet = options.quiet
        self.__deltas = options.deltas
        self.__relative_incremental = options.relative_incremental

        # svnadmin/svnlook path (fall back to $PATH lookup)
        self.__svnadmin_path = options.svnadmin_path or "svnadmin"
        self.__svnlook_path = options.svnlook_path or "svnlook"

        # check compress options: -b, -z, --bzip2-path and --gzip-path are
        # mutually exclusive.  options.bzip2 only exists when the bz2
        # module was importable (the -b option is added conditionally),
        # hence the getattr() guard.
        self.__gzip_path = options.gzip_path
        self.__bzip2_path = options.bzip2_path
        self.__zip = None
        compress_options = 0
        if options.gzip_path is not None:
            compress_options += 1
        if options.bzip2_path is not None:
            compress_options += 1
        if getattr(options, "bzip2", False):
            compress_options += 1
            self.__zip = "bzip2"
        if options.gzip:
            compress_options += 1
            self.__zip = "gzip"
        if compress_options > 1:
            raise SvnBackupException("--bzip2-path, --gzip-path, -b, -z are "
                                     "mutually exclusive.")

        # -o sets overwrite, -O additionally overwrite_all
        self.__overwrite = options.overwrite > 0
        self.__overwrite_all = options.overwrite > 1
        # transfer target: [method, host/share, user, password, path]
        self.__transfer = None
        if options.transfer is not None:
            self.__transfer = options.transfer.split(":")
            if len(self.__transfer) != 5:
                if len(self.__transfer) < 5:
                    raise SvnBackupException("too few fields for transfer '%s'." % self.__transfer)
                else:
                    raise SvnBackupException("too many fields for transfer '%s'." % self.__transfer)
            if self.__transfer[0] not in ["ftp", "smb"]:
                raise SvnBackupException("unknown transfer method '%s'." % self.__transfer[0])

    def set_nonblock(self, fileobj):
        """Switch FILEOBJ's underlying file descriptor to non-blocking mode."""
        fd = fileobj.fileno()
        flags = fcntl.fcntl(fd, fcntl.F_GETFL)
        fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)

    def exec_cmd(self, cmd, output=None, printerr=False):
        """Run CMD and return (rc, stdout_text, stderr_text).

        If OUTPUT is given, raw stdout bytes are written to it instead of
        being returned.  With printerr, stderr is echoed to stdout as it
        arrives.  Text results are decoded with errors="replace".
        """
        if os.name == "nt":
            return self.exec_cmd_nt(cmd, output, printerr)
        else:
            return self.exec_cmd_unix(cmd, output, printerr)

    def exec_cmd_unix(self, cmd, output=None, printerr=False):
        """POSIX implementation of exec_cmd using select() on the pipes."""
        try:
            proc = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=False)
        except Exception:
            return (256, "", "Popen failed (%s ...):\n %s" % (cmd[0],
                str(sys.exc_info()[1])))
        stdout = proc.stdout
        stderr = proc.stderr
        self.set_nonblock(stdout)
        self.set_nonblock(stderr)
        readfds = [stdout, stderr]
        # Pipes deliver bytes; accumulate bytes and decode at the end
        # (the original mixed str and bytes and crashed on Python 3).
        bufout = b""
        buferr = b""
        while readfds:
            ready = select.select(readfds, [], [])[0]
            for fd in ready:
                buf = fd.read(16384)
                if not buf:
                    # EOF on this stream
                    readfds.remove(fd)
                elif fd == stdout:
                    if output:
                        output.write(buf)
                    else:
                        bufout += buf
                else:
                    if printerr:
                        sys.stdout.write("%s " % buf.decode(errors="replace"))
                    else:
                        buferr += buf
        rc = proc.wait()
        if printerr:
            print("")
        return (rc, bufout.decode(errors="replace"),
                buferr.decode(errors="replace"))

    def exec_cmd_nt(self, cmd, output=None, printerr=False):
        """Windows implementation of exec_cmd; stderr is not captured."""
        try:
            proc = Popen(cmd, stdout=PIPE, stderr=None, shell=False)
        except Exception:
            return (256, "", "Popen failed (%s ...):\n %s" % (cmd[0],
                str(sys.exc_info()[1])))
        bufout = b""
        while True:
            buf = proc.stdout.read(16384)
            if not buf:
                break
            if output:
                output.write(buf)
            else:
                bufout += buf
        rc = proc.wait()
        return (rc, bufout.decode(errors="replace"), "")

    def get_head_rev(self):
        """Return the repository's youngest revision, or -1 on error."""
        cmd = [self.__svnlook_path, "youngest", self.__repospath]
        rc, out, err = self.exec_cmd(cmd)
        if rc == 0 and len(err) == 0:
            return int(out.strip())
        print(err)
        return -1

    def get_last_dumped_rev(self):
        """Return the highest end revision among this repository's existing
        dump files in the dump directory, or -1 if there are none."""
        # raw string: "\d"/"\." are invalid escapes in a plain literal
        filename_regex = re.compile(r"(.+)\.\d+-(\d+)\.svndmp.*")
        # start with -1 so the next one will be rev 0
        highest_rev = -1
        for filename in os.listdir(self.__dumpdir):
            m = filename_regex.match(filename)
            if m and m.group(1) == self.__reposname:
                highest_rev = max(highest_rev, int(m.group(2)))
        return highest_rev

    def transfer_ftp(self, absfilename, filename):
        """Upload the dump file via FTP; raises SvnBackupException on error."""
        rc = False
        try:
            host = self.__transfer[1]
            user = self.__transfer[2]
            passwd = self.__transfer[3]
            destdir = self.__transfer[4].replace("%r", self.__reposname)
            ftp = FTP(host, user, passwd)
            ftp.cwd(destdir)
            ifd = open(absfilename, "rb")
            ftp.storbinary("STOR %s" % filename, ifd)
            ftp.quit()
            # storbinary consumed the whole file iff we are now at EOF
            rc = len(ifd.read(1)) == 0
            ifd.close()
        except Exception as e:
            raise SvnBackupException("ftp transfer failed:\n file: '%s'\n error: %s" % \
                    (absfilename, str(e)))
        return rc

    def transfer_smb(self, absfilename, filename):
        """Upload the dump file to an SMB share using smbclient."""
        share = self.__transfer[1]
        user = self.__transfer[2]
        passwd = self.__transfer[3]
        if passwd == "":
            passwd = "-N"  # smbclient flag: suppress password prompt
        destdir = self.__transfer[4].replace("%r", self.__reposname)
        cmd = ("smbclient", share, "-U", user, passwd, "-D", destdir,
               "-c", "put %s %s" % (absfilename, filename))
        r = self.exec_cmd(cmd)
        rc = r[0] == 0
        if not rc:
            print(r[2])
        return rc

    def transfer(self, absfilename, filename):
        """Send the finished dump file to the configured remote target."""
        if self.__transfer is None:
            return
        elif self.__transfer[0] == "ftp":
            self.transfer_ftp(absfilename, filename)
        elif self.__transfer[0] == "smb":
            self.transfer_smb(absfilename, filename)
        else:
            print("unknown transfer method '%s'." % self.__transfer[0])

    def create_dump(self, checkonly, overwrite, fromrev, torev=None):
        """Dump revisions FROMREV (optionally through TOREV) to a file.

        With checkonly=True just report whether the target file already
        exists.  Returns True on success; triggers transfer() afterwards.
        """
        revparam = "%d" % fromrev
        r = "%06d" % fromrev
        if torev is not None:
            revparam += ":%d" % torev
            r += "-%06d" % torev
        filename = "%s.%s.svndmp" % (self.__reposname, r)
        # pick the output backend matching the compression configuration
        if self.__bzip2_path:
            output = SvnBackupOutputCommand(self.__dumpdir, filename, ".bz2",
                                            self.__bzip2_path, "-cz")
        elif self.__gzip_path:
            output = SvnBackupOutputCommand(self.__dumpdir, filename, ".gz",
                                            self.__gzip_path, "-cf")
        elif self.__zip == "gzip":
            output = SvnBackupOutputGzip(self.__dumpdir, filename)
        elif self.__zip == "bzip2":
            output = SvnBackupOutputBzip2(self.__dumpdir, filename)
        else:
            output = SvnBackupOutputPlain(self.__dumpdir, filename)
        absfilename = output.get_absfilename()
        realfilename = output.get_filename()
        if checkonly:
            return os.path.exists(absfilename)
        elif os.path.exists(absfilename):
            if overwrite:
                print("overwriting " + absfilename)
            else:
                # existing dumps are kept unless overwriting was requested
                print("%s already exists." % absfilename)
                return True
        else:
            print("writing " + absfilename)
        cmd = [self.__svnadmin_path, "dump",
               "--incremental", "-r", revparam, self.__repospath]
        if self.__quiet:
            cmd[2:2] = ["-q"]
        if self.__deltas:
            cmd[2:2] = ["--deltas"]
        output.open()
        r = self.exec_cmd(cmd, output, True)
        output.close()
        rc = r[0] == 0
        if rc:
            self.transfer(absfilename, realfilename)
        return rc

    def export_single_rev(self):
        """Mode -r: dump exactly one revision."""
        return self.create_dump(False, self.__overwrite, self.__rev_nr)

    def export(self):
        """Default/-c mode: full dump, or fixed-size incremental dumps."""
        headrev = self.get_head_rev()
        if headrev == -1:
            return False
        if self.__count is None:
            return self.create_dump(False, self.__overwrite, 0, headrev)
        # Work backwards in blocks of __count revisions; stop at the
        # first block whose dump file already exists (unless -O).
        baserev = headrev - (headrev % self.__count)
        rc = True
        cnt = self.__count
        fromrev = baserev - cnt
        torev = baserev - 1
        while fromrev >= 0 and rc:
            if self.__overwrite_all or \
                    not self.create_dump(True, False, fromrev, torev):
                rc = self.create_dump(False, self.__overwrite_all,
                                      fromrev, torev)
                fromrev -= cnt
                torev -= cnt
            else:
                fromrev = -1
        if rc:
            rc = self.create_dump(False, self.__overwrite, baserev, headrev)
        return rc

    def export_relative_incremental(self):
        """Mode -i: dump everything since the last dumped revision."""
        headrev = self.get_head_rev()
        if headrev == -1:
            return False
        last_dumped_rev = self.get_last_dumped_rev()
        if headrev < last_dumped_rev:
            # that should not happen...
            return False
        if headrev == last_dumped_rev:
            # already up-to-date
            return True
        return self.create_dump(False, False, last_dumped_rev + 1, headrev)

    def execute(self):
        """Dispatch to the dump mode selected on the command line."""
        if self.__rev_nr is not None:
            return self.export_single_rev()
        elif self.__relative_incremental:
            return self.export_relative_incremental()
        else:
            return self.export()
+
+
if __name__ == "__main__":
    # Build the command line interface; the options mirror the usage modes
    # documented in the big header comment at the top of this file.
    usage = "usage: svn-backup-dumps.py [options] repospath dumpdir"
    parser = OptionParser(usage=usage, version="%prog "+__version)
    # -b is only offered when the python bz2 module could be imported.
    # NOTE(review): when have_bz2 is False, options.bzip2 is never defined
    # but SvnBackup.__init__ still reads it -- confirm.
    if have_bz2:
        parser.add_option("-b",
                          action="store_true",
                          dest="bzip2", default=False,
                          help="compress the dump using python bzip2 library.")
    parser.add_option("-i",
                      action="store_true",
                      dest="relative_incremental", default=False,
                      help="perform incremental relative to last dump.")
    parser.add_option("--deltas",
                      action="store_true",
                      dest="deltas", default=False,
                      help="pass --deltas to svnadmin dump.")
    parser.add_option("-c",
                      action="store", type="int",
                      dest="cnt", default=None,
                      help="count of revisions per dumpfile.")
    # -o and -O share one destination: 1 = overwrite, 2 = overwrite all
    parser.add_option("-o",
                      action="store_const", const=1,
                      dest="overwrite", default=0,
                      help="overwrite files.")
    parser.add_option("-O",
                      action="store_const", const=2,
                      dest="overwrite", default=0,
                      help="overwrite all files.")
    parser.add_option("-q",
                      action="store_true",
                      dest="quiet", default=False,
                      help="quiet.")
    parser.add_option("-r",
                      action="store", type="int",
                      dest="rev", default=None,
                      help="revision number for single rev dump.")
    parser.add_option("-t",
                      action="store", type="string",
                      dest="transfer", default=None,
                      help="transfer dumps to another machine "+
                      "(s.a. --help-transfer).")
    parser.add_option("-z",
                      action="store_true",
                      dest="gzip", default=False,
                      help="compress the dump using python gzip library.")
    parser.add_option("--bzip2-path",
                      action="store", type="string",
                      dest="bzip2_path", default=None,
                      help="compress the dump using bzip2 custom command.")
    parser.add_option("--gzip-path",
                      action="store", type="string",
                      dest="gzip_path", default=None,
                      help="compress the dump using gzip custom command.")
    parser.add_option("--svnadmin-path",
                      action="store", type="string",
                      dest="svnadmin_path", default=None,
                      help="svnadmin command path.")
    parser.add_option("--svnlook-path",
                      action="store", type="string",
                      dest="svnlook_path", default=None,
                      help="svnlook command path.")
    parser.add_option("--help-transfer",
                      action="store_true",
                      dest="help_transfer", default=False,
                      help="shows detailed help for the transfer option.")
    # The full sys.argv is parsed on purpose: SvnBackup expects the program
    # name as args[0] plus exactly two positional arguments after it.
    (options, args) = parser.parse_args(sys.argv)
    if options.help_transfer:
        print("Transfer help:")
        print("")
        print(" FTP:")
        print(" -t ftp:<host>:<user>:<password>:<dest-path>")
        print("")
        print(" SMB (using smbclient):")
        print(" -t smb:<share>:<user>:<password>:<dest-path>")
        print("")
        sys.exit(0)
    # Exit status: 0 only when the selected dump operation succeeded.
    rc = False
    try:
        backup = SvnBackup(options, args)
        rc = backup.execute()
    except SvnBackupException as e:
        print("svn-backup-dumps.py: %s" % e)
    if rc:
        print("Everything OK.")
        sys.exit(0)
    else:
        print("An error occurred!")
        sys.exit(1)
diff --git a/tools/server-side/svn-populate-node-origins-index.c b/tools/server-side/svn-populate-node-origins-index.c
new file mode 100644
index 0000000..18514b8
--- /dev/null
+++ b/tools/server-side/svn-populate-node-origins-index.c
@@ -0,0 +1,187 @@
+/*
+ * svn-populate-node-origins-index.c : Populate the repository's node
+ * origins index.
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include "svn_cmdline.h"
+#include "svn_error.h"
+#include "svn_fs.h"
+#include "svn_path.h"
+#include "svn_pools.h"
+#include "svn_repos.h"
+#include "svn_utf.h"
+
/* Used to terminate lines in large multi-line string literals. */
#define NL APR_EOL_STR

/* Help text printed by usage_maybe_with_err() below; NL keeps the line
   endings platform-appropriate. */
static const char *usage_summary =
  "Crawl the Subversion repository located at REPOS-PATH in an attempt to" NL
  "populate that repository's index of node origins. " NL
  "" NL
  "The node origins index is new as of Subversion 1.5, and behaves as a" NL
  "cache to vastly speed up certain history-querying operations. For" NL
  "compatibility with repositories created with pre-1.5 versions of" NL
  "Subversion, Subversion will gracefully handle cache misses by doing a" NL
  "brute-force calculation of the query answer and lazily populating the" NL
  "index with answers it calculates. Unfortunately, calculating that" NL
  "information using the brute-force method (instead of having the" NL
  "information appear in the index organically) can be very costly." NL
  "" NL
  "This tool triggers the lazy index population logic built into" NL
  "Subversion in a fashion far more efficient than is likely to happen" NL
  "during typical repository usage. It can be run while the repository" NL
  "is online, too, without interrupting normal Subversion activities." NL;
+
/* Print a usage message for this program (PROGNAME), possibly with an
   error message ERR_MSG, if not NULL. */
static void
usage_maybe_with_err(const char *progname, const char *err_msg)
{
  FILE *out;

  /* Error output goes to stderr; plain usage stays on stdout. */
  out = err_msg ? stderr : stdout;
  fprintf(out, "Usage: %s REPOS-PATH\n\n%s", progname, usage_summary);
  if (err_msg)
    fprintf(out, "\nERROR: %s\n", err_msg);
}
+
/* Build the node-origins index for any newly added items introduced in
   REVISION in FS.  Set *COUNT to the number of new items found. */
static svn_error_t *
index_revision_adds(int *count, svn_fs_t *fs,
                    svn_revnum_t revision, apr_pool_t *pool)
{
  svn_fs_root_t *root;
  /* Scratch pool, cleared once per change below. */
  apr_pool_t *subpool = svn_pool_create(pool);

  svn_fs_path_change_iterator_t *iterator;
  svn_fs_path_change3_t *change;

  *count = 0;
  SVN_ERR(svn_fs_revision_root(&root, fs, revision, pool));
  SVN_ERR(svn_fs_paths_changed3(&iterator, root, pool, subpool));
  SVN_ERR(svn_fs_path_change_get(&change, iterator));

  while (change)
    {
      svn_pool_clear(subpool);
      /* Only additions and replacements are considered. */
      if ((change->change_kind == svn_fs_path_change_add)
          || (change->change_kind == svn_fs_path_change_replace))
        {
          /* Paths with valid copy-from info are skipped -- presumably
             copies don't start a new line of history; confirm against
             the svn_fs API docs. */
          if (! (change->copyfrom_path
                 && SVN_IS_VALID_REVNUM(change->copyfrom_rev)))
            {
              svn_revnum_t origin;
              /* Asking for the origin is what lazily populates the index. */
              SVN_ERR(svn_fs_node_origin_rev(&origin, root,
                                             change->path.data, subpool));
              (*count)++;
            }
        }

      SVN_ERR(svn_fs_path_change_get(&change, iterator));
    }
  svn_pool_destroy(subpool);

  return SVN_NO_ERROR;
}
+
/* Build the node-origins index for the repository located at REPOS_PATH,
   printing one progress line per revision. */
static svn_error_t *
build_index(const char *repos_path, apr_pool_t *pool)
{
  svn_repos_t *repos;
  svn_fs_t *fs;
  svn_revnum_t youngest_rev, i;
  size_t slotsize;
  const char *progress_fmt;
  apr_pool_t *subpool;

  /* Open the repository. */
  SVN_ERR(svn_repos_open3(&repos, repos_path, NULL, pool, pool));

  /* Get a filesystem object. */
  fs = svn_repos_fs(repos);

  /* Fetch the youngest revision of the repository. */
  SVN_ERR(svn_fs_youngest_rev(&youngest_rev, fs, pool));

  /* Width of the widest revision number, used to build a format string
     with aligned [current/total] progress columns. */
  slotsize = strlen(apr_ltoa(pool, youngest_rev));
  progress_fmt = apr_psprintf
    (pool,
     "[%%%" APR_SIZE_T_FMT "ld"
     "/%%%" APR_SIZE_T_FMT "ld] "
     "Found %%d new lines of history."
     "\n", slotsize, slotsize);

  /* Now, iterate over all the revisions, calling index_revision_adds(). */
  subpool = svn_pool_create(pool);
  for (i = 0; i < youngest_rev; i++)
    {
      int count;
      svn_pool_clear(subpool);
      SVN_ERR(index_revision_adds(&count, fs, i + 1, subpool));
      printf(progress_fmt, i + 1, youngest_rev, count);
    }
  svn_pool_destroy(subpool);

  return SVN_NO_ERROR;
}
+
+
/* Entry point: expects a single argument, the repository path. */
int
main(int argc, const char **argv)
{
  apr_pool_t *pool;
  svn_error_t *err = SVN_NO_ERROR;
  const char *repos_path;

  /* Initialize the app.  Send all error messages to 'stderr'.  */
  if (svn_cmdline_init(argv[0], stderr) == EXIT_FAILURE)
    return EXIT_FAILURE;

  pool = svn_pool_create(NULL);

  if (argc <= 1)
    {
      usage_maybe_with_err(argv[0], "Not enough arguments.");
      goto cleanup;
    }

  /* Convert argv[1] into a UTF8, internal-format, canonicalized path. */
  if ((err = svn_utf_cstring_to_utf8(&repos_path, argv[1], pool)))
    goto cleanup;
  repos_path = svn_dirent_internal_style(repos_path, pool);
  repos_path = svn_dirent_canonicalize(repos_path, pool);

  if ((err = build_index(repos_path, pool)))
    goto cleanup;

 cleanup:
  /* Destroy the pool first; ERR (if any) is still valid afterwards
     because it was not allocated in POOL. */
  svn_pool_destroy(pool);

  if (err)
    {
      svn_handle_error2(err, stderr, FALSE,
                        "svn-populate-node-origins-index: ");
      return EXIT_FAILURE;
    }
  return EXIT_SUCCESS;
}
diff --git a/tools/server-side/svn_server_log_parse.py b/tools/server-side/svn_server_log_parse.py
new file mode 100755
index 0000000..5ecb104
--- /dev/null
+++ b/tools/server-side/svn_server_log_parse.py
@@ -0,0 +1,460 @@
+#!/usr/bin/python
+
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+
+# TODO: Teach parse_open about capabilities, rather than allowing any
+# words at all.
+
+"""Parse subversion server operational logs.
+
+SVN-ACTION strings
+------------------
+
+Angle brackets denote a variable, e.g. 'commit r<N>' means you'll see
+lines like 'commit r17' for this action.
+
+<N> and <M> are revision numbers.
+
+<PATH>, <FROM-PATH>, and <TO-PATH> mean a URI-encoded path relative to
+the repository root, including a leading '/'.
+
+<REVPROP> means a revision property, e.g. 'svn:log'.
+
+<I> represents a svn_mergeinfo_inheritance_t value and is one of these
+words: explicit inherited nearest-ancestor.
+
+<D> represents a svn_depth_t value and is one of these words: empty
+files immediates infinity. If the depth value for the operation was
+svn_depth_unknown, the depth= portion is absent entirely.
+
+The get-mergeinfo and log actions use lists for paths and revprops.
+The lists are enclosed in parentheses and each item is separated by a
+space (spaces in paths are encoded as %20).
+
+The words will *always* be in this order, though some may be absent.
+
+General::
+
+ change-rev-prop r<N> <REVPROP>
+ commit r<N>
+ get-dir <PATH> r<N> text? props?
+ get-file <PATH> r<N> text? props?
+ lock (<PATH> ...) steal?
+ rev-proplist r<N>
+ unlock (<PATH> ...) break?
+
+Reports::
+
+ get-file-revs <PATH> r<N>:<M> include-merged-revisions?
+ get-mergeinfo (<PATH> ...) <I> include-descendants?
+ log (<PATH> ...) r<N>:<M> limit=<N>? discover-changed-paths? strict? include-merged-revisions? revprops=all|(<REVPROP> ...)?
+ replay <PATH> r<N>
+
+The update report::
+
+ checkout-or-export <PATH> r<N> depth=<D>?
+ diff <FROM-PATH>@<N> <TO-PATH>@<M> depth=<D>? ignore-ancestry?
+ diff <PATH> r<N>:<M> depth=<D>? ignore-ancestry?
+ status <PATH> r<N> depth=<D>?
+ switch <FROM-PATH> <TO-PATH>@<N> depth=<D>?
+ update <PATH> r<N> depth=<D>? send-copyfrom-args?
+"""
+
+
+import re
+try:
+ # Python >=3.0
+ from urllib.parse import unquote as urllib_parse_unquote
+except ImportError:
+ # Python <3.0
+ from urllib import unquote as urllib_parse_unquote
+
+import svn.core
+
+#
+# Valid words for _parse_depth and _parse_mergeinfo_inheritance
+#
+
+DEPTH_WORDS = ['empty', 'files', 'immediates', 'infinity']
+INHERITANCE_WORDS = {
+ 'explicit': svn.core.svn_mergeinfo_explicit,
+ 'inherited': svn.core.svn_mergeinfo_inherited,
+ 'nearest-ancestor': svn.core.svn_mergeinfo_nearest_ancestor,
+}
+
+#
+# Patterns for _match
+#
+
+# <PATH>
+pPATH = r'(/\S*)'
+# (<PATH> ...)
+pPATHS = r'\(([^)]*)\)'
+# r<N>
+pREVNUM = r'r(\d+)'
+# (<N> ...)
+pREVNUMS = r'\(((\d+\s*)*)\)'
+# r<N>:<M>
+pREVRANGE = r'r(-?\d+):(-?\d+)'
+# <PATH>@<N>
+pPATHREV = pPATH + r'@(\d+)'
+pWORD = r'(\S+)'
+pPROPERTY = pWORD
+# depth=<D>?
+pDEPTH = 'depth=' + pWORD
+
+#
+# Exceptions
+#
+
+class Error(Exception): pass
+class BadDepthError(Error):
+ def __init__(self, value):
+ Error.__init__(self, 'bad svn_depth_t value ' + value)
+class BadMergeinfoInheritanceError(Error):
+ def __init__(self, value):
+ Error.__init__(self, 'bad svn_mergeinfo_inheritance_t value ' + value)
+class MatchError(Error):
+ def __init__(self, pattern, line):
+ Error.__init__(self, '/%s/ does not match log line:\n%s'
+ % (pattern, line))
+
+
+#
+# Helper functions
+#
+
+# TODO: Move to kitchensink.c like svn_depth_from_word?
+try:
+ from svn.core import svn_inheritance_from_word
+except ImportError:
+ def svn_inheritance_from_word(word):
+ try:
+ return INHERITANCE_WORDS[word]
+ except KeyError:
+ # XXX svn_inheritance_to_word uses explicit as default so...
+ return svn.core.svn_mergeinfo_explicit
+
+def _parse_depth(word):
+ if word is None:
+ return svn.core.svn_depth_unknown
+ if word not in DEPTH_WORDS:
+ raise BadDepthError(word)
+ return svn.core.svn_depth_from_word(word)
+
+def _parse_mergeinfo_inheritance(word):
+ if word not in INHERITANCE_WORDS:
+ raise BadMergeinfoInheritanceError(word)
+ return svn_inheritance_from_word(word)
+
+def _match(line, *patterns):
+ """Return a re.match object from matching patterns against line.
+
+ All optional arguments must be strings suitable for ''.join()ing
+ into a single pattern string for re.match. The last optional
+ argument may instead be a list of such strings, which will be
+ joined into the final pattern as *optional* matches.
+
+ Raises:
+ Error -- if re.match returns None (i.e. no match)
+ """
+ if isinstance(patterns[-1], list):
+ optional = patterns[-1]
+ patterns = patterns[:-1]
+ else:
+ optional = []
+ pattern = r'\s+'.join(patterns)
+ pattern += ''.join([r'(\s+' + x + ')?' for x in optional])
+ m = re.match(pattern, line)
+ if m is None:
+ raise MatchError(pattern, line)
+ return m
+
+
+class Parser(object):
+ """Subclass this and define the handle_ methods according to the
+ "SVN-ACTION strings" section of this module's documentation. For
+ example, "lock <PATH> steal?" => def handle_lock(self, path, steal)
+ where steal will be True if "steal" was present.
+
+ See the end of test_svn_server_log_parse.py for a complete example.
+ """
+ def parse(self, line):
+ """Parse line and call appropriate handle_ method.
+
+ Returns one of:
+ - line remaining after the svn action, if one was parsed
+ - whatever your handle_unknown implementation returns
+
+ Raises:
+ BadDepthError -- for bad svn_depth_t values
+ BadMergeinfoInheritanceError -- for bad svn_mergeinfo_inheritance_t
+ values
+ Error -- any other parse error
+ """
+ self.line = line
+ words = self.split_line = line.split(' ')
+ try:
+ method = getattr(self, '_parse_' + words[0].replace('-', '_'))
+ except AttributeError:
+ return self.handle_unknown(self.line)
+ return method(' '.join(words[1:]))
+
+ def _parse_commit(self, line):
+ m = _match(line, pREVNUM)
+ self.handle_commit(int(m.group(1)))
+ return line[m.end():]
+
+ def _parse_open(self, line):
+ pINT = r'(\d+)'
+ pCAP = r'cap=\(([^)]*)\)'
+ pCLIENT = pWORD
+ m = _match(line, pINT, pCAP, pPATH, pCLIENT, pCLIENT)
+ protocol = int(m.group(1))
+ if m.group(2) is None:
+ capabilities = []
+ else:
+ capabilities = m.group(2).split()
+ path = m.group(3)
+ ra_client = urllib_parse_unquote(m.group(4))
+ client = urllib_parse_unquote(m.group(5))
+ self.handle_open(protocol, capabilities, path, ra_client, client)
+ return line[m.end():]
+
+ def _parse_reparent(self, line):
+ m = _match(line, pPATH)
+ self.handle_reparent(urllib_parse_unquote(m.group(1)))
+ return line[m.end():]
+
+ def _parse_get_latest_rev(self, line):
+ self.handle_get_latest_rev()
+ return line
+
+ def _parse_get_dated_rev(self, line):
+ m = _match(line, pWORD)
+ self.handle_get_dated_rev(m.group(1))
+ return line[m.end():]
+
+ def _parse_get_dir(self, line):
+ m = _match(line, pPATH, pREVNUM, ['text', 'props'])
+ self.handle_get_dir(urllib_parse_unquote(m.group(1)), int(m.group(2)),
+ m.group(3) is not None,
+ m.group(4) is not None)
+ return line[m.end():]
+
+ def _parse_get_file(self, line):
+ m = _match(line, pPATH, pREVNUM, ['text', 'props'])
+ self.handle_get_file(urllib_parse_unquote(m.group(1)), int(m.group(2)),
+ m.group(3) is not None,
+ m.group(4) is not None)
+ return line[m.end():]
+
+ def _parse_lock(self, line):
+ m = _match(line, pPATHS, ['steal'])
+ paths = [urllib_parse_unquote(x) for x in m.group(1).split()]
+ self.handle_lock(paths, m.group(2) is not None)
+ return line[m.end():]
+
+ def _parse_change_rev_prop(self, line):
+ m = _match(line, pREVNUM, pPROPERTY)
+ self.handle_change_rev_prop(int(m.group(1)),
+ urllib_parse_unquote(m.group(2)))
+ return line[m.end():]
+
+ def _parse_rev_proplist(self, line):
+ m = _match(line, pREVNUM)
+ self.handle_rev_proplist(int(m.group(1)))
+ return line[m.end():]
+
+ def _parse_rev_prop(self, line):
+ m = _match(line, pREVNUM, pPROPERTY)
+ self.handle_rev_prop(int(m.group(1)), urllib_parse_unquote(m.group(2)))
+ return line[m.end():]
+
+ def _parse_unlock(self, line):
+ m = _match(line, pPATHS, ['break'])
+ paths = [urllib_parse_unquote(x) for x in m.group(1).split()]
+ self.handle_unlock(paths, m.group(2) is not None)
+ return line[m.end():]
+
+ def _parse_get_lock(self, line):
+ m = _match(line, pPATH)
+ self.handle_get_lock(urllib_parse_unquote(m.group(1)))
+ return line[m.end():]
+
+ def _parse_get_locks(self, line):
+ m = _match(line, pPATH)
+ self.handle_get_locks(urllib_parse_unquote(m.group(1)))
+ return line[m.end():]
+
+ def _parse_get_locations(self, line):
+ m = _match(line, pPATH, pREVNUMS)
+ path = urllib_parse_unquote(m.group(1))
+ revnums = [int(x) for x in m.group(2).split()]
+ self.handle_get_locations(path, revnums)
+ return line[m.end():]
+
+ def _parse_get_location_segments(self, line):
+ m = _match(line, pPATHREV, pREVRANGE)
+ path = urllib_parse_unquote(m.group(1))
+ peg = int(m.group(2))
+ left = int(m.group(3))
+ right = int(m.group(4))
+ self.handle_get_location_segments(path, peg, left, right)
+ return line[m.end():]
+
+ def _parse_get_file_revs(self, line):
+ m = _match(line, pPATH, pREVRANGE, ['include-merged-revisions'])
+ path = urllib_parse_unquote(m.group(1))
+ left = int(m.group(2))
+ right = int(m.group(3))
+ include_merged_revisions = m.group(4) is not None
+ self.handle_get_file_revs(path, left, right, include_merged_revisions)
+ return line[m.end():]
+
+ def _parse_get_mergeinfo(self, line):
+ # <I>
+ pMERGEINFO_INHERITANCE = pWORD
+ pINCLUDE_DESCENDANTS = pWORD
+ m = _match(line,
+ pPATHS, pMERGEINFO_INHERITANCE, ['include-descendants'])
+ paths = [urllib_parse_unquote(x) for x in m.group(1).split()]
+ inheritance = _parse_mergeinfo_inheritance(m.group(2))
+ include_descendants = m.group(3) is not None
+ self.handle_get_mergeinfo(paths, inheritance, include_descendants)
+ return line[m.end():]
+
+ def _parse_log(self, line):
+ # limit=<N>?
+ pLIMIT = r'limit=(\d+)'
+ # revprops=all|(<REVPROP> ...)?
+ pREVPROPS = r'revprops=(all|\(([^)]+)\))'
+ m = _match(line, pPATHS, pREVRANGE,
+ [pLIMIT, 'discover-changed-paths', 'strict',
+ 'include-merged-revisions', pREVPROPS])
+ paths = [urllib_parse_unquote(x) for x in m.group(1).split()]
+ left = int(m.group(2))
+ right = int(m.group(3))
+ if m.group(5) is None:
+ limit = 0
+ else:
+ limit = int(m.group(5))
+ discover_changed_paths = m.group(6) is not None
+ strict = m.group(7) is not None
+ include_merged_revisions = m.group(8) is not None
+ if m.group(10) == 'all':
+ revprops = None
+ else:
+ if m.group(11) is None:
+ revprops = []
+ else:
+ revprops = [urllib_parse_unquote(x) for x in m.group(11).split()]
+ self.handle_log(paths, left, right, limit, discover_changed_paths,
+ strict, include_merged_revisions, revprops)
+ return line[m.end():]
+
+ def _parse_check_path(self, line):
+ m = _match(line, pPATHREV)
+ path = urllib_parse_unquote(m.group(1))
+ revnum = int(m.group(2))
+ self.handle_check_path(path, revnum)
+ return line[m.end():]
+
+ def _parse_stat(self, line):
+ m = _match(line, pPATHREV)
+ path = urllib_parse_unquote(m.group(1))
+ revnum = int(m.group(2))
+ self.handle_stat(path, revnum)
+ return line[m.end():]
+
+ def _parse_replay(self, line):
+ m = _match(line, pPATH, pREVNUM)
+ path = urllib_parse_unquote(m.group(1))
+ revision = int(m.group(2))
+ self.handle_replay(path, revision)
+ return line[m.end():]
+
+ # the update report
+
+ def _parse_checkout_or_export(self, line):
+ m = _match(line, pPATH, pREVNUM, [pDEPTH])
+ path = urllib_parse_unquote(m.group(1))
+ revision = int(m.group(2))
+ depth = _parse_depth(m.group(4))
+ self.handle_checkout_or_export(path, revision, depth)
+ return line[m.end():]
+
+ def _parse_diff(self, line):
+ # First, try 1-path form.
+ try:
+ m = _match(line, pPATH, pREVRANGE, [pDEPTH, 'ignore-ancestry'])
+ f = self._parse_diff_1path
+ except Error:
+ # OK, how about 2-path form?
+ m = _match(line, pPATHREV, pPATHREV, [pDEPTH, 'ignore-ancestry'])
+ f = self._parse_diff_2paths
+ return f(line, m)
+
+ def _parse_diff_1path(self, line, m):
+ path = urllib_parse_unquote(m.group(1))
+ left = int(m.group(2))
+ right = int(m.group(3))
+ depth = _parse_depth(m.group(5))
+ ignore_ancestry = m.group(6) is not None
+ self.handle_diff_1path(path, left, right,
+ depth, ignore_ancestry)
+ return line[m.end():]
+
+ def _parse_diff_2paths(self, line, m):
+ from_path = urllib_parse_unquote(m.group(1))
+ from_rev = int(m.group(2))
+ to_path = urllib_parse_unquote(m.group(3))
+ to_rev = int(m.group(4))
+ depth = _parse_depth(m.group(6))
+ ignore_ancestry = m.group(7) is not None
+ self.handle_diff_2paths(from_path, from_rev, to_path, to_rev,
+ depth, ignore_ancestry)
+ return line[m.end():]
+
+ def _parse_status(self, line):
+ m = _match(line, pPATH, pREVNUM, [pDEPTH])
+ path = urllib_parse_unquote(m.group(1))
+ revision = int(m.group(2))
+ depth = _parse_depth(m.group(4))
+ self.handle_status(path, revision, depth)
+ return line[m.end():]
+
+ def _parse_switch(self, line):
+ m = _match(line, pPATH, pPATHREV, [pDEPTH])
+ from_path = urllib_parse_unquote(m.group(1))
+ to_path = urllib_parse_unquote(m.group(2))
+ to_rev = int(m.group(3))
+ depth = _parse_depth(m.group(5))
+ self.handle_switch(from_path, to_path, to_rev, depth)
+ return line[m.end():]
+
+ def _parse_update(self, line):
+ m = _match(line, pPATH, pREVNUM, [pDEPTH, 'send-copyfrom-args'])
+ path = urllib_parse_unquote(m.group(1))
+ revision = int(m.group(2))
+ depth = _parse_depth(m.group(4))
+ send_copyfrom_args = m.group(5) is not None
+ self.handle_update(path, revision, depth, send_copyfrom_args)
+ return line[m.end():]
diff --git a/tools/server-side/svnauthz.c b/tools/server-side/svnauthz.c
new file mode 100644
index 0000000..fc6cd89
--- /dev/null
+++ b/tools/server-side/svnauthz.c
@@ -0,0 +1,745 @@
+/*
+ * svnauthz.c : Tool for working with authz files.
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include "svn_cmdline.h"
+#include "svn_dirent_uri.h"
+#include "svn_opt.h"
+#include "svn_pools.h"
+#include "svn_repos.h"
+#include "svn_utf.h"
+#include "svn_path.h"
+
+#include "private/svn_fspath.h"
+#include "private/svn_cmdline_private.h"
+
+
+/*** Option Processing. ***/
+
+enum svnauthz__cmdline_options_t
+{
+ svnauthz__version = SVN_OPT_FIRST_LONGOPT_ID,
+ svnauthz__username,
+ svnauthz__path,
+ svnauthz__repos,
+ svnauthz__is,
+ svnauthz__groups_file
+};
+
+/* Option codes and descriptions.
+ *
+ * The entire list must be terminated with an entry of nulls.
+ */
+static const apr_getopt_option_t options_table[] =
+{
+ {"help", 'h', 0, ("show help on a subcommand")},
+ {NULL, '?', 0, ("show help on a subcommand")},
+ {"version", svnauthz__version, 0, ("show program version information")},
+ {"username", svnauthz__username, 1, ("username to check access of")},
+ {"path", svnauthz__path, 1, ("path within repository to check access of")},
+ {"repository", svnauthz__repos, 1, ("repository authz name")},
+ {"transaction", 't', 1, ("transaction id")},
+ {"is", svnauthz__is, 1,
+ ("instead of outputting, test if the access is\n"
+ " "
+ "exactly ARG\n"
+ " "
+ "ARG can be one of the following values:\n"
+ " "
+ " rw write access (which also implies read)\n"
+ " "
+ " r read-only access\n"
+ " "
+ " no no access")
+ },
+ {"groups-file", svnauthz__groups_file, 1,
+ ("use the groups from file ARG")},
+ {"recursive", 'R', 0,
+ ("determine recursive access to PATH")},
+ {0, 0, 0, 0}
+};
+
+struct svnauthz_opt_state
+{
+ svn_boolean_t help;
+ svn_boolean_t version;
+ svn_boolean_t recursive;
+ const char *authz_file;
+ const char *groups_file;
+ const char *username;
+ const char *fspath;
+ const char *repos_name;
+ const char *txn;
+ const char *repos_path;
+ const char *is;
+};
+
+/* The name of this binary in 1.7 and earlier. */
+#define SVNAUTHZ_COMPAT_NAME "svnauthz-validate"
+
+/* Libtool command prefix */
+#define SVNAUTHZ_LT_PREFIX "lt-"
+
+
+/*** Subcommands. */
+
+static svn_opt_subcommand_t
+ subcommand_help,
+ subcommand_validate,
+ subcommand_accessof;
+
+/* Array of available subcommands.
+ * The entire list must be terminated with an entry of nulls.
+ */
+static const svn_opt_subcommand_desc2_t cmd_table[] =
+{
+ {"help", subcommand_help, {"?", "h"},
+ ("usage: svnauthz help [SUBCOMMAND...]\n\n"
+ "Describe the usage of this program or its subcommands.\n"),
+ {0} },
+ {"validate", subcommand_validate, {0} /* no aliases */,
+ ("Checks the syntax of an authz file.\n"
+ "usage: 1. svnauthz validate TARGET\n"
+ " 2. svnauthz validate --transaction TXN REPOS_PATH FILE_PATH\n\n"
+ " 1. Loads and validates the syntax of the authz file at TARGET.\n"
+ " TARGET can be a path to a file or an absolute file:// URL to an authz\n"
+ " file in a repository, but cannot be a repository relative URL (^/).\n\n"
+ " 2. Loads and validates the syntax of the authz file at FILE_PATH in the\n"
+ " transaction TXN in the repository at REPOS_PATH.\n\n"
+ "Returns:\n"
+ " 0 when syntax is OK.\n"
+ " 1 when syntax is invalid.\n"
+ " 2 operational error\n"
+ ),
+ {'t'} },
+ {"accessof", subcommand_accessof, {0} /* no aliases */,
+ ("Print or test the permissions set by an authz file.\n"
+ "usage: 1. svnauthz accessof TARGET\n"
+ " 2. svnauthz accessof -t TXN REPOS_PATH FILE_PATH\n"
+ "\n"
+ " 1. Prints the access of USER to PATH based on authorization file at TARGET.\n"
+ " TARGET can be a path to a file or an absolute file:// URL to an authz\n"
+ " file in a repository, but cannot be a repository relative URL (^/).\n"
+ "\n"
+ " 2. Prints the access of USER to PATH based on authz file at FILE_PATH in the\n"
+ " transaction TXN in the repository at REPOS_PATH.\n"
+ "\n"
+ " USER is the argument to the --username option; if that option is not\n"
+ " provided, then access of an anonymous user will be printed or tested.\n"
+ "\n"
+ " PATH is the argument to the --path option; if that option is not provided,\n"
+ " the maximal access to any path in the repository will be considered.\n"
+ "\n"
+ "Outputs one of the following:\n"
+ " rw write access (which also implies read)\n"
+ " r read access\n"
+ " no no access\n"
+ "\n"
+ "Returns:\n"
+ " 0 when syntax is OK and '--is' argument (if any) matches.\n"
+ " 1 when syntax is invalid.\n"
+ " 2 operational error\n"
+ " 3 when '--is' argument doesn't match\n"
+ ),
+ {'t', svnauthz__username, svnauthz__path, svnauthz__repos, svnauthz__is,
+ svnauthz__groups_file, 'R'} },
+ { NULL, NULL, {0}, NULL, {0} }
+};
+
+static svn_error_t *
+subcommand_help(apr_getopt_t *os, void *baton, apr_pool_t *pool)
+{
+ struct svnauthz_opt_state *opt_state = baton;
+ const char *header =
+ ("general usage: svnauthz SUBCOMMAND TARGET [ARGS & OPTIONS ...]\n"
+ " " SVNAUTHZ_COMPAT_NAME " TARGET\n\n"
+ "If the command name starts with '" SVNAUTHZ_COMPAT_NAME "', runs in\n"
+ "pre-1.8 compatibility mode: run the 'validate' subcommand on TARGET.\n\n"
+ "Type 'svnauthz help <subcommand>' for help on a specific subcommand.\n"
+ "Type 'svnauthz --version' to see the program version.\n\n"
+ "Available subcommands:\n");
+
+ const char *fs_desc_start
+ = ("The following repository back-end (FS) modules are available:\n\n");
+
+ svn_stringbuf_t *version_footer;
+
+ version_footer = svn_stringbuf_create(fs_desc_start, pool);
+ SVN_ERR(svn_fs_print_modules(version_footer, pool));
+
+ SVN_ERR(svn_opt_print_help4(os, "svnauthz",
+ opt_state ? opt_state->version : FALSE,
+ FALSE, /* quiet */
+ FALSE, /* verbose */
+ version_footer->data,
+ header, cmd_table, options_table, NULL, NULL,
+ pool));
+
+ return SVN_NO_ERROR;
+}
+
+/* Loads the fs FILENAME contents into *CONTENTS ensuring that the
+ corresponding node is a file. Using POOL for allocations. */
+static svn_error_t *
+read_file_contents(svn_stream_t **contents, const char *filename,
+ svn_fs_root_t *root, apr_pool_t *pool)
+{
+ svn_node_kind_t node_kind;
+
+ /* Make sure the path is a file */
+ SVN_ERR(svn_fs_check_path(&node_kind, root, filename, pool));
+ if (node_kind != svn_node_file)
+ return svn_error_createf(SVN_ERR_FS_NOT_FILE, NULL,
+ "Path '%s' is not a file", filename);
+
+ SVN_ERR(svn_fs_file_contents(contents, root, filename, pool));
+
+ return SVN_NO_ERROR;
+}
+
+/* Loads the authz config into *AUTHZ from the file at AUTHZ_FILE
+ in repository at REPOS_PATH from the transaction TXN_NAME. If GROUPS_FILE
+ is set, the resulting *AUTHZ will be constructed from AUTHZ_FILE with
+ global groups taken from GROUPS_FILE. Using POOL for allocations. */
+static svn_error_t *
+get_authz_from_txn(svn_authz_t **authz, const char *repos_path,
+ const char *authz_file, const char *groups_file,
+ const char *txn_name, apr_pool_t *pool)
+{
+ svn_repos_t *repos;
+ svn_fs_t *fs;
+ svn_fs_txn_t *txn;
+ svn_fs_root_t *root;
+ svn_stream_t *authz_contents;
+ svn_stream_t *groups_contents;
+ svn_error_t *err;
+
+ /* Open up the repository and find the transaction root */
+ SVN_ERR(svn_repos_open3(&repos, repos_path, NULL, pool, pool));
+ fs = svn_repos_fs(repos);
+ SVN_ERR(svn_fs_open_txn(&txn, fs, txn_name, pool));
+ SVN_ERR(svn_fs_txn_root(&root, txn, pool));
+
+ /* Get the authz file contents. */
+ SVN_ERR(read_file_contents(&authz_contents, authz_file, root, pool));
+
+ /* Get the groups file contents if needed. */
+ if (groups_file)
+ SVN_ERR(read_file_contents(&groups_contents, groups_file, root, pool));
+ else
+ groups_contents = NULL;
+
+ err = svn_repos_authz_parse(authz, authz_contents, groups_contents, pool);
+
+ /* Add the filename to the error stack since the parser doesn't have it. */
+ if (err != SVN_NO_ERROR)
+ return svn_error_createf(err->apr_err, err,
+ "Error parsing authz file: '%s':", authz_file);
+
+ return SVN_NO_ERROR;
+}
+
+/* Loads the authz config into *AUTHZ from OPT_STATE->AUTHZ_FILE. If
+ OPT_STATE->GROUPS_FILE is set, loads the global groups from it.
+ If OPT_STATE->TXN is set then OPT_STATE->AUTHZ_FILE and
+ OPT_STATE->GROUPS_FILE are treated as fspaths in repository at
+ OPT_STATE->REPOS_PATH. */
+static svn_error_t *
+get_authz(svn_authz_t **authz, struct svnauthz_opt_state *opt_state,
+ apr_pool_t *pool)
+{
+ /* Read the access file and validate it. */
+ if (opt_state->txn)
+ return get_authz_from_txn(authz, opt_state->repos_path,
+ opt_state->authz_file,
+ opt_state->groups_file,
+ opt_state->txn, pool);
+
+ /* Else */
+ return svn_repos_authz_read3(authz, opt_state->authz_file,
+ opt_state->groups_file,
+ TRUE, NULL, pool, pool);
+}
+
+static svn_error_t *
+subcommand_validate(apr_getopt_t *os, void *baton, apr_pool_t *pool)
+{
+ struct svnauthz_opt_state *opt_state = baton;
+ svn_authz_t *authz;
+
+ /* Not much to do here since just loading the authz file also validates. */
+ return get_authz(&authz, opt_state, pool);
+}
+
+static svn_error_t *
+subcommand_accessof(apr_getopt_t *os, void *baton, apr_pool_t *pool)
+{
+ svn_authz_t *authz;
+ svn_boolean_t read_access = FALSE, write_access = FALSE;
+ svn_boolean_t check_r = FALSE, check_rw = FALSE, check_no = FALSE;
+ svn_error_t *err;
+ struct svnauthz_opt_state *opt_state = baton;
+ const char *user = opt_state->username;
+ const char *path = opt_state->fspath;
+ const char *repos = opt_state->repos_name;
+ const char *is = opt_state->is;
+ svn_repos_authz_access_t request;
+
+ if (opt_state->recursive && !path)
+ return svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
+ ("--recursive not valid without --path"));
+
+ /* Handle is argument parsing/allowed values */
+ if (is) {
+ if (0 == strcmp(is, "rw"))
+ check_rw = TRUE;
+ else if (0 == strcmp(is, "r"))
+ check_r = TRUE;
+ else if (0 == strcmp(is, "no"))
+ check_no = TRUE;
+ else
+ return svn_error_createf(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
+ ("'%s' is not a valid argument for --is"), is);
+ }
+
+ SVN_ERR(get_authz(&authz, opt_state, pool));
+
+
+ request = svn_authz_write;
+ if (opt_state->recursive)
+ request |= svn_authz_recursive;
+ err = svn_repos_authz_check_access(authz, repos, path, user,
+ request, &write_access,
+ pool);
+
+ if (!write_access && !err)
+ {
+ request = svn_authz_read;
+ if (opt_state->recursive)
+ request |= svn_authz_recursive;
+ err = svn_repos_authz_check_access(authz, repos, path, user,
+ request, &read_access,
+ pool);
+ }
+
+ if (!err)
+ {
+ const char *access_str = write_access ? "rw" : read_access ? "r" : "no";
+
+ if (is)
+ {
+ /* Check that --is argument matches.
+ * The errors returned here are not strictly correct, but
+ * none of the other code paths will generate them and they
+ * roughly mean what we're saying here. */
+ if (check_rw && !write_access)
+ err = svn_error_createf(SVN_ERR_AUTHZ_UNWRITABLE, NULL,
+ ("%s is '%s', not writable"),
+ path ? path : ("Repository"), access_str);
+ else if (check_r && !read_access)
+ err = svn_error_createf(SVN_ERR_AUTHZ_UNREADABLE, NULL,
+ ("%s is '%s', not read only"),
+ path ? path : ("Repository"), access_str);
+ else if (check_no && (read_access || write_access))
+ err = svn_error_createf(SVN_ERR_AUTHZ_PARTIALLY_READABLE,
+ NULL, ("%s is '%s', not no access"),
+ path ? path : ("Repository"), access_str);
+ }
+ else
+ {
+ err = svn_cmdline_printf(pool, "%s\n", access_str);
+ }
+ }
+
+ return err;
+}
+
+
+
+/*** Main. ***/
+
+/* A redefinition of EXIT_FAILURE since our contract demands that we
+ exit with 2 for internal failures. */
+#undef EXIT_FAILURE
+#define EXIT_FAILURE 2
+
+/* Return TRUE if the UI of 'svnauthz-validate' (svn 1.7 and earlier)
+ should be emulated, given argv[0]. */
+static svn_boolean_t
+use_compat_mode(const char *cmd, apr_pool_t *pool)
+{
+ cmd = svn_dirent_internal_style(cmd, pool);
+ cmd = svn_dirent_basename(cmd, NULL);
+
+ /* Skip over the Libtool command prefix if it exists on the command. */
+ if (0 == strncmp(SVNAUTHZ_LT_PREFIX, cmd, sizeof(SVNAUTHZ_LT_PREFIX)-1))
+ cmd += sizeof(SVNAUTHZ_LT_PREFIX) - 1;
+
+ /* Deliberately look only for the start of the name to deal with
+ the executable extension on some platforms. */
+ return 0 == strncmp(SVNAUTHZ_COMPAT_NAME, cmd,
+ sizeof(SVNAUTHZ_COMPAT_NAME)-1);
+}
+
+/* Canonicalize ACCESS_FILE into *CANONICALIZED_ACCESS_FILE based on the type
+ of argument. Error out on unsupported path types. If WITHIN_TXN is set,
+ ACCESS_FILE has to be a fspath in the repo. Use POOL for allocations. */
+static svn_error_t *
+canonicalize_access_file(const char **canonicalized_access_file,
+ const char *access_file,
+ svn_boolean_t within_txn,
+ apr_pool_t *pool)
+{
+ if (svn_path_is_repos_relative_url(access_file))
+ {
+ /* Can't accept repos relative urls since we don't have the path to
+ * the repository. */
+ return svn_error_createf(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
+ ("'%s' is a repository relative URL when it "
+ "should be a local path or file:// URL"),
+ access_file);
+ }
+ else if (svn_path_is_url(access_file))
+ {
+ if (within_txn)
+ {
+ /* Don't allow urls with transaction argument. */
+ return svn_error_createf(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
+ ("'%s' is a URL when it should be a "
+ "repository-relative path"),
+ access_file);
+ }
+
+ *canonicalized_access_file = svn_uri_canonicalize(access_file, pool);
+ }
+ else if (within_txn)
+ {
+ /* Transaction flag means this has to be a fspath to the access file
+ * in the repo. */
+ *canonicalized_access_file =
+ svn_fspath__canonicalize(access_file, pool);
+ }
+ else
+ {
+ /* If it isn't a URL and there's no transaction flag then it's a
+ * dirent to the access file on local disk. */
+ *canonicalized_access_file =
+ svn_dirent_internal_style(access_file, pool);
+ }
+
+ return SVN_NO_ERROR;
+}
+
+/*
+ * On success, leave *EXIT_CODE untouched and return SVN_NO_ERROR. On error,
+ * either return an error to be displayed, or set *EXIT_CODE to non-zero and
+ * return SVN_NO_ERROR.
+ */
+static svn_error_t *
+sub_main(int *exit_code, int argc, const char *argv[], apr_pool_t *pool)
+{
+ svn_error_t *err;
+
+ const svn_opt_subcommand_desc2_t *subcommand = NULL;
+ struct svnauthz_opt_state opt_state = { 0 };
+ apr_getopt_t *os;
+ apr_array_header_t *received_opts;
+ int i;
+
+ /* Initialize the FS library. */
+ SVN_ERR(svn_fs_initialize(pool));
+
+ received_opts = apr_array_make(pool, SVN_OPT_MAX_OPTIONS, sizeof(int));
+
+ /* Initialize opt_state */
+ opt_state.username = opt_state.fspath = opt_state.repos_name = NULL;
+ opt_state.txn = opt_state.repos_path = opt_state.groups_file = NULL;
+
+ /* Parse options. */
+ SVN_ERR(svn_cmdline__getopt_init(&os, argc, argv, pool));
+ os->interleave = 1;
+
+ if (!use_compat_mode(argv[0], pool))
+ {
+ while (1)
+ {
+ int opt;
+ const char *arg;
+ apr_status_t status = apr_getopt_long(os, options_table, &opt, &arg);
+
+ if (APR_STATUS_IS_EOF(status))
+ break;
+ if (status != APR_SUCCESS)
+ {
+ SVN_ERR(subcommand_help(NULL, NULL, pool));
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
+ }
+
+ /* Stash the option code in an array before parsing it. */
+ APR_ARRAY_PUSH(received_opts, int) = opt;
+
+ switch (opt)
+ {
+ case 'h':
+ case '?':
+ opt_state.help = TRUE;
+ break;
+ case 't':
+ SVN_ERR(svn_utf_cstring_to_utf8(&opt_state.txn, arg, pool));
+ break;
+ case 'R':
+ opt_state.recursive = TRUE;
+ break;
+ case svnauthz__version:
+ opt_state.version = TRUE;
+ break;
+ case svnauthz__username:
+ SVN_ERR(svn_utf_cstring_to_utf8(&opt_state.username, arg, pool));
+ break;
+ case svnauthz__path:
+ SVN_ERR(svn_utf_cstring_to_utf8(&opt_state.fspath, arg, pool));
+ opt_state.fspath = svn_fspath__canonicalize(opt_state.fspath,
+ pool);
+ break;
+ case svnauthz__repos:
+ SVN_ERR(svn_utf_cstring_to_utf8(&opt_state.repos_name, arg, pool));
+ break;
+ case svnauthz__is:
+ SVN_ERR(svn_utf_cstring_to_utf8(&opt_state.is, arg, pool));
+ break;
+ case svnauthz__groups_file:
+ SVN_ERR(
+ svn_utf_cstring_to_utf8(&opt_state.groups_file,
+ arg, pool));
+ break;
+ default:
+ {
+ SVN_ERR(subcommand_help(NULL, NULL, pool));
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
+ }
+ }
+ }
+ }
+ else
+ {
+ /* Pre 1.8 compatibility mode. */
+ if (argc == 1) /* No path argument */
+ subcommand = svn_opt_get_canonical_subcommand2(cmd_table, "help");
+ else
+ subcommand = svn_opt_get_canonical_subcommand2(cmd_table, "validate");
+ }
+
+ /* If the user asked for help, then the rest of the arguments are
+ the names of subcommands to get help on (if any), or else they're
+ just typos/mistakes. Whatever the case, the subcommand to
+ actually run is subcommand_help(). */
+ if (opt_state.help)
+ subcommand = svn_opt_get_canonical_subcommand2(cmd_table, "help");
+
+ if (subcommand == NULL)
+ {
+ if (os->ind >= os->argc)
+ {
+ if (opt_state.version)
+ {
+ /* Use the "help" subcommand to handle the "--version" option. */
+ static const svn_opt_subcommand_desc2_t pseudo_cmd =
+ { "--version", subcommand_help, {0}, "",
+ {svnauthz__version /* must accept its own option */ } };
+
+ subcommand = &pseudo_cmd;
+ }
+ else
+ {
+ svn_error_clear(svn_cmdline_fprintf(stderr, pool,
+ ("subcommand argument required\n")));
+ SVN_ERR(subcommand_help(NULL, NULL, pool));
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
+ }
+ }
+ else
+ {
+ const char *first_arg;
+
+ SVN_ERR(svn_utf_cstring_to_utf8(&first_arg, os->argv[os->ind++],
+ pool));
+ subcommand = svn_opt_get_canonical_subcommand2(cmd_table, first_arg);
+ if (subcommand == NULL)
+ {
+ os->ind++;
+ svn_error_clear(
+ svn_cmdline_fprintf(stderr, pool,
+ ("Unknown subcommand: '%s'\n"),
+ first_arg));
+ SVN_ERR(subcommand_help(NULL, NULL, pool));
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
+ }
+ }
+ }
+
+ /* Every subcommand except `help' requires one or two non-option arguments.
+ Parse them and store them in opt_state.*/
+ if (subcommand->cmd_func != subcommand_help)
+ {
+ /* Consume a non-option argument (repos_path) if --transaction */
+ if (opt_state.txn)
+ {
+ if (os->ind +2 != argc)
+ {
+ return svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
+ ("Repository and authz file arguments "
+ "required"));
+ }
+
+ SVN_ERR(svn_utf_cstring_to_utf8(&opt_state.repos_path, os->argv[os->ind],
+ pool));
+ os->ind++;
+
+ opt_state.repos_path = svn_dirent_internal_style(opt_state.repos_path, pool);
+ }
+
+ /* Exactly 1 non-option argument */
+ if (os->ind + 1 != argc)
+ {
+ return svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
+ ("Authz file argument required"));
+ }
+
+ /* Grab AUTHZ_FILE from argv. */
+ SVN_ERR(svn_utf_cstring_to_utf8(&opt_state.authz_file, os->argv[os->ind],
+ pool));
+
+ /* Canonicalize opt_state.authz_file appropriately. */
+ SVN_ERR(canonicalize_access_file(&opt_state.authz_file,
+ opt_state.authz_file,
+ opt_state.txn != NULL, pool));
+
+ /* Same for opt_state.groups_file if it is present. */
+ if (opt_state.groups_file)
+ {
+ SVN_ERR(canonicalize_access_file(&opt_state.groups_file,
+ opt_state.groups_file,
+ opt_state.txn != NULL, pool));
+ }
+ }
+
+ /* Check that the subcommand wasn't passed any inappropriate options. */
+ for (i = 0; i < received_opts->nelts; i++)
+ {
+ int opt_id = APR_ARRAY_IDX(received_opts, i, int);
+
+ /* All commands implicitly accept --help, so just skip over this
+ when we see it. Note that we don't want to include this option
+ in their "accepted options" list because it would be awfully
+ redundant to display it in every commands' help text. */
+ if (opt_id == 'h' || opt_id == '?')
+ continue;
+
+ if (! svn_opt_subcommand_takes_option3(subcommand, opt_id, NULL))
+ {
+ const char *optstr;
+ const apr_getopt_option_t *badopt =
+ svn_opt_get_option_from_code2(opt_id, options_table, subcommand,
+ pool);
+ svn_opt_format_option(&optstr, badopt, FALSE, pool);
+ if (subcommand->name[0] == '-')
+ SVN_ERR(subcommand_help(NULL, NULL, pool));
+ else
+ svn_error_clear(svn_cmdline_fprintf(stderr, pool,
+ ("Subcommand '%s' doesn't accept option '%s'\n"
+ "Type 'svnauthz help %s' for usage.\n"),
+ subcommand->name, optstr, subcommand->name));
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
+ }
+ }
+
+ /* Run the subcommand. */
+ err = (*subcommand->cmd_func)(os, &opt_state, pool);
+
+ if (err)
+ {
+ if (err->apr_err == SVN_ERR_CL_INSUFFICIENT_ARGS
+ || err->apr_err == SVN_ERR_CL_ARG_PARSING_ERROR)
+ {
+ /* For argument-related problems, suggest using the 'help'
+ subcommand. */
+ err = svn_error_quick_wrap(err,
+ ("Try 'svnauthz help' for more info"));
+ }
+ else if (err->apr_err == SVN_ERR_AUTHZ_INVALID_CONFIG
+ || err->apr_err == SVN_ERR_MALFORMED_FILE)
+ {
+ /* Follow our contract that says we exit with 1 if the file does not
+ validate. */
+ *exit_code = 1;
+ return err;
+ }
+ else if (err->apr_err == SVN_ERR_AUTHZ_UNREADABLE
+ || err->apr_err == SVN_ERR_AUTHZ_UNWRITABLE
+ || err->apr_err == SVN_ERR_AUTHZ_PARTIALLY_READABLE)
+ {
+ /* Follow our contract that says we exit with 3 if --is does not
+ * match. */
+ *exit_code = 3;
+ return err;
+ }
+
+ return err;
+ }
+
+ return SVN_NO_ERROR;
+}
+
int
main(int argc, const char *argv[])
{
  apr_pool_t *pool;
  int exit_code = EXIT_SUCCESS;
  svn_error_t *err;

  /* Initialize the app.  Send all error messages to 'stderr'. */
  if (svn_cmdline_init(argv[0], stderr) != EXIT_SUCCESS)
    return EXIT_FAILURE;

  /* Top-level pool for the whole run; destroyed just before exit. */
  pool = svn_pool_create(NULL);

  err = sub_main(&exit_code, argc, argv, pool);

  /* Flush stdout and report if it fails. It would be flushed on exit anyway
     but this makes sure that output is not silently lost if it fails. */
  err = svn_error_compose_create(err, svn_cmdline_fflush(stdout));

  if (err)
    {
      /* sub_main() may already have chosen a contract-specific exit code
         (e.g. 1 for invalid config, 3 for --is mismatch); only fall back
         to the generic failure code when it did not. */
      if (exit_code == 0)
        exit_code = EXIT_FAILURE;
      svn_cmdline_handle_exit_error(err, NULL, "svnauthz: ");
    }

  svn_pool_destroy(pool);
  return exit_code;
}
diff --git a/tools/server-side/svnpredumpfilter.py b/tools/server-side/svnpredumpfilter.py
new file mode 100755
index 0000000..04190c1
--- /dev/null
+++ b/tools/server-side/svnpredumpfilter.py
@@ -0,0 +1,338 @@
+#!/usr/bin/env python
+
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+"""\
+Usage: 1. {PROGRAM} [OPTIONS] include INCLUDE-PATH ...
+ 2. {PROGRAM} [OPTIONS] exclude EXCLUDE-PATH ...
+
+Read a Subversion revision log output stream from stdin, analyzing its
+revision log history to see what paths would need to be additionally
+provided as part of the list of included/excluded paths if trying to
+use Subversion's 'svndumpfilter' program to include/exclude paths from
+a full dump of a repository's history.
+
+The revision log stream should be the result of 'svn log -v' or 'svn
+log -vq' when run against the root of the repository whose history
+will be filtered by a user with universal read access to the
+repository's data. Do not use the --use-merge-history (-g) or
+--stop-on-copy when generating this revision log stream.
+Use the default ordering of revisions (that is, '-r HEAD:0').
+
+Return errorcode 0 if there are no additional dependencies found, 1 if
+there were; any other errorcode indicates a fatal error.
+
+Paths in mergeinfo are not considered as additional dependencies so the
+--skip-missing-merge-sources option of 'svndumpfilter' may be required
+for successful filtering with the resulting path list.
+
+Options:
+
+ --help (-h) Show this usage message and exit.
+
+ --targets FILE Read INCLUDE-PATHs and EXCLUDE-PATHs from FILE,
+ one path per line.
+
+ --verbose (-v) Provide more information. May be used multiple
+ times for additional levels of information (-vv).
+"""
+import sys
+import os
+import getopt
+import string
+
verbosity = 0  # incremented once per -v/--verbose flag; read by log()

class LogStreamError(Exception): pass
# NOTE(review): this class shadows the builtin EOFError.  Every raise and
# except site in this module resolves to this local class; confirm all
# callers before renaming it.
class EOFError(Exception): pass

EXIT_SUCCESS = 0   # no additional dependencies found
EXIT_MOREDEPS = 1  # additional dependencies were found
EXIT_FAILURE = 2   # fatal error
+
def sanitize_path(path):
  """Normalize PATH: drop empty segments and leading/trailing slashes."""
  return '/'.join(segment for segment in path.split('/') if segment)
+
def subsumes(path, maybe_child):
  """Return True iff MAYBE_CHILD is PATH itself or lies underneath it."""
  return path == maybe_child or maybe_child.startswith(path + '/')
+
def compare_paths(path1, path2):
  """Three-way compare of repository paths (cmp-style: -1, 0, or 1).

  Ordering rule: children of a path sort after their parent, but before
  the parent's greater siblings (i.e. '/' sorts before any other
  character at the first point of difference).
  """
  # Are the paths exactly the same?
  if path1 == path2:
    return 0

  # Skip past common prefix
  path1_len = len(path1)
  path2_len = len(path2)
  min_len = min(path1_len, path2_len)
  i = 0
  while (i < min_len) and (path1[i] == path2[i]):
    i = i + 1

  # Children of paths are greater than their parents, but less than
  # greater siblings of their parents
  char1 = '\0'
  char2 = '\0'
  if (i < path1_len):
    char1 = path1[i]
  if (i < path2_len):
    char2 = path2[i]

  if (char1 == '/') and (i == path2_len):
    return 1
  if (char2 == '/') and (i == path1_len):
    return -1
  if (i < path1_len) and (char1 == '/'):
    return -1
  if (i < path2_len) and (char2 == '/'):
    return 1

  # Common prefix was skipped above, next character is compared to
  # determine order.  The cmp() builtin was removed in Python 3; this
  # expression is its documented equivalent.
  return (char1 > char2) - (char1 < char2)
+
def log(msg, min_verbosity):
  """Emit MSG on stderr when the global verbosity reaches MIN_VERBOSITY.

  Level 1 and level 2 messages carry distinguishing prefixes.
  """
  if verbosity < min_verbosity:
    return
  prefix = {1: "[* ] ", 2: "[**] "}.get(min_verbosity, "")
  sys.stderr.write(prefix + msg + "\n")
+
class DependencyTracker:
  """Track requested include paths plus the copy sources they depend on."""

  def __init__(self, include_paths):
    # Paths the user explicitly asked to include.
    self.include_paths = set(include_paths)
    # Copy-source paths discovered to be required by the included paths.
    self.dependent_paths = set()

  def path_included(self, path):
    """Return True if PATH falls under any included or dependent path."""
    for include_path in self.include_paths | self.dependent_paths:
      if subsumes(include_path, path):
        return True
    return False

  def include_missing_copies(self, path_copies):
    """Pull required copy sources from PATH_COPIES into dependent_paths.

    PATH_COPIES is a set of (path, copyfrom_path) pairs; pairs consumed
    here are removed from it.  Iterates to a fixed point, because a newly
    added copy source can make further pairs relevant.
    """
    while True:
      log("Cross-checking %d included paths with %d copies "
          "for missing path dependencies..." % (
          len(self.include_paths) + len(self.dependent_paths),
          len(path_copies)),
          1)
      included_copies = []
      for path, copyfrom_path in path_copies:
        if self.path_included(path):
          log("Adding copy '%s' -> '%s'" % (copyfrom_path, path), 1)
          self.dependent_paths.add(copyfrom_path)
          included_copies.append((path, copyfrom_path))
      if not included_copies:
        log("Found all missing path dependencies", 1)
        break
      # Remove consumed pairs so the next pass only re-examines the rest.
      for path, copyfrom_path in included_copies:
        path_copies.remove((path, copyfrom_path))
      log("Found %d new copy dependencies, need to re-check for more"
          % len(included_copies), 1)
+
def readline(stream):
  """Read one line from STREAM, strip the trailing newline, and return it.

  Raises the module-local EOFError when the stream is exhausted.
  """
  raw = stream.readline()
  if not raw:
    raise EOFError("Unexpected end of stream")
  stripped = raw.rstrip('\n\r')
  log(stripped, 2)
  return stripped
+
def svn_log_stream_get_dependencies(stream, included_paths):
  """Parse an 'svn log -v' STREAM and compute copy dependencies.

  INCLUDED_PATHS is a collection of sanitized repository paths the user
  wants to keep.  Returns a DependencyTracker whose dependent_paths holds
  the extra copy-source paths those INCLUDED_PATHS require.

  Raises LogStreamError if the stream does not look like 'svn log -v'
  output in the default youngest-first ordering.
  """
  import re

  dt = DependencyTracker(included_paths)

  header_re = re.compile(r'^r([0-9]+) \|.*$')
  action_re = re.compile(r'^ [ADMR] /(.*)$')
  copy_action_re = re.compile(r'^ [AR] /(.*) \(from /(.*):[0-9]+\)$')
  line_buf = None
  last_revision = 0
  eof = False
  path_copies = set()
  found_changed_path = False

  while not eof:
    try:
      # Use the pushed-back line from the previous iteration when there
      # is one, otherwise read fresh.  (The previous "A and B or C"
      # idiom wrongly re-read the stream when the buffered line was an
      # empty string.)
      if line_buf is not None:
        line = line_buf
      else:
        line = readline(stream)
    except EOFError:
      break

    # We should be sitting at a log divider line.
    if line != '-' * 72:
      raise LogStreamError("Expected log divider line; not found.")

    # Next up is a log header line.
    try:
      line = readline(stream)
    except EOFError:
      break
    match = header_re.search(line)
    if not match:
      raise LogStreamError("Expected log header line; not found.")
    # Python 3 compat: string.strip() no longer exists and map() is lazy,
    # so build the list with a comprehension.
    pieces = [piece.strip() for piece in line.split('|')]
    revision = int(pieces[0][1:])
    if last_revision and revision >= last_revision:
      raise LogStreamError("Revisions are misordered. Make sure log stream "
                           "is from 'svn log' with the youngest revisions "
                           "before the oldest ones (the default ordering).")
    log("Parsing revision %d" % (revision), 1)
    last_revision = revision
    idx = pieces[-1].find(' line')
    if idx != -1:
      log_lines = int(pieces[-1][:idx])
    else:
      log_lines = 0

    # Now see if there are any changed paths. If so, parse and process them.
    line = readline(stream)
    if line == 'Changed paths:':
      while 1:
        try:
          line = readline(stream)
        except EOFError:
          eof = True
          break
        match = copy_action_re.search(line)
        if match:
          found_changed_path = True
          path_copies.add((sanitize_path(match.group(1)),
                           sanitize_path(match.group(2))))
        elif action_re.search(line):
          found_changed_path = True
        else:
          break

    # Finally, skip any log message lines. (If there are none,
    # remember the last line we read, because it probably has
    # something important in it.)
    if log_lines:
      for i in range(log_lines):
        readline(stream)
      line_buf = None
    else:
      line_buf = line

  if not found_changed_path:
    raise LogStreamError("No changed paths found; did you remember to run "
                         "'svn log' with the --verbose (-v) option when "
                         "generating the input to this script?")

  dt.include_missing_copies(path_copies)
  return dt
+
def analyze_logs(included_paths):
  """Read 'svn log -v' output from stdin and report extra dependencies.

  INCLUDED_PATHS is a list of sanitized paths the user wants to include.
  Returns EXIT_MOREDEPS when additional copy-source paths were found,
  EXIT_SUCCESS otherwise.
  """
  import functools

  print("Initial include paths:")
  for path in included_paths:
    print(" + /%s" % (path))

  dt = svn_log_stream_get_dependencies(sys.stdin, included_paths)

  if dt.dependent_paths:
    found_new_deps = True
    print("Dependent include paths found:")
    for path in dt.dependent_paths:
      print(" + /%s" % (path))
    print("You need to also include them (or one of their parents).")
  else:
    found_new_deps = False
    print("No new dependencies found!")
    # Collect every ancestor directory of the included paths.
    parents = set()
    for path in dt.include_paths:
      while 1:
        parent = os.path.dirname(path)
        if not parent:
          break
        parents.add(parent)
        path = parent
    if parents:
      print("You might still need to manually create parent directories " \
            "for the included paths before loading a filtered dump:")
      # list.sort(cmpfunc) is Python-2-only; cmp_to_key (2.7+) works on
      # both major versions.
      for parent in sorted(parents, key=functools.cmp_to_key(compare_paths)):
        print(" /%s" % (parent))

  return found_new_deps and EXIT_MOREDEPS or EXIT_SUCCESS
+
def usage_and_exit(errmsg=None):
  """Print the usage message and exit.

  With ERRMSG, write to stderr and exit EXIT_FAILURE; otherwise write to
  stdout and exit EXIT_SUCCESS.
  """
  program = os.path.basename(sys.argv[0])
  if errmsg:
    stream = sys.stderr
  else:
    stream = sys.stdout
  stream.write(__doc__.replace("{PROGRAM}", program))
  if errmsg:
    stream.write("\nERROR: %s\n" % (errmsg))
    sys.exit(EXIT_FAILURE)
  sys.exit(EXIT_SUCCESS)
+
def main():
  """Parse options, read target paths, and dispatch to the subcommand."""
  targets_file = None

  try:
    opts, args = getopt.getopt(sys.argv[1:], "hv",
                               ["help", "verbose", "targets="])
  except getopt.GetoptError as e:
    usage_and_exit(str(e))

  for option, value in opts:
    if option in ['-h', '--help']:
      usage_and_exit()
    elif option in ['-v', '--verbose']:
      global verbosity
      verbosity = verbosity + 1
    elif option in ['--targets']:
      targets_file = value

  if len(args) == 0:
    usage_and_exit("Not enough arguments")

  if targets_file is None:
    targets = args[1:]
  else:
    # Build a real list (map() is lazy on Python 3, which would defeat
    # the emptiness check below) and close the file promptly.
    with open(targets_file, 'r') as tf:
      targets = [line.rstrip('\n\r') for line in tf]
  if not targets:
    usage_and_exit("No target paths specified")

  try:
    if args[0] == 'include':
      sys.exit(analyze_logs([sanitize_path(t) for t in targets]))
    elif args[0] == 'exclude':
      usage_and_exit("Feature not implemented")
    else:
      usage_and_exit("Valid subcommands are 'include' and 'exclude'")
  except SystemExit:
    raise
  except (LogStreamError, EOFError) as e:
    log("ERROR: " + str(e), 0)
    sys.exit(EXIT_FAILURE)
  except:
    # Last-resort boundary: print the traceback and exit non-zero.
    import traceback
    exc_type, exc, exc_tb = sys.exc_info()
    tb = traceback.format_exception(exc_type, exc, exc_tb)
    sys.stderr.write(''.join(tb))
    sys.exit(EXIT_FAILURE)


if __name__ == "__main__":
  main()
diff --git a/tools/server-side/svnpubsub/README.txt b/tools/server-side/svnpubsub/README.txt
new file mode 100644
index 0000000..ad4975e
--- /dev/null
+++ b/tools/server-side/svnpubsub/README.txt
@@ -0,0 +1,24 @@
+Installation instructions:
+
+1. Set up an svnpubsub service.
+
+ This directory should be checked out to /usr/local/svnpubsub (or /opt/svnpubsub
+ on Debian).
+
+ There are init scripts for several OSes in the rc.d/ directory; add them
+ to your OS boot process in the usual way for your OS. (For example, via
+ rc.conf(5) or update-rc.d(8).)
+
+2. Run "commit-hook.py $REPOS $REV" from your post-commit hook.
+
+ (As of 1.7, these are the same ordered arguments the post-commit hook
+ itself receives, so you can just symlink commit-hook.py as the
+ hooks/post-commit hook if you don't need any other hooks to run in the
+ server process. (This isn't as insane as it sounds --- post-commit email
+ hooks could also feed off svnpubsub, and thus not be run within the
+ committing server thread, but on any other process or box that listens
+ to the svnpubsub stream!))
+
+3. Set up svnpubsub clients.
+
+ (eg svnwcsub.py, svnpubsub/client.py,
+ 'curl -sN http://${hostname}:2069/commits')
diff --git a/tools/server-side/svnpubsub/commit-hook.py b/tools/server-side/svnpubsub/commit-hook.py
new file mode 100755
index 0000000..4e6a1cc
--- /dev/null
+++ b/tools/server-side/svnpubsub/commit-hook.py
@@ -0,0 +1,92 @@
+#!/usr/local/bin/python
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+SVNLOOK="/usr/local/svn-install/current/bin/svnlook"
+#SVNLOOK="/usr/local/bin/svnlook"
+
+HOST="127.0.0.1"
+PORT=2069
+
+import sys
+try:
+ import simplejson as json
+except ImportError:
+ import json
+
+import urllib2
+
+import svnpubsub.util
+
def svnlook(cmd, **kwargs):
  """Run svnlook with CMD (a list of arguments) and return its output."""
  return svnpubsub.util.check_output([SVNLOOK] + cmd, **kwargs)
+
def svnlook_uuid(repo):
  """Return the UUID of the repository at REPO."""
  return svnlook(["uuid", "--", repo]).strip()
+
def svnlook_info(repo, revision):
  """Return {'author', 'date', 'log'} for REVISION of REPO via svnlook."""
  output = svnlook(["info", "-r", revision, "--", repo],
                   universal_newlines=True)
  lines = output.split("\n")
  # svnlook info emits: author, date, log-size, then the log message.
  return {'author': lines[0].strip(),
          'date': lines[1].strip(),
          'log': "\n".join(lines[3:]).strip()}
+
def svnlook_changed(repo, revision):
  """Return a dict mapping each changed path to {'flags': status-flags}."""
  output = svnlook(["changed", "-r", revision, "--", repo],
                   universal_newlines=True)
  changed = {}
  for raw in output.split("\n"):
    entry = raw.strip()
    if not entry:
      continue
    # Columns 0-2 hold the status flags; the path starts at column 4.
    changed[entry[4:]] = {'flags': entry[0:3]}
  return changed
+
def do_put(body):
  """PUT BODY (a JSON string) to the svnpubsub /commits endpoint."""
  opener = urllib2.build_opener(urllib2.HTTPHandler)
  request = urllib2.Request("http://%s:%d/commits" %(HOST, PORT), data=body)
  request.add_header('Content-Type', 'application/json')
  # urllib2 only issues GET/POST by itself; override the method to PUT.
  request.get_method = lambda: 'PUT'
  url = opener.open(request)
+
+
def main(repo, revision):
  """Gather metadata for REVISION of REPO and publish it to svnpubsub."""
  # Accept both "1234" and "r1234" forms of the revision argument.
  revision = revision.lstrip('r')
  i = svnlook_info(repo, revision)
  data = {'type': 'svn',
          'format': 1,
          'id': int(revision),
          'changed': {},
          'repository': svnlook_uuid(repo),
          'committer': i['author'],
          'log': i['log'],
          'date': i['date'],
          }
  data['changed'].update(svnlook_changed(repo, revision))
  body = json.dumps(data)
  do_put(body)

if __name__ == "__main__":
  # Invoked as a post-commit hook: argv[1] = repo path, argv[2] = revision.
  # A fourth argument is tolerated but ignored (only argv[1:3] is used).
  if len(sys.argv) not in (3, 4):
    sys.stderr.write("invalid args\n")
    sys.exit(1)

  main(*sys.argv[1:3])
diff --git a/tools/server-side/svnpubsub/daemonize.py b/tools/server-side/svnpubsub/daemonize.py
new file mode 100644
index 0000000..9f30e59
--- /dev/null
+++ b/tools/server-side/svnpubsub/daemonize.py
@@ -0,0 +1,339 @@
+# ---------------------------------------------------------------------------
+#
+# Copyright (c) 2005, Greg Stein
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# ---------------------------------------------------------------------------
+#
+# This software lives at:
+# http://gstein.googlecode.com/svn/trunk/python/daemonize.py
+#
+
+import os
+import signal
+import sys
+import time
+import stat
+import multiprocessing # requires Python 2.6
+
+
+# possible return values from Daemon.daemonize()
+DAEMON_RUNNING = 'The daemon is running'
+DAEMON_NOT_RUNNING = 'The daemon is not running'
+DAEMON_COMPLETE = 'The daemon has completed its operations'
+DAEMON_STARTED = 'The daemon has been started'
+
+
class Daemon(object):
  """Base class for a process that can run either in the foreground or as
  a double-forked daemon.

  Subclasses must implement setup() (one-time initialization) and run()
  (the daemon's main work)."""

  def __init__(self, logfile, pidfile):
    # LOGFILE receives the daemon's stdout/stderr; PIDFILE (may be None)
    # records the daemon's pid while it is running.
    self.logfile = logfile
    self.pidfile = pidfile

  def foreground(self):
    "Run in the foreground."
    ### we should probably create a pidfile. other systems may try to detect
    ### the pidfile to see if this "daemon" is running.
    self.setup()
    self.run()
    ### remove the pidfile

  def daemonize_exit(self):
    """Daemonize, then exit the calling process with an appropriate code."""
    try:
      result = self.daemonize()
    except (ChildFailed, DaemonFailed) as e:
      # duplicate the exit code
      sys.exit(e.code)
    except (ChildTerminatedAbnormally, ChildForkFailed,
            DaemonTerminatedAbnormally, DaemonForkFailed) as e:
      # FIX: this handler used the Python-2-only "except (...), e" form,
      # inconsistent with the "as e" syntax used just above (and a syntax
      # error on Python 3).
      sys.stderr.write('ERROR: %s\n' % e)
      sys.exit(1)
    except ChildResumedIncorrectly:
      sys.stderr.write('ERROR: continued after receiving unknown signal.\n')
      sys.exit(1)

    if result == DAEMON_STARTED or result == DAEMON_COMPLETE:
      sys.exit(0)
    elif result == DAEMON_NOT_RUNNING:
      sys.stderr.write('ERROR: the daemon exited with a success code '
                       'without signalling its startup.\n')
      sys.exit(1)

    # in original process. daemon is up and running. we're done.

  def daemonize(self):
    """Fork off the daemon; return one of the DAEMON_* status strings."""
    ### review error situations. map to backwards compat. ??
    ### be mindful of daemonize_exit().
    ### we should try and raise ChildFailed / ChildTerminatedAbnormally.
    ### ref: older revisions. OR: remove exceptions.

    child_is_ready = multiprocessing.Event()
    child_completed = multiprocessing.Event()

    p = multiprocessing.Process(target=self._first_child,
                                args=(child_is_ready, child_completed))
    p.start()

    # Wait for the child to finish setting things up (in case we need
    # to communicate with it). It will only exit when ready.
    ### use a timeout here! (parameterized, of course)
    p.join()

    ### need to propagate errors, to adjust the return codes
    if child_completed.is_set():
      ### what was the exit status?
      return DAEMON_COMPLETE
    if child_is_ready.is_set():
      return DAEMON_RUNNING

    ### how did we get here?! the immediate child should not exit without
    ### signalling ready/complete. some kind of error.
    return DAEMON_STARTED

  def _first_child(self, child_is_ready, child_completed):
    # we're in the child.

    ### NOTE: the original design was a bit bunk. Exceptions raised from
    ### this point are within the child processes. We need to signal the
    ### errors to the parent in other ways.

    # decouple from the parent process
    os.chdir('/')
    os.umask(0)
    os.setsid()

    # remember this pid so the second child can signal it.
    thispid = os.getpid()

    # if the daemon process exits before signalling readiness, then we
    # need to see the problem. trap SIGCHLD with a SignalCatcher.
    daemon_exit = SignalCatcher(signal.SIGCHLD)

    # perform the second fork
    try:
      pid = os.fork()
    except OSError as e:
      ### this won't make it to the parent process
      raise DaemonForkFailed(e.errno, e.strerror)

    if pid > 0:
      # in the parent.

      # Wait for the child to be ready for operation.
      while True:
        # The readiness event will invariably be signalled early/first.
        # If it *doesn't* get signalled because the child has prematurely
        # exited, then we will pause 10ms before noticing the exit. The
        # pause is acceptable since that is aberrant/unexpected behavior.
        ### is there a way to break this wait() on a signal such as SIGCHLD?
        ### parameterize this wait, in case the app knows children may
        ### fail quickly?
        if child_is_ready.wait(timeout=0.010):
          # The child signalled readiness. Yay!
          break
        if daemon_exit.signalled:
          # Whoops. The child exited without signalling :-(
          break
        # Python 2.6 compat: .wait() may exit when set, but return None
        if child_is_ready.is_set():
          break
        # A simple timeout. The child is taking a while to prepare. Go
        # back and wait for readiness.

      if daemon_exit.signalled:
        # Tell the parent that the child has exited.
        ### we need to communicate the exit status, if possible.
        child_completed.set()

        # reap the daemon process, getting its exit code. bubble it up.
        cpid, status = os.waitpid(pid, 0)
        assert pid == cpid
        if os.WIFEXITED(status):
          code = os.WEXITSTATUS(status)
          if code:
            ### this won't make it to the parent process
            raise DaemonFailed(code)
          ### this return value is ignored
          return DAEMON_NOT_RUNNING

        # the daemon did not exit cleanly.
        ### this won't make it to the parent process
        raise DaemonTerminatedAbnormally(status)

      # child_is_ready got asserted. the daemon is up and running, so
      # save the pid and return success.
      if self.pidfile:
        # Be wary of symlink attacks
        try:
          os.remove(self.pidfile)
        except OSError:
          pass
        fd = os.open(self.pidfile, os.O_WRONLY | os.O_CREAT | os.O_EXCL,
                     stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
        os.write(fd, '%d\n' % pid)
        os.close(fd)

      ### this return value is ignored
      return DAEMON_STARTED

      ### old code. what to do with this? throw ChildResumedIncorrectly
      ### or just toss this and the exception.
      # some other signal popped us out of the pause. the daemon might not
      # be running.
      ### this won't make it to the parent process
      raise ChildResumedIncorrectly()

    # we're a daemon now. get rid of the final remnants of the parent:
    # restore the signal handlers and switch std* to the proper files.
    signal.signal(signal.SIGUSR1, signal.SIG_DFL)
    signal.signal(signal.SIGCHLD, signal.SIG_DFL)
    sys.stdout.flush()
    sys.stderr.flush()
    si = open('/dev/null', 'r')
    so = open(self.logfile, 'a+')
    se = open(self.logfile, 'a+', 0)  # unbuffered
    os.dup2(si.fileno(), sys.stdin.fileno())
    os.dup2(so.fileno(), sys.stdout.fileno())
    os.dup2(se.fileno(), sys.stderr.fileno())
    # note: we could not inline the open() calls. after the fileno() completed,
    # the file would be closed, making the fileno invalid. gotta hold them
    # open until now:
    si.close()
    so.close()
    se.close()

    ### TEST: don't release the parent immediately. the whole parent stack
    ### should pause along with this sleep.
    #time.sleep(10)

    # everything is set up. call the initialization function.
    self.setup()

    ### TEST: exit before signalling.
    #sys.exit(0)
    #sys.exit(1)

    # the child is now ready for parent/anyone to communicate with it.
    child_is_ready.set()

    # start the daemon now.
    self.run()

    # The daemon is shutting down, so toss the pidfile.
    if self.pidfile:
      try:
        os.remove(self.pidfile)
      except OSError:
        pass

    ### this return value is ignored
    return DAEMON_COMPLETE

  def setup(self):
    """One-time initialization; subclasses must override."""
    raise NotImplementedError

  def run(self):
    """The daemon's main work; subclasses must override."""
    raise NotImplementedError
+
+
class _Detacher(Daemon):
  """Daemon subclass that runs a plain callable as the daemon body."""

  def __init__(self, target, logfile='/dev/null', pidfile=None,
               args=(), kwargs=None):
    """TARGET is invoked as target(*args, **kwargs) in the daemon process.

    KWARGS defaults to None rather than a shared mutable {} (the classic
    mutable-default-argument pitfall); it is normalized to a fresh dict
    here.
    """
    Daemon.__init__(self, logfile, pidfile)
    self.target = target
    self.args = args
    self.kwargs = kwargs if kwargs is not None else {}

  def setup(self):
    # A simple detached callable needs no extra initialization.
    pass

  def run(self):
    self.target(*self.args, **self.kwargs)
+
+
def run_detached(target, *args, **kwargs):
  """Detach and run TARGET(*args, **kwargs) as a daemon.

  This function does not return: sys.exit() is invoked once the daemon
  has started (or failed).  Capture SystemExit if the caller needs to
  log or report the outcome.
  ### if needed, a variant of this func could be written to not exit
  """
  detacher = _Detacher(target, args=args, kwargs=kwargs)
  detacher.daemonize_exit()
+
+
class SignalCatcher(object):
  """Install a handler for SIGNUM and record (in .signalled) whether the
  signal has been received; callers poll the flag."""

  def __init__(self, signum):
    # The flag is set before the handler is registered so a signal
    # arriving immediately after registration is never lost.
    self.signalled = False
    signal.signal(signum, self.sig_handler)

  def sig_handler(self, signum, frame):
    # Do the minimum possible inside the handler: just flip the flag.
    self.signalled = True
+
+
# Exception types reported by Daemon.daemonize()/daemonize_exit().
# "Child" refers to the intermediate (first-fork) process; "Daemon" to the
# final (second-fork) daemon process.

class ChildTerminatedAbnormally(Exception):
  "The child process terminated abnormally."
  def __init__(self, status):
    Exception.__init__(self, status)
    self.status = status  # raw waitpid() status word
  def __str__(self):
    return 'child terminated abnormally (0x%04x)' % self.status

class ChildFailed(Exception):
  "The child process exited with a failure code."
  def __init__(self, code):
    Exception.__init__(self, code)
    self.code = code  # the child's exit code
  def __str__(self):
    return 'child failed with exit code %d' % self.code

class ChildForkFailed(Exception):
  "The child process could not be forked."
  def __init__(self, errno, strerror):
    Exception.__init__(self, errno, strerror)
    self.errno = errno
    self.strerror = strerror
  def __str__(self):
    return 'child fork failed with error %d (%s)' % self.args

class ChildResumedIncorrectly(Exception):
  "The child resumed its operation incorrectly."

class DaemonTerminatedAbnormally(Exception):
  "The daemon process terminated abnormally."
  def __init__(self, status):
    Exception.__init__(self, status)
    self.status = status  # raw waitpid() status word
  def __str__(self):
    return 'daemon terminated abnormally (0x%04x)' % self.status

class DaemonFailed(Exception):
  "The daemon process exited with a failure code."
  def __init__(self, code):
    Exception.__init__(self, code)
    self.code = code  # the daemon's exit code
  def __str__(self):
    return 'daemon failed with exit code %d' % self.code

class DaemonForkFailed(Exception):
  "The daemon process could not be forked."
  def __init__(self, errno, strerror):
    Exception.__init__(self, errno, strerror)
    self.errno = errno
    self.strerror = strerror
  def __str__(self):
    return 'daemon fork failed with error %d (%s)' % self.args
diff --git a/tools/server-side/svnpubsub/irkerbridge.py b/tools/server-side/svnpubsub/irkerbridge.py
new file mode 100755
index 0000000..ba61c99
--- /dev/null
+++ b/tools/server-side/svnpubsub/irkerbridge.py
@@ -0,0 +1,329 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# IrkerBridge - Bridge an SvnPubSub stream to Irker.
+
+# Example:
+# irkerbridge.py --daemon --pidfile pid --logfile log config
+#
+# For detailed option help use:
+# irkerbridge.py --help
+
+# It expects a config file that has the following parameters:
+# streams=url
+# Space separated list of URLs to streams.
+# This option should only be in the DEFAULT section, is ignored in
+# all other sections.
+# irker=hostname:port
+# The hostname/port combination of the irker daemon. If port is
+# omitted it defaults to 6659. Irker is connected to over UDP.
+# match=What to use to decide if the commit should be sent to irker.
+# It consists of the repository UUID followed by a slash and a glob pattern.
+# The UUID may be replaced by a * to match all UUIDs. The glob pattern will
+# be matched against all of the dirs_changed. Both the UUID and the glob
+# pattern must match to send the message to irker.
+# to=url
+# Space separated list of URLs (any URL that Irker will accept) to
+# send the resulting message to. At current Irker only supports IRC.
+# template=string
+# A string to use to format the output. The string is a Python
+# string Template. The following variables are available:
+# $committer, $id, $date, $repository, $log, $log_firstline,
+# $log_firstparagraph, $dirs_changed, $dirs_count, $dirs_count_s,
+# $subdirs_count, $subdirs_count_s, $dirs_root
+# Most of them should be self explanatory. $dirs_count is the number of
+# entries in $dirs_changed, $dirs_count_s is a friendly string version,
+# $dirs_root is the common root of all the $dirs_changed, $subdirs_count
+# is the number of subdirs under the $dirs_root that changed,
+# $subdirs_count_s is a friendly string version. $log_firstparagraph cuts
+# the log message at the first blank line and replaces newlines with spaces.
+#
+# Within the config file you have sections. Any configuration option
+# missing from a given section is found in the [DEFAULT] section.
+#
+# Section names are arbitrary names that mean nothing to the bridge. Each
+# section other than the [DEFAULT] section consists of a configuration that
+# may match and send a message to irker to deliver. All matching sections
+# will generate a message.
+#
+# Interpolation of values within the config file is allowed by including
+# %(name)s within a value. For example I can reference the UUID of a repo
+# repeatedly by doing:
+# [DEFAULT]
+# ASF_REPO=13f79535-47bb-0310-9956-ffa450edef68
+#
+# [#commits]
+# match=%(ASF_REPO)s/
+#
+# You can HUP the process to reload the config file without restarting the
+# process. However, you cannot change the streams it is listening to without
+# restarting the process.
+#
+# TODO: Logging in a better way.
+
+# Messages longer than this will be truncated and ... added to the end such
+# that the resulting message is no longer than this:
+MAX_PRIVMSG = 400
+
+import os
+import sys
+import posixpath
+import socket
+import json
+import optparse
+import ConfigParser
+import traceback
+import signal
+import re
+import fnmatch
+from string import Template
+
+try:
+ # Python >=3.0
+ from urllib.parse import urlparse
+except ImportError:
+ # Python <3.0
+ from urlparse import urlparse
+
+
+# Packages that come with svnpubsub
+import svnpubsub.client
+import daemonize
+
+class Daemon(daemonize.Daemon):
+ def __init__(self, logfile, pidfile, bdec):
+ daemonize.Daemon.__init__(self, logfile, pidfile)
+
+ self.bdec = bdec
+
+ def setup(self):
+ # There is no setup which the parent needs to wait for.
+ pass
+
+ def run(self):
+ print('irkerbridge started, pid=%d' % (os.getpid()))
+
+ mc = svnpubsub.client.MultiClient(self.bdec.urls,
+ self.bdec.commit,
+ self.bdec.event)
+ mc.run_forever()
+
+
+class BigDoEverythingClass(object):
+ def __init__(self, config, options):
+ self.config = config
+ self.options = options
+ self.urls = config.get_value('streams').split()
+
+ def locate_matching_configs(self, commit):
+ result = [ ]
+ for section in self.config.sections():
+ match = self.config.get(section, "match").split('/', 1)
+ if len(match) < 2:
+ # No slash so assume all paths
+ match.append('*')
+ match_uuid, match_path = match
+ if commit.repository == match_uuid or match_uuid == "*":
+ for path in commit.changed:
+ if fnmatch.fnmatch(path, match_path):
+ result.append(section)
+ break
+ return result
+
+ def _generate_dirs_changed(self, commit):
+ if hasattr(commit, 'dirs_changed') or not hasattr(commit, 'changed'):
+ return
+
+ dirs_changed = set()
+ for p in commit.changed:
+ if p[-1] == '/' and commit.changed[p]['flags'][1] == 'U':
+ # directory with property changes add the directory itself.
+ dirs_changed.add(p)
+ else:
+ # everything else add the parent of the path
+ # directories have a trailing slash so if it's present remove
+ # it before finding the parent. The result will be a directory
+ # so it needs a trailing slash
+ dirs_changed.add(posixpath.dirname(p.rstrip('/')) + '/')
+
+ commit.dirs_changed = dirs_changed
+ return
+
+ def fill_in_extra_args(self, commit):
+ # Set any empty members to the string "<null>"
+ v = vars(commit)
+ for k in v.keys():
+ if not v[k]:
+ v[k] = '<null>'
+
+ self._generate_dirs_changed(commit)
+ # Add entries to the commit object that are useful for
+ # formatting.
+ commit.log_firstline = commit.log.split("\n",1)[0]
+ commit.log_firstparagraph = re.split("\r?\n\r?\n",commit.log,1)[0]
+ commit.log_firstparagraph = re.sub("\r?\n"," ",commit.log_firstparagraph)
+ if commit.dirs_changed:
+ commit.dirs_root = posixpath.commonprefix(commit.dirs_changed)
+ if commit.dirs_root == '':
+ commit.dirs_root = '/'
+ commit.dirs_count = len(commit.dirs_changed)
+ if commit.dirs_count > 1:
+ commit.dirs_count_s = " (%d dirs)" %(commit.dirs_count)
+ else:
+ commit.dirs_count_s = ""
+
+ commit.subdirs_count = commit.dirs_count
+ if commit.dirs_root in commit.dirs_changed:
+ commit.subdirs_count -= 1
+ if commit.subdirs_count >= 1:
+ commit.subdirs_count_s = " + %d subdirs" % (commit.subdirs_count)
+ else:
+ commit.subdirs_count_s = ""
+
+ def _send(self, irker, msg):
+ sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ irker_list = irker.split(':')
+ if len(irker_list) < 2:
+ irker_list.append(6659)
+ json_msg = json.dumps(msg)
+ sock.sendto(json_msg, (irker_list[0],int(irker_list[1])))
+ if self.options.verbose:
+ print("SENT: %s to %s" % (json_msg, irker))
+
+ def join_all(self):
+ # Like self.commit(), but ignores self.config.get(section, "template").
+ for section in self.config.sections():
+ irker = self.config.get(section, "irker")
+ to_list = self.config.get(section, "to").split()
+ if not irker or not to_list:
+ continue
+ for to in to_list:
+ msg = {'to': to, 'privmsg': ''}
+ self._send(irker, msg)
+
+ def commit(self, url, commit):
+ if self.options.verbose:
+ print("RECV: from %s" % url)
+ print(json.dumps(vars(commit), indent=2))
+
+ try:
+ config_sections = self.locate_matching_configs(commit)
+ if len(config_sections) > 0:
+ self.fill_in_extra_args(commit)
+ for section in config_sections:
+ irker = self.config.get(section, "irker")
+ to_list = self.config.get(section, "to").split()
+ template = self.config.get(section, "template")
+ if not irker or not to_list or not template:
+ continue
+ privmsg = Template(template).safe_substitute(vars(commit))
+ if len(privmsg) > MAX_PRIVMSG:
+ privmsg = privmsg[:MAX_PRIVMSG-3] + '...'
+ for to in to_list:
+ msg = {'to': to, 'privmsg': privmsg}
+ self._send(irker, msg)
+
+ except:
+ print("Unexpected error:")
+ traceback.print_exc()
+ sys.stdout.flush()
+ raise
+
+ def event(self, url, event_name, event_arg):
+ if self.options.verbose or event_name != "ping":
+ print('EVENT: %s from %s' % (event_name, url))
+ sys.stdout.flush()
+
+
+
+class ReloadableConfig(ConfigParser.SafeConfigParser):
+ def __init__(self, fname):
+ ConfigParser.SafeConfigParser.__init__(self)
+
+ self.fname = fname
+ self.read(fname)
+
+ signal.signal(signal.SIGHUP, self.hangup)
+
+ def hangup(self, signalnum, frame):
+ self.reload()
+
+ def reload(self):
+ print("RELOAD: config file: %s" % self.fname)
+ sys.stdout.flush()
+
+ # Delete everything. Just re-reading would overlay, and would not
+ # remove sections/options. Note that [DEFAULT] will not be removed.
+ for section in self.sections():
+ self.remove_section(section)
+
+ # Get rid of [DEFAULT]
+ self.remove_section(ConfigParser.DEFAULTSECT)
+
+ # Now re-read the configuration file.
+ self.read(self.fname)
+
+ def get_value(self, which):
+ return self.get(ConfigParser.DEFAULTSECT, which)
+
+
+def main(args):
+ parser = optparse.OptionParser(
+ description='An SvnPubSub client that bridges the data to irker.',
+ usage='Usage: %prog [options] CONFIG_FILE',
+ )
+ parser.add_option('--logfile',
+ help='filename for logging')
+ parser.add_option('--verbose', action='store_true',
+ help="enable verbose logging")
+ parser.add_option('--pidfile',
+ help="the process' PID will be written to this file")
+ parser.add_option('--daemon', action='store_true',
+ help='run as a background daemon')
+
+ options, extra = parser.parse_args(args)
+
+ if len(extra) != 1:
+ parser.error('CONFIG_FILE is requried')
+ config_file = os.path.abspath(extra[0])
+
+ logfile, pidfile = None, None
+ if options.daemon:
+ if options.logfile:
+ logfile = os.path.abspath(options.logfile)
+ else:
+ parser.error('LOGFILE is required when running as a daemon')
+
+ if options.pidfile:
+ pidfile = os.path.abspath(options.pidfile)
+ else:
+ parser.error('PIDFILE is required when running as a daemon')
+
+
+ config = ReloadableConfig(config_file)
+ bdec = BigDoEverythingClass(config, options)
+
+ d = Daemon(logfile, pidfile, bdec)
+ if options.daemon:
+ d.daemonize_exit()
+ else:
+ d.foreground()
+
+if __name__ == "__main__":
+ main(sys.argv[1:])
diff --git a/tools/server-side/svnpubsub/rc.d/svnpubsub b/tools/server-side/svnpubsub/rc.d/svnpubsub
new file mode 120000
index 0000000..b05e35e
--- /dev/null
+++ b/tools/server-side/svnpubsub/rc.d/svnpubsub
@@ -0,0 +1 @@
+svnpubsub.freebsd \ No newline at end of file
diff --git a/tools/server-side/svnpubsub/rc.d/svnpubsub.debian b/tools/server-side/svnpubsub/rc.d/svnpubsub.debian
new file mode 100755
index 0000000..c61057d
--- /dev/null
+++ b/tools/server-side/svnpubsub/rc.d/svnpubsub.debian
@@ -0,0 +1,62 @@
+#!/bin/bash
+### BEGIN INIT INFO
+# Provides: svnpubsub
+# Required-Start: $remote_fs
+# Required-Stop: $remote_fs
+# Default-Start: 2 3 4 5
+# Default-Stop: 0 1 6
+# Short-Description: SvnPubSub
+# Description: start SvnPubSub daemon
+### END INIT INFO
+
+. /lib/init/vars.sh
+. /lib/lsb/init-functions
+
+svnpubsub_user=${svnpubsub_user-"daemon"}
+svnpubsub_group=${svnpubsub_group-"daemon"}
+svnpubsub_reactor=${svnpubsub_reactor-"poll"}
+svnpubsub_pidfile=${svnpubsub_pidfile-"/var/run/svnpubsub.pid"}
+pidfile="${svnpubsub_pidfile}"
+
+TWSITD_CMD="/usr/bin/twistd -y /opt/svnpubsub/svnpubsub.tac \
+ --logfile=/var/log/svnpubsub/svnpubsub.log \
+ --pidfile=${pidfile} \
+ --uid=${svnpubsub_user} --gid=${svnpubsub_user} \
+ -r${svnpubsub_reactor}"
+
+RETVAL=0
+
+start() {
+ echo "Starting SvnPubSub Server: "
+ $TWSITD_CMD
+ RETVAL=$?
+ [ $RETVAL -eq 0 ] && echo "ok" || echo "failed"
+ return $RETVAL
+}
+
+stop() {
+ echo "Stopping SvnPubSub Server: "
+ THE_PID=`cat ${pidfile}`
+ kill $THE_PID
+ RETVAL=$?
+ [ $RETVAL -eq 0 ] && echo "ok" || echo "failed"
+ return $RETVAL
+}
+
+case "$1" in
+ start)
+ start
+ ;;
+ stop)
+ stop
+ ;;
+ restart)
+ stop
+ start
+ ;;
+ *)
+ echo "Usage: $0 {start|stop|restart}"
+ exit 1
+esac
+
+exit $RETVAL
diff --git a/tools/server-side/svnpubsub/rc.d/svnpubsub.freebsd b/tools/server-side/svnpubsub/rc.d/svnpubsub.freebsd
new file mode 100755
index 0000000..79b5901
--- /dev/null
+++ b/tools/server-side/svnpubsub/rc.d/svnpubsub.freebsd
@@ -0,0 +1,37 @@
+#!/bin/sh
+#
+# PROVIDE: svnpubsub
+# REQUIRE: DAEMON
+# KEYWORD: shutdown
+
+. /etc/rc.subr
+
+name="svnpubsub"
+rcvar=`set_rcvar`
+
+load_rc_config $name
+
+#
+# DO NOT CHANGE THESE DEFAULT VALUES HERE
+# SET THEM IN THE /etc/rc.conf FILE
+#
+svnpubsub_enable=${svnpubsub_enable-"NO"}
+svnpubsub_user=${svnpubsub_user-"svn"}
+svnpubsub_group=${svnpubsub_group-"svn"}
+svnpubsub_reactor=${svnpubsub_reactor-"poll"}
+svnpubsub_pidfile=${svnpubsub_pidfile-"/var/run/svnpubsub/svnpubsub.pid"}
+svnpubsub_cmd_int=${svnpubsub_cmd_int-"python"}
+pidfile="${svnpubsub_pidfile}"
+
+export PYTHON_EGG_CACHE="/home/svn/.python-eggs"
+
+command="/usr/local/bin/twistd"
+command_interpreter="/usr/local/bin/${svnpubsub_cmd_int}"
+command_args="-y /usr/local/svnpubsub/svnpubsub.tac \
+ --logfile=/var/log/vc/svnpubsub.log \
+ --pidfile=${pidfile} \
+ --uid=${svnpubsub_user} --gid=${svnpubsub_user} \
+ -r${svnpubsub_reactor}"
+
+
+run_rc_command "$1"
diff --git a/tools/server-side/svnpubsub/rc.d/svnpubsub.solaris b/tools/server-side/svnpubsub/rc.d/svnpubsub.solaris
new file mode 100755
index 0000000..3a9cf9f
--- /dev/null
+++ b/tools/server-side/svnpubsub/rc.d/svnpubsub.solaris
@@ -0,0 +1,53 @@
+#!/usr/bin/bash
+#
+# A minimal init script for twistd on Solaris, used in place of an SMF XML manifest.
+#
+
+svnpubsub_user=${svnpubsub_user-"daemon"}
+svnpubsub_group=${svnpubsub_group-"daemon"}
+svnpubsub_reactor=${svnpubsub_reactor-"poll"}
+svnpubsub_pidfile=${svnpubsub_pidfile-"/var/run/svnpubsub/svnpubsub.pid"}
+pidfile="${svnpubsub_pidfile}"
+
+TWSITD_CMD="/opt/local/bin//twistd -y /usr/local/svnpubsub/svnpubsub.tac \
+ --logfile=/x1/log/svnpubsub.log \
+ --pidfile=${pidfile} \
+ --uid=${svnpubsub_user} --gid=${svnpubsub_user} \
+ -r${svnpubsub_reactor}"
+
+RETVAL=0
+
+start() {
+ echo "Starting SvnPubSub Server: "
+ $TWSITD_CMD
+ RETVAL=$?
+ [ $RETVAL -eq 0 ] && echo "ok" || echo "failed"
+ return $RETVAL
+}
+
+stop() {
+ echo "Stopping SvnPubSub Server: "
+ THE_PID=`cat ${pidfile}`
+ kill $THE_PID
+ RETVAL=$?
+ [ $RETVAL -eq 0 ] && echo "ok" || echo "failed"
+ return $RETVAL
+}
+
+case "$1" in
+ start)
+ start
+ ;;
+ stop)
+ stop
+ ;;
+ restart)
+ stop
+ start
+ ;;
+ *)
+ echo "Usage: $0 {start|stop|restart}"
+ exit 1
+esac
+
+exit $RETVAL
diff --git a/tools/server-side/svnpubsub/rc.d/svnwcsub b/tools/server-side/svnpubsub/rc.d/svnwcsub
new file mode 120000
index 0000000..310fcbe
--- /dev/null
+++ b/tools/server-side/svnpubsub/rc.d/svnwcsub
@@ -0,0 +1 @@
+svnwcsub.freebsd \ No newline at end of file
diff --git a/tools/server-side/svnpubsub/rc.d/svnwcsub.debian b/tools/server-side/svnpubsub/rc.d/svnwcsub.debian
new file mode 100755
index 0000000..caf5511
--- /dev/null
+++ b/tools/server-side/svnpubsub/rc.d/svnwcsub.debian
@@ -0,0 +1,65 @@
+#!/bin/bash
+### BEGIN INIT INFO
+# Provides: svnwcsub
+# Required-Start: $remote_fs
+# Required-Stop: $remote_fs
+# Default-Start: 2 3 4 5
+# Default-Stop: 0 1 6
+# Short-Description: SvnWcSub
+# Description: start SvnWcSub daemon
+### END INIT INFO
+
+. /lib/init/vars.sh
+. /lib/lsb/init-functions
+
+svnwcsub_user=${svnwcsub_user-"svnwc"}
+svnwcsub_group=${svnwcsub_group-"svnwc"}
+svnwcsub_pidfile=${svnwcsub_pidfile-"/var/run/svnwcsub.pid"}
+svnwcsub_config=${svnwcsub_config-"/etc/svnwcsub.conf"}
+svnwcsub_logfile=${svnwcsub_logfile-"/var/log/svnwcsub/svnwcsub.log"}
+pidfile="${svnwcsub_pidfile}"
+
+SVNWCSUB_CMD="/opt/svnpubsub/svnwcsub.py \
+ --daemon \
+ --logfile=${svnwcsub_logfile} \
+ --pidfile=${pidfile} \
+ --uid=${svnwcsub_user} --gid=${svnwcsub_group} \
+ --umask=002 \
+ ${svnwcsub_config} "
+
+RETVAL=0
+
+start() {
+ echo "Starting SvnWcSub Server: "
+ $SVNWCSUB_CMD
+ RETVAL=$?
+ [ $RETVAL -eq 0 ] && echo "ok" || echo "failed"
+ return $RETVAL
+}
+
+stop() {
+ echo "Stopping SvnWcSub Server: "
+ THE_PID=`cat ${pidfile}`
+ kill $THE_PID
+ RETVAL=$?
+ [ $RETVAL -eq 0 ] && echo "ok" || echo "failed"
+ return $RETVAL
+}
+
+case "$1" in
+ start)
+ start
+ ;;
+ stop)
+ stop
+ ;;
+ restart)
+ stop
+ start
+ ;;
+ *)
+ echo "Usage: $0 {start|stop|restart}"
+ exit 1
+esac
+
+exit $RETVAL
diff --git a/tools/server-side/svnpubsub/rc.d/svnwcsub.freebsd b/tools/server-side/svnpubsub/rc.d/svnwcsub.freebsd
new file mode 100755
index 0000000..58ad386
--- /dev/null
+++ b/tools/server-side/svnpubsub/rc.d/svnwcsub.freebsd
@@ -0,0 +1,39 @@
+#!/bin/sh
+#
+# PROVIDE: svnwcsub
+# REQUIRE: DAEMON
+# KEYWORD: shutdown
+
+. /etc/rc.subr
+
+name="svnwcsub"
+rcvar=`set_rcvar`
+
+load_rc_config $name
+
+#
+# DO NOT CHANGE THESE DEFAULT VALUES HERE
+# SET THEM IN THE /etc/rc.conf FILE
+#
+svnwcsub_enable=${svnwcsub_enable-"NO"}
+svnwcsub_user=${svnwcsub_user-"svnwc"}
+svnwcsub_group=${svnwcsub_group-"svnwc"}
+svnwcsub_pidfile=${svnwcsub_pidfile-"/var/run/svnwcsub/svnwcsub.pub"}
+svnwcsub_env="PYTHON_EGG_CACHE"
+svnwcsub_cmd_int=${svnwcsub_cmd_int-"python"}
+svnwcsub_config=${svnwcsub_config-"/etc/svnwcsub.conf"}
+svnwcsub_logfile=${svnwcsub_logfile-"/var/log/svnwcsub/svnwcsub.log"}
+pidfile="${svnwcsub_pidfile}"
+
+export PYTHON_EGG_CACHE="/var/run/svnwcsub"
+
+command="/usr/local/svnpubsub/svnwcsub.py"
+command_interpreter="/usr/local/bin/${svnwcsub_cmd_int}"
+command_args="--daemon \
+ --logfile=${svnwcsub_logfile} \
+ --pidfile=${pidfile} \
+ --uid=${svnwcsub_user} --gid=${svnwcsub_group} \
+ --umask=002 \
+ ${svnwcsub_config}"
+
+run_rc_command "$1"
diff --git a/tools/server-side/svnpubsub/rc.d/svnwcsub.solaris b/tools/server-side/svnpubsub/rc.d/svnwcsub.solaris
new file mode 100755
index 0000000..bd0c2bd
--- /dev/null
+++ b/tools/server-side/svnpubsub/rc.d/svnwcsub.solaris
@@ -0,0 +1,56 @@
+#!/usr/bin/bash
+#
+# A minimal init script for twistd on Solaris, used in place of an SMF XML manifest.
+#
+
+svnwcsub_user=${svnwcsub_user-"svnwc"}
+svnwcsub_group=${svnwcsub_group-"other"}
+svnwcsub_pidfile=${svnwcsub_pidfile-"/var/run/svnwcsub/svnwcsub.pid"}
+svnwcsub_config=${svnwcsub_config-"/etc/svnwcsub.conf"}
+svnwcsub_logfile=${svnwcsub_logfile-"/x1/log/svnwcsub/svnwcsub.log"}
+pidfile="${svnwcsub_pidfile}"
+
+SVNWCSUB_CMD="/usr/local/svnpubsub/svnwcsub.py \
+ --daemon \
+ --logfile=${svnwcsub_logfile} \
+ --pidfile=${pidfile} \
+ --uid=${svnwcsub_user} --gid=${svnwcsub_group} \
+ --umask=002 \
+ ${svnwcsub_config}"
+
+RETVAL=0
+
+start() {
+ echo "Starting SvnWcSub Server: "
+ $SVNWCSUB_CMD
+ RETVAL=$?
+ [ $RETVAL -eq 0 ] && echo "ok" || echo "failed"
+ return $RETVAL
+}
+
+stop() {
+ echo "Stopping SvnWcSub Server: "
+ THE_PID=`cat ${pidfile}`
+ kill $THE_PID
+ RETVAL=$?
+ [ $RETVAL -eq 0 ] && echo "ok" || echo "failed"
+ return $RETVAL
+}
+
+case "$1" in
+ start)
+ start
+ ;;
+ stop)
+ stop
+ ;;
+ restart)
+ stop
+ start
+ ;;
+ *)
+ echo "Usage: $0 {start|stop|restart}"
+ exit 1
+esac
+
+exit $RETVAL
diff --git a/tools/server-side/svnpubsub/revprop-change-hook.py b/tools/server-side/svnpubsub/revprop-change-hook.py
new file mode 100755
index 0000000..3aa857b
--- /dev/null
+++ b/tools/server-side/svnpubsub/revprop-change-hook.py
@@ -0,0 +1,90 @@
+#!/usr/local/bin/python
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+SVNLOOK="/usr/local/svn-install/current/bin/svnlook"
+#SVNLOOK="/usr/local/bin/svnlook"
+
+HOST="127.0.0.1"
+PORT=2069
+
+import sys
+try:
+ import simplejson as json
+except ImportError:
+ import json
+
+import urllib2
+
+
+import svnpubsub.util
+
+def svnlook(cmd, **kwargs):
+ args = [SVNLOOK] + cmd
+ return svnpubsub.util.check_output(args, **kwargs)
+
+def svnlook_uuid(repo):
+ cmd = ["uuid", "--", repo]
+ return svnlook(cmd).strip()
+
+def svnlook_revprop(repo, revision, propname):
+ cmd = ["propget", "-r", revision, "--revprop", "--", repo, propname]
+ data = svnlook(cmd)
+ #print data
+ return data
+
+def do_put(body):
+ opener = urllib2.build_opener(urllib2.HTTPHandler)
+ request = urllib2.Request("http://%s:%d/metadata" %(HOST, PORT), data=body)
+ request.add_header('Content-Type', 'application/json')
+ request.get_method = lambda: 'PUT'
+ url = opener.open(request)
+
+
+def main(repo, revision, author, propname, action):
+ revision = revision.lstrip('r')
+ if action in ('A', 'M'):
+ new_value = svnlook_revprop(repo, revision, propname)
+ elif action == 'D':
+ new_value = None
+ else:
+ sys.stderr.write('Unknown revprop change action "%s"\n' % action)
+ sys.exit(1)
+ if action in ('D', 'M'):
+ old_value = sys.stdin.read()
+ else:
+ old_value = None
+ data = {'type': 'svn',
+ 'format': 1,
+ 'id': int(revision),
+ 'repository': svnlook_uuid(repo),
+ 'revprop': {
+ 'name': propname,
+ 'committer': author,
+ 'value': new_value,
+ 'old_value': old_value,
+ }
+ }
+ body = json.dumps(data)
+ do_put(body)
+
+if __name__ == "__main__":
+ if len(sys.argv) != 6:
+ sys.stderr.write("invalid args\n")
+ sys.exit(1)
+
+ main(*sys.argv[1:6])
diff --git a/tools/server-side/svnpubsub/svnpubsub.tac b/tools/server-side/svnpubsub/svnpubsub.tac
new file mode 100644
index 0000000..574ad24
--- /dev/null
+++ b/tools/server-side/svnpubsub/svnpubsub.tac
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import sys
+import os
+from twisted.application import service, internet
+
+sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
+
+from svnpubsub.server import svnpubsub_server
+
+application = service.Application("SvnPubSub")
+
+def get_service():
+ return internet.TCPServer(2069, svnpubsub_server())
+
+service = get_service()
+service.setServiceParent(application)
diff --git a/tools/server-side/svnpubsub/svnpubsub/__init__.py b/tools/server-side/svnpubsub/svnpubsub/__init__.py
new file mode 100644
index 0000000..f50e195
--- /dev/null
+++ b/tools/server-side/svnpubsub/svnpubsub/__init__.py
@@ -0,0 +1 @@
+# Turn svnpubsub/ into a package.
diff --git a/tools/server-side/svnpubsub/svnpubsub/client.py b/tools/server-side/svnpubsub/svnpubsub/client.py
new file mode 100644
index 0000000..871a5e9
--- /dev/null
+++ b/tools/server-side/svnpubsub/svnpubsub/client.py
@@ -0,0 +1,252 @@
+#!/usr/bin/env python
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# Generic client for SvnPubSub
+#
+# ### usage...
+#
+#
+# EVENTS
+#
+# connected: a connection to the server has been opened (though not
+# necessarily established)
+# closed: the connection was closed. reconnect will be attempted.
+# error: an error closed the connection. reconnect will be attempted.
+# ping: the server has sent a keepalive
+# stale: no activity has been seen, so the connection will be closed
+# and reopened
+#
+
+import asyncore
+import asynchat
+import socket
+import functools
+import time
+import json
+try:
+ import urlparse
+except ImportError:
+ import urllib.parse as urlparse
+
+# How long the polling loop should wait for activity before returning.
+TIMEOUT = 30.0
+
+# Always delay a bit when trying to reconnect. This is not precise, but sets
+# a minimum amount of delay. At the moment, there is no further backoff.
+RECONNECT_DELAY = 25.0
+
+# If we don't see anything from the server for this amount time, then we
+# will drop and reconnect. The TCP connection may have gone down without
+# us noticing it somehow.
+STALE_DELAY = 60.0
+
+
+class SvnpubsubClientException(Exception):
+ pass
+
+class Client(asynchat.async_chat):
+
+ def __init__(self, url, commit_callback, event_callback,
+ metadata_callback = None):
+ asynchat.async_chat.__init__(self)
+
+ self.last_activity = time.time()
+ self.ibuffer = []
+
+ self.url = url
+ parsed_url = urlparse.urlsplit(url)
+ if parsed_url.scheme != 'http':
+ raise ValueError("URL scheme must be http: '%s'" % url)
+ host = parsed_url.hostname
+ port = parsed_url.port
+ resource = parsed_url.path
+ if parsed_url.query:
+ resource += "?%s" % parsed_url.query
+ if parsed_url.fragment:
+ resource += "#%s" % parsed_url.fragment
+
+ self.event_callback = event_callback
+
+ self.parser = JSONRecordHandler(commit_callback, event_callback,
+ metadata_callback)
+
+ # Wait for the end of headers. Then we start parsing JSON.
+ self.set_terminator(b'\r\n\r\n')
+ self.skipping_headers = True
+
+ self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
+ try:
+ self.connect((host, port))
+ except:
+ self.handle_error()
+ return
+
+ self.push(('GET %s HTTP/1.0\r\n\r\n' % resource).encode('ascii'))
+
+ def handle_connect(self):
+ self.event_callback('connected', None)
+
+ def handle_close(self):
+ self.event_callback('closed', None)
+ self.close()
+
+ def handle_error(self):
+ self.event_callback('error', None)
+ self.close()
+
+ def found_terminator(self):
+ if self.skipping_headers:
+ self.skipping_headers = False
+ # Each JSON record is terminated by a null character
+ self.set_terminator(b'\0')
+ else:
+ record = b"".join(self.ibuffer)
+ self.ibuffer = []
+ self.parser.feed(record.decode())
+
+ def collect_incoming_data(self, data):
+ # Remember the last time we saw activity
+ self.last_activity = time.time()
+
+ if not self.skipping_headers:
+ self.ibuffer.append(data)
+
+
+class Notification(object):
+ def __init__(self, data):
+ self.__dict__.update(data)
+
+class Commit(Notification):
+ KIND = 'COMMIT'
+
+class Metadata(Notification):
+ KIND = 'METADATA'
+
+
+class JSONRecordHandler:
+ def __init__(self, commit_callback, event_callback, metadata_callback):
+ self.commit_callback = commit_callback
+ self.event_callback = event_callback
+ self.metadata_callback = metadata_callback
+
+ EXPECTED_VERSION = 1
+
+ def feed(self, record):
+ obj = json.loads(record)
+ if 'svnpubsub' in obj:
+ actual_version = obj['svnpubsub'].get('version')
+ if actual_version != self.EXPECTED_VERSION:
+ raise SvnpubsubClientException(
+ "Unknown svnpubsub format: %r != %d"
+ % (actual_version, self.EXPECTED_VERSION))
+ self.event_callback('version', obj['svnpubsub']['version'])
+ elif 'commit' in obj:
+ commit = Commit(obj['commit'])
+ self.commit_callback(commit)
+ elif 'stillalive' in obj:
+ self.event_callback('ping', obj['stillalive'])
+ elif 'metadata' in obj and self.metadata_callback:
+ metadata = Metadata(obj['metadata'])
+ self.metadata_callback(metadata)
+
+
+class MultiClient(object):
+ def __init__(self, urls, commit_callback, event_callback,
+ metadata_callback = None):
+ self.commit_callback = commit_callback
+ self.event_callback = event_callback
+ self.metadata_callback = metadata_callback
+
+ # No target time, as no work to do
+ self.target_time = 0
+ self.work_items = [ ]
+
+ for url in urls:
+ self._add_channel(url)
+
+ def _reconnect(self, url, event_name, event_arg):
+ if event_name == 'closed' or event_name == 'error':
+ # Stupid connection closed for some reason. Set up a reconnect. Note
+ # that it should have been removed from asyncore.socket_map already.
+ self._reconnect_later(url)
+
+ # Call the user's callback now.
+ self.event_callback(url, event_name, event_arg)
+
+ def _reconnect_later(self, url):
+ # Set up a work item to reconnect in a little while.
+ self.work_items.append(url)
+
+ # Only set a target if one has not been set yet. Otherwise, we could
+ # create a race condition of continually moving out towards the future
+ if not self.target_time:
+ self.target_time = time.time() + RECONNECT_DELAY
+
+ def _add_channel(self, url):
+ # Simply instantiating the client will install it into the global map
+ # for processing in the main event loop.
+ if self.metadata_callback:
+ Client(url,
+ functools.partial(self.commit_callback, url),
+ functools.partial(self._reconnect, url),
+ functools.partial(self.metadata_callback, url))
+ else:
+ Client(url,
+ functools.partial(self.commit_callback, url),
+ functools.partial(self._reconnect, url))
+
+ def _check_stale(self):
+ now = time.time()
+ for client in asyncore.socket_map.values():
+ if client.last_activity + STALE_DELAY < now:
+ # Whoops. No activity in a while. Signal this fact, Close the
+ # Client, then have it reconnected later on.
+ self.event_callback(client.url, 'stale', client.last_activity)
+
+ # This should remove it from .socket_map.
+ client.close()
+
+ self._reconnect_later(client.url)
+
+ def _maybe_work(self):
+ # If we haven't reach the targetted time, or have no work to do,
+ # then fast-path exit
+ if time.time() < self.target_time or not self.work_items:
+ return
+
+ # We'll take care of all the work items, so no target for future work
+ self.target_time = 0
+
+ # Play a little dance just in case work gets added while we're
+ # currently working on stuff
+ work = self.work_items
+ self.work_items = [ ]
+
+ for url in work:
+ self._add_channel(url)
+
+ def run_forever(self):
+ while True:
+ if asyncore.socket_map:
+ asyncore.loop(timeout=TIMEOUT, count=1)
+ else:
+ time.sleep(TIMEOUT)
+
+ self._check_stale()
+ self._maybe_work()
diff --git a/tools/server-side/svnpubsub/svnpubsub/server.py b/tools/server-side/svnpubsub/svnpubsub/server.py
new file mode 100644
index 0000000..d0cdff9
--- /dev/null
+++ b/tools/server-side/svnpubsub/svnpubsub/server.py
@@ -0,0 +1,289 @@
+#!/usr/bin/env python
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# SvnPubSub - Simple Push Notification of Subversion commits
+#
+# Based on the theory behind the Live Journal Atom Streaming Service:
+# <http://atom.services.livejournal.com/>
+#
+# Instead of using a complicated XMPP/AMPQ/JMS/super messaging service,
+# we have simple HTTP GETs and PUTs to get data in and out.
+#
+# Currently supports JSON serialization.
+#
+# Example Sub clients:
+# curl -sN http://127.0.0.1:2069/commits
+# curl -sN 'http://127.0.0.1:2069/commits/svn/*'
+# curl -sN http://127.0.0.1:2069/commits/svn
+# curl -sN 'http://127.0.0.1:2069/commits/*/13f79535-47bb-0310-9956-ffa450edef68'
+# curl -sN http://127.0.0.1:2069/commits/svn/13f79535-47bb-0310-9956-ffa450edef68
+#
+# curl -sN http://127.0.0.1:2069/metadata
+# curl -sN 'http://127.0.0.1:2069/metadata/svn/*'
+# curl -sN http://127.0.0.1:2069/metadata/svn
+# curl -sN 'http://127.0.0.1:2069/metadata/*/13f79535-47bb-0310-9956-ffa450edef68'
+# curl -sN http://127.0.0.1:2069/metadata/svn/13f79535-47bb-0310-9956-ffa450edef68
+#
+# URLs are constructed from 3 parts:
+# /${notification}/${optional_type}/${optional_repository}
+#
+# Notifications can be sent for commits or metadata (e.g., revprop) changes.
+# If the type is included in the URL, you will only get notifications of that type.
+# The type can be * and then you will receive notifications of any type.
+#
+# If the repository is included in the URL, you will only receive
+# messages about that repository. The repository can be * and then you
+# will receive messages about all repositories.
+#
+# Example Pub clients:
+# curl -T revinfo.json -i http://127.0.0.1:2069/commits
+#
+# TODO:
+# - Add Real access controls (not just 127.0.0.1)
+# - Document PUT format
+# - Convert to twisted.python.log
+
+
+
+
+try:
+ import simplejson as json
+except ImportError:
+ import json
+
+import sys
+
+import twisted
+from twisted.internet import reactor
+from twisted.internet import defer
+from twisted.web import server
+from twisted.web import resource
+from twisted.python import log
+
+import time
+
class Notification(object):
    """Base class for one pubsub notification (a decoded JSON payload).

    Every key of the payload dict becomes an instance attribute.  The
    required fields must be present and truthy, otherwise ValueError is
    raised.  Subclasses provide KIND plus render()/render_log().
    """

    def __init__(self, r):
        self.__dict__.update(r)
        for field, label in (('repository', 'Repository'),
                             ('type', 'Type'),
                             ('format', 'Format'),
                             ('id', 'ID')):
            if not self.check_value(field):
                raise ValueError('Invalid %s Value' % label)

    def check_value(self, k):
        # Truthy value of attribute k, or False when it was not supplied.
        return self.__dict__.get(k, False)

    def render(self):
        raise NotImplementedError

    def render_log(self):
        raise NotImplementedError
+
class Commit(Notification):
    """A commit notification; serializes as {"commit": {...}}."""

    KIND = 'COMMIT'

    def render(self):
        """Return the JSON wire form of this notification."""
        obj = {'commit': {}}
        obj['commit'].update(self.__dict__)
        return json.dumps(obj)

    def render_log(self):
        """Return a one-line human-readable summary for the server log."""
        # 'changed' is optional and may not be sized; omit the path count
        # rather than crash the logging path.  (Was a bare 'except:',
        # which also swallowed KeyboardInterrupt/SystemExit.)
        try:
            paths_changed = " %d paths changed" % len(self.changed)
        except (AttributeError, TypeError):
            paths_changed = ""
        return "commit %s:%s repo '%s' id '%s'%s" % (
            self.type, self.format, self.repository, self.id,
            paths_changed)
+
class Metadata(Notification):
    """A metadata-change (e.g. revprop) notification; serializes as
    {"metadata": {...}}."""

    KIND = 'METADATA'

    def render(self):
        """Return the JSON wire form of this notification."""
        return json.dumps({'metadata': dict(self.__dict__)})

    def render_log(self):
        """Return a one-line human-readable summary for the server log."""
        return "metadata %s:%s repo '%s' id '%s' revprop '%s'" % (
            self.type, self.format, self.repository, self.id,
            self.revprop['name'])
+
+
+HEARTBEAT_TIME = 15
+
class Client(object):
    """One connected HTTP subscriber.

    Wraps a twisted.web request that is kept open; the server streams
    newline+NUL delimited JSON records to it and sends periodic
    heartbeats so dead connections are noticed.
    """

    def __init__(self, pubsub, r, kind, type, repository):
        # pubsub: owning SvnPubSub resource (used to deregister on close)
        # r: the live twisted.web request object
        # kind: notification kind to deliver (Commit.KIND or Metadata.KIND)
        # type, repository: optional filters; None means "any"
        self.pubsub = pubsub
        # When the subscriber disconnects, notifyFinish() errbacks; use
        # that to mark ourselves dead and deregister.
        r.notifyFinish().addErrback(self.finished)
        self.r = r
        self.kind = kind
        self.type = type
        self.repository = repository
        self.alive = True
        log.msg("OPEN: %s:%d (%d clients online)"% (r.getClientIP(), r.client.port, pubsub.cc()+1))

    def finished(self, reason):
        """Errback fired when the request closes; deregister this client."""
        self.alive = False
        log.msg("CLOSE: %s:%d (%d clients online)"% (self.r.getClientIP(), self.r.client.port, self.pubsub.cc()))
        try:
            self.pubsub.remove(self)
        except ValueError:
            # Already removed; nothing to do.
            pass

    def interested_in(self, notification):
        """Return True when this client's kind/type/repository filters match."""
        if self.kind != notification.KIND:
            return False

        if self.type and self.type != notification.type:
            return False

        if self.repository and self.repository != notification.repository:
            return False

        return True

    def notify(self, data):
        self.write(data)

    def start(self):
        """Send the stream preamble and kick off the heartbeat cycle."""
        self.write_start()
        reactor.callLater(HEARTBEAT_TIME, self.heartbeat, None)

    def heartbeat(self, args):
        # Keep rescheduling while the connection is alive; the chain
        # stops once finished() has marked us dead.
        if self.alive:
            self.write_heartbeat()
            reactor.callLater(HEARTBEAT_TIME, self.heartbeat, None)

    def write_data(self, data):
        # Records on the wire are delimited by newline + NUL.
        self.write(data + "\n\0")

    """ "Data must not be unicode" is what the interfaces.ITransport says... grr. """
    def write(self, input):
        self.r.write(str(input))

    def write_start(self):
        # Stream preamble: version header plus the initial JSON record.
        self.r.setHeader('X-SVNPubSub-Version', '1')
        self.r.setHeader('content-type', 'application/vnd.apache.vc-notify+json')
        self.write('{"svnpubsub": {"version": 1}}\n\0')

    def write_heartbeat(self):
        self.write(json.dumps({"stillalive": time.time()}) + "\n\0")
+
+
class SvnPubSub(resource.Resource):
    """Twisted resource that accepts PUT notifications and streams them
    to long-polling GET subscribers.

    URL form: /${notification}/${optional_type}/${optional_repository},
    where a type/repository of '*' (or omission) means "any".
    """
    isLeaf = True

    __notification_uri_map = {'commits': Commit.KIND,
                              'metadata': Metadata.KIND}

    def __init__(self, notification_class):
        resource.Resource.__init__(self)
        self.__notification_class = notification_class
        # Per-instance subscriber list.  (This was previously a mutable
        # class attribute, silently shared by the 'commits' and
        # 'metadata' resource instances.)
        self.clients = []

    def cc(self):
        """Return the number of connected clients."""
        return len(self.clients)

    def remove(self, c):
        self.clients.remove(c)

    def render_GET(self, request):
        """Subscribe the requester: parse the filter path and stream forever."""
        log.msg("REQUEST: %s" % (request.uri))
        request.setHeader('content-type', 'text/plain')

        repository = None
        type = None

        uri = request.uri.split('/')
        uri_len = len(uri)
        if uri_len < 2 or uri_len > 4:
            request.setResponseCode(400)
            return "Invalid path\n"

        kind = self.__notification_uri_map.get(uri[1], None)
        if kind is None:
            request.setResponseCode(400)
            return "Invalid path\n"

        if uri_len >= 3:
            type = uri[2]

        if uri_len == 4:
            repository = uri[3]

        # Convert wild card to None (None already means "any").
        if type == '*':
            type = None
        if repository == '*':
            repository = None

        c = Client(self, request, kind, type, repository)
        self.clients.append(c)
        c.start()
        # Keep the request open; the Client streams to it asynchronously.
        return twisted.web.server.NOT_DONE_YET

    def notifyAll(self, notification):
        """Deliver *notification* to every client whose filters match."""
        data = notification.render()

        log.msg("%s: %s (%d clients)"
                % (notification.KIND, notification.render_log(), self.cc()))
        for client in self.clients:
            if client.interested_in(notification):
                client.write_data(data)

    def render_PUT(self, request):
        """Accept a JSON notification from loopback and broadcast it."""
        request.setHeader('content-type', 'text/plain')
        # Minimal access control: only loopback may publish.
        ip = request.getClientIP()
        if ip != "127.0.0.1":
            request.setResponseCode(401)
            return "Access Denied"
        input = request.content.read()
        try:
            data = json.loads(input)
            notification = self.__notification_class(data)
        except ValueError as e:
            request.setResponseCode(400)
            errstr = str(e)
            # Use the class's KIND here: 'notification' is unbound when
            # json.loads() or the constructor itself raised, so the
            # original log line died with a NameError instead of
            # reporting the parse failure.
            log.msg("%s: failed due to: %s"
                    % (self.__notification_class.KIND, errstr))
            return errstr
        self.notifyAll(notification)
        return "Ok"
+
+
def svnpubsub_server():
    """Build the twisted Site serving /commits and /metadata."""
    root = resource.Resource()
    root.putChild('commits', SvnPubSub(Commit))
    root.putChild('metadata', SvnPubSub(Metadata))
    return server.Site(root)
+
if __name__ == "__main__":
    # Standalone mode: log to stdout and serve on all interfaces.
    log.startLogging(sys.stdout)
    # Port 2069 "HTTP Event Port", whatever, sounds good to me
    reactor.listenTCP(2069, svnpubsub_server())
    reactor.run()
+
diff --git a/tools/server-side/svnpubsub/svnpubsub/util.py b/tools/server-side/svnpubsub/svnpubsub/util.py
new file mode 100644
index 0000000..e254f8b
--- /dev/null
+++ b/tools/server-side/svnpubsub/svnpubsub/util.py
@@ -0,0 +1,36 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import subprocess as __subprocess
+
+# check_output() is only available in Python 2.7. Allow us to run with
+# earlier versions
try:
    __check_output = __subprocess.check_output
    def check_output(args, env=None, universal_newlines=False):
        """Run args (no shell) and return its stdout; raise
        CalledProcessError on a non-zero exit."""
        return __check_output(args, shell=False, env=env,
                              universal_newlines=universal_newlines)
except AttributeError:
    def check_output(args, env=None, universal_newlines=False):
        """Fallback for Python < 2.7: emulate subprocess.check_output()."""
        # note: we only use these three args
        pipe = __subprocess.Popen(args, shell=False, env=env,
                                  stdout=__subprocess.PIPE,
                                  universal_newlines=universal_newlines)
        output, _ = pipe.communicate()
        if pipe.returncode:
            # This module imports the subprocess module as '__subprocess',
            # so the bare name 'subprocess' used here originally raised
            # NameError instead of the intended CalledProcessError.
            raise __subprocess.CalledProcessError(pipe.returncode, args)
        return output
diff --git a/tools/server-side/svnpubsub/svntweet.py b/tools/server-side/svnpubsub/svntweet.py
new file mode 100755
index 0000000..eae8e9a
--- /dev/null
+++ b/tools/server-side/svnpubsub/svntweet.py
@@ -0,0 +1,243 @@
+#!/usr/bin/env python
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# SvnTweet - Subscribe to a SvnPubSub stream, and Twitter about it!
+#
+# Example:
+# svntweet.py my-config.json
+#
+# With my-config.json containing stream paths and the twitter auth info:
+# {"stream": "http://svn.apache.org:2069/commits",
+# "username": "asfcommits",
+# "password": "MyLuggageComboIs1234"}
+#
+#
+#
+
+import threading
+import sys
+import os
+try:
+ import simplejson as json
+except ImportError:
+ import json
+
+from twisted.internet import defer, reactor, task, threads
+from twisted.python import failure, log
+from twisted.web.client import HTTPClientFactory, HTTPPageDownloader
+
+try:
+ # Python >=3.0
+ from urllib.parse import urlparse
+except ImportError:
+ # Python <3.0
+ from urlparse import urlparse
+
+import time
+import posixpath
+
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), "twitty-twister", "lib"))
+try:
+ import twitter
+except:
+ print("Get a copy of twitty-twister from <http://github.com/dustin/twitty-twister>")
+ sys.exit(-1)
class Config(object):
    """Tiny JSON config loader that rereads the file when its mtime changes."""

    def __init__(self, path):
        self.path = path
        self.mtime_path = 0   # mtime at last load; 0 means "never loaded"
        self.config = {}
        self._load_config()

    def _load_config(self):
        """(Re)read the JSON config if the file changed since the last load."""
        mtime = os.path.getmtime(self.path)
        if mtime != self.mtime_path:
            self.mtime_path = mtime
            # Use a context manager so the handle is closed promptly
            # (the original leaked the open file object).
            with open(self.path, "rb") as fp:
                self.config = json.loads(fp.read())
+
class HTTPStream(HTTPClientFactory):
    """HTTP client factory for a long-lived streaming GET.

    Uses HTTPPageDownloader so the page* callbacks below receive the
    response incrementally; subclasses override them to consume the
    stream.
    """
    protocol = HTTPPageDownloader

    def __init__(self, url):
        HTTPClientFactory.__init__(self, url, method="GET", agent="SvnTweet/0.1.0")

    def pageStart(self, partial):
        # Called when the response starts; default: ignore.
        pass

    def pagePart(self, data):
        # Called with each chunk of the response body; default: ignore.
        pass

    def pageEnd(self):
        # Called when the response completes; default: ignore.
        pass
+
class Commit(object):
    """Attribute-style view of a decoded 'commit' JSON object."""

    def __init__(self, commit):
        # Expose every key of the commit dict as an instance attribute.
        self.__dict__.update(commit)
+
class JSONRecordHandler:
    """Decodes one JSON stream record and dispatches it to the BDEC."""

    def __init__(self, bdec):
        self.bdec = bdec

    def feed(self, record):
        """Handle a single JSON record string.

        Raises ValueError for unknown stream versions or malformed
        commit objects.  (The original raised the nonexistent name
        'ValueException' and referenced undefined variables in the
        version-mismatch message, so every error path was a NameError.)
        """
        obj = json.loads(record)
        if 'svnpubsub' in obj:
            actual_version = obj['svnpubsub'].get('version')
            EXPECTED_VERSION = 1
            if actual_version != EXPECTED_VERSION:
                raise ValueError("Unknown svnpubsub format: %r != %d"
                                 % (actual_version, EXPECTED_VERSION))
        elif 'commit' in obj:
            commit = Commit(obj['commit'])
            if not hasattr(commit, 'type'):
                raise ValueError("Commit object is missing type field.")
            if not hasattr(commit, 'format'):
                raise ValueError("Commit object is missing format field.")
            # Reject anything that is not an svn/format-1 commit.  (The
            # original used 'and', which only rejected records where BOTH
            # fields were unexpected.)
            if commit.type != 'svn' or commit.format != 1:
                raise ValueError("Unexpected type and/or format: %s:%s"
                                 % (commit.type, commit.format))
            self.bdec.commit(commit)
        elif 'stillalive' in obj:
            self.bdec.stillalive()
+
class JSONHTTPStream(HTTPStream):
    """HTTPStream that splits the body into NUL-delimited JSON records
    and feeds each complete record to a JSONRecordHandler."""

    def __init__(self, url, bdec):
        HTTPStream.__init__(self, url)
        self.bdec = bdec
        self.ibuffer = []   # accumulated fragments of the current record
        self.parser = JSONRecordHandler(bdec)

    def pageStart(self, partial):
        self.bdec.pageStart()

    def pagePart(self, data):
        # One TCP chunk may carry zero or more complete records.  The
        # original parsed only the first NUL per chunk, leaving any
        # further complete records stuck in the buffer until more data
        # arrived; loop until no terminator remains.
        while True:
            eor = data.find("\0")
            if eor < 0:
                # No (further) terminator: buffer the fragment.
                self.ibuffer.append(data)
                return
            self.ibuffer.append(data[0:eor])
            self.parser.feed(''.join(self.ibuffer))
            self.ibuffer = []
            data = data[eor+1:]
+
def connectTo(url, bdec):
    """Open a JSONHTTPStream to *url*; return [stream, connector].

    Defaults to port 80 when the URL does not specify one.  (The
    original computed this fallback but then passed u.port -- which is
    None for a port-less URL -- to connectTCP.)
    """
    u = urlparse(url)
    port = u.port
    if not port:
        port = 80
    s = JSONHTTPStream(url, bdec)
    conn = reactor.connectTCP(u.hostname, port, s)
    return [s, conn]
+
+
+CHECKBEAT_TIME = 90
+
class BigDoEverythingClasss(object):
    """Glue object: loads the config, keeps the pubsub stream connected
    (with liveness checking and linear backoff), and tweets commits."""

    def __init__(self, config):
        self.c = config
        self.c._load_config()
        self.url = str(self.c.config.get('stream'))
        self.failures = 0            # consecutive connection failures
        self.alive = time.time()     # timestamp of the last sign of life
        self.checker = task.LoopingCall(self._checkalive)
        self.transport = None
        self.stream = None
        self._restartStream()
        self.watch = []   # NOTE(review): appears unused in this class -- confirm
        self.twit = twitter.Twitter(self.c.config.get('username'), self.c.config.get('password'))

    def pageStart(self):
        """Called by the stream when the HTTP response starts."""
        log.msg("Stream Connection Established")
        self.failures = 0

    def _restartStream(self):
        # (Re)connect and restart the liveness checker.
        (self.stream, self.transport) = connectTo(self.url, self)
        self.stream.deferred.addBoth(self.streamDead)
        self.alive = time.time()
        self.checker.start(CHECKBEAT_TIME)

    def _checkalive(self):
        # Runs every CHECKBEAT_TIME seconds; if nothing was heard for a
        # full interval, force a disconnect (streamDead reconnects).
        n = time.time()
        if n - self.alive > CHECKBEAT_TIME:
            log.msg("Stream is dead, reconnecting")
            self.transport.disconnect()

    def stillalive(self):
        """Server heartbeat received; record the time."""
        self.alive = time.time()

    def streamDead(self, v):
        """Stream closed (either end): schedule a reconnect with backoff."""
        BACKOFF_SECS = 5
        BACKOFF_MAX = 60
        self.checker.stop()

        self.stream = None
        self.failures += 1
        # Linear backoff, capped at BACKOFF_MAX seconds.
        backoff = min(self.failures * BACKOFF_SECS, BACKOFF_MAX)
        log.msg("Stream disconnected, trying again in %d seconds.... %s" % (backoff, self.url))
        reactor.callLater(backoff, self._restartStream)

    def _normalize_path(self, path):
        # Ensure a leading '/'; collapse './..' segments for absolute paths.
        if path[0] != '/':
            return "/" + path
        return posixpath.abspath(path)

    def tweet(self, msg):
        log.msg("SEND TWEET: %s" % (msg))
        self.twit.update(msg).addCallback(self.tweet_done).addErrback(log.msg)

    def tweet_done(self, x):
        log.msg("TWEET: Success!")

    def build_tweet(self, commit):
        """Compose the tweet text for a commit, or None when it has no paths."""
        # NOTE(review): 144 exceeds Twitter's classic 140-char limit -- confirm.
        maxlen = 144
        left = maxlen
        # NOTE(review): on Python 3, map() returns an iterator with no
        # len(); this script appears to target Python 2 -- confirm.
        paths = map(self._normalize_path, commit.changed)
        if not len(paths):
            return None
        path = posixpath.commonprefix(paths)
        if path[0:1] == '/' and len(path) > 1:
            path = path[1:]

        #TODO: allow URL to be configurable.
        link = " - http://svn.apache.org/r%d" % (commit.id)
        left -= len(link)
        msg = "r%d in %s by %s: " % (commit.id, path, commit.committer)
        left -= len(msg)
        if left > 3:
            msg += commit.log[0:left]
        msg += link
        return msg

    def commit(self, commit):
        """Handle one commit record: log it, and tweet when composable."""
        log.msg("COMMIT r%d (%d paths)" % (commit.id, len(commit.changed)))
        msg = self.build_tweet(commit)
        if msg:
            self.tweet(msg)
        #print "Common Prefix: %s" % (pre)
+
def main(config_file):
    """Load the config, wire up the client object, and run the reactor."""
    config = Config(config_file)
    # Keep a reference so the client object is not garbage collected.
    client = BigDoEverythingClasss(config)
    reactor.run()
+
if __name__ == "__main__":
    # Exactly one argument: the JSON config file path.
    if len(sys.argv) != 2:
        print("invalid args, read source code")
        sys.exit(0)
    log.startLogging(sys.stdout)
    main(sys.argv[1])
diff --git a/tools/server-side/svnpubsub/svnwcsub.conf.example b/tools/server-side/svnpubsub/svnwcsub.conf.example
new file mode 100644
index 0000000..644a3b7
--- /dev/null
+++ b/tools/server-side/svnpubsub/svnwcsub.conf.example
@@ -0,0 +1,16 @@
+[DEFAULT]
+svnbin: /usr/local/bin/svn
+streams: http://svn.example.org:2069/commits/svn
+# hook: /usr/bin/true
+
+## The values below are used by ConfigParser's interpolation syntax.
+## See http://docs.python.org/library/configparser
+SOME_REPOS: svn://svn.example.org/repos/chaos
+
+[env]
+HOME: /home/svn
+LANG: en_US.UTF-8
+
+[track]
+/usr/local/foo/prod: %(SOME_REPOS)s/foo/production
+/usr/local/foo/dev: %(SOME_REPOS)s/foo/trunk
diff --git a/tools/server-side/svnpubsub/svnwcsub.py b/tools/server-side/svnpubsub/svnwcsub.py
new file mode 100755
index 0000000..1a65b37
--- /dev/null
+++ b/tools/server-side/svnpubsub/svnwcsub.py
@@ -0,0 +1,559 @@
+#!/usr/bin/env python
+# encoding: UTF-8
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# SvnWcSub - Subscribe to a SvnPubSub stream, and keep a set of working copy
+# paths in sync
+#
+# Example:
+# svnwcsub.py svnwcsub.conf
+#
+# On startup svnwcsub checks the working copy's path, runs a single svn update
+# and then watches for changes to that path.
+#
+# See svnwcsub.conf for more information on its contents.
+#
+
+# TODO:
+# - bulk update at startup time to avoid backlog warnings
+# - fold BDEC into Daemon
+# - fold WorkingCopy._get_match() into __init__
+# - remove wc_ready(). assume all WorkingCopy instances are usable.
+# place the instances into .watch at creation. the .update_applies()
+# just returns if the wc is disabled (eg. could not find wc dir)
+# - figure out way to avoid the ASF-specific PRODUCTION_RE_FILTER
+# (a base path exclusion list should work for the ASF)
+# - add support for SIGHUP to reread the config and reinitialize working copies
+# - joes will write documentation for svnpubsub as these items become fulfilled
+# - make LOGLEVEL configurable
+
+import errno
+import subprocess
+import threading
+import sys
+import stat
+import os
+import re
+import posixpath
+try:
+ import ConfigParser
+except ImportError:
+ import configparser as ConfigParser
+import time
+import logging.handlers
+try:
+ import Queue
+except ImportError:
+ import queue as Queue
+import optparse
+import functools
+try:
+ import urlparse
+except ImportError:
+ import urllib.parse as urlparse
+
+import daemonize
+import svnpubsub.client
+import svnpubsub.util
+
+assert hasattr(subprocess, 'check_call')
def check_call(*args, **kwds):
    """Wrapper around subprocess.check_call() that logs stderr upon failure,
    with an optional list of exit codes to consider non-failure."""
    assert 'stderr' not in kwds
    # Pull out our private keyword before handing kwds to Popen.
    okayexits = kwds.pop('__okayexits', set([0]))  # default: EXIT_SUCCESS only
    kwds['stderr'] = subprocess.PIPE
    pipe = subprocess.Popen(*args, **kwds)
    output, errput = pipe.communicate()
    if pipe.returncode not in okayexits:
        cmd = args[0] if args else kwds.get('args', '(no command)')
        # TODO: log stdout too?
        logging.error('Command failed: returncode=%d command=%r stderr=%r',
                      pipe.returncode, cmd, errput)
        raise subprocess.CalledProcessError(pipe.returncode, args)
    return pipe.returncode  # is EXIT_OK
+
+### note: this runs synchronously. within the current Twisted environment,
+### it is called from ._get_match() which is run on a thread so it won't
+### block the Twisted main loop.
+def svn_info(svnbin, env, path):
+ "Run 'svn info' on the target path, returning a dict of info data."
+ args = [svnbin, "info", "--non-interactive", "--", path]
+ output = svnpubsub.util.check_output(args, env=env).strip()
+ info = { }
+ for line in output.split('\n'):
+ idx = line.index(':')
+ info[line[:idx]] = line[idx+1:].strip()
+ return info
+
try:
    # Probe for glob.iglob (lazy iteration); fall back below when absent.
    import glob
    glob.iglob
    def is_emptydir(path):
        # ### If the directory contains only dotfile children, this will readdir()
        # ### the entire directory. But os.readdir() is not exposed to us...
        # iglob is lazy, so returning on the first match avoids listing
        # the whole directory in the common non-empty case.
        # NOTE(review): glob will misbehave if 'path' contains glob
        # metacharacters ([, *, ?) -- confirm tracked paths are plain.
        for x in glob.iglob('%s/*' % path):
            return False
        for x in glob.iglob('%s/.*' % path):
            return False
        return True
except (ImportError, AttributeError):
    # Python ≤2.4
    def is_emptydir(path):
        # This will read the entire directory list to memory.
        return not os.listdir(path)
+
class WorkingCopy(object):
    """One tracked working copy: a local path synchronized with a URL.

    On construction the WC is probed (and auto-checked-out if missing);
    on success it registers itself with the BDEC via wc_ready().
    """

    def __init__(self, bdec, path, url):
        self.path = path
        self.url = url

        try:
            self.match, self.uuid = self._get_match(bdec.svnbin, bdec.env)
            bdec.wc_ready(self)
        except Exception:
            # Was a bare 'except:', which also swallowed SystemExit and
            # KeyboardInterrupt.  A broken WC is logged and skipped; the
            # rest of the daemon keeps running.
            logging.exception('problem with working copy: %s', path)

    def update_applies(self, uuid, path):
        """Return True when a change at *path* in repo *uuid* affects this WC.

        Matches when *path* equals, is an ancestor of, or is a
        descendant of this WC's repository-relative path.
        """
        if self.uuid != uuid:
            return False

        path = str(path)
        if path == self.match:
            #print "ua: Simple match"
            # easy case. woo.
            return True
        if len(path) < len(self.match):
            # path is potentially a parent directory of match?
            #print "ua: parent check"
            if self.match[0:len(path)] == path:
                return True
        if len(path) > len(self.match):
            # path is potentially a sub directory of match
            #print "ua: sub dir check"
            if path[0:len(self.match)] == self.match:
                return True
        return False

    def _get_match(self, svnbin, env):
        """Return (repos-relative path, repository UUID) for this WC,
        checking it out first if the .svn dir is missing or empty."""
        ### quick little hack to auto-checkout missing working copies
        dotsvn = os.path.join(self.path, ".svn")
        if not os.path.isdir(dotsvn) or is_emptydir(dotsvn):
            logging.info("autopopulate %s from %s" % (self.path, self.url))
            check_call([svnbin, 'co', '-q',
                        '--force',
                        '--non-interactive',
                        '--config-option',
                        'config:miscellany:use-commit-times=on',
                        '--', self.url, self.path],
                       env=env)

        # Fetch the info for matching dirs_changed against this WC
        info = svn_info(svnbin, env, self.path)
        root = info['Repository Root']
        url = info['URL']
        relpath = url[len(root):]  # also has leading '/'
        uuid = info['Repository UUID']
        return str(relpath), uuid
+
+
+PRODUCTION_RE_FILTER = re.compile("/websites/production/[^/]+/")
+
class BigDoEverythingClasss(object):
    """Top-level coordinator: holds the config, the background worker,
    and the set of watched working copies, and reacts to commits."""

    def __init__(self, config):
        self.svnbin = config.get_value('svnbin')
        self.env = config.get_env()
        self.tracking = config.get_track()
        self.hook = config.get_optional_value('hook')
        self.streams = config.get_value('streams').split()
        self.worker = BackgroundWorker(self.svnbin, self.env, self.hook)
        self.watch = [ ]

    def start(self):
        """Create the configured working copies; they register via wc_ready()."""
        for path, url in self.tracking.items():
            # working copies auto-register with the BDEC when they are ready.
            WorkingCopy(self, path, url)

    def wc_ready(self, wc):
        # called when a working copy object has its basic info/url,
        # Add it to our watchers, and trigger an svn update.
        logging.info("Watching WC at %s <-> %s" % (wc.path, wc.url))
        self.watch.append(wc)
        self.worker.add_work(OP_BOOT, wc)

    def _normalize_path(self, path):
        """Return *path* with a leading '/', collapsing './..' segments."""
        if path[0] != '/':
            return "/" + path
        return posixpath.abspath(path)

    def commit(self, url, commit):
        """Handle one commit notification: queue updates for matching WCs."""
        if commit.type != 'svn' or commit.format != 1:
            logging.info("SKIP unknown commit format (%s.%d)",
                         commit.type, commit.format)
            return
        logging.info("COMMIT r%d (%d paths) from %s"
                     % (commit.id, len(commit.changed), url))

        # Materialize the map(): on Python 3 (which this file otherwise
        # supports via its import fallbacks) map() returns an iterator
        # with no len(), so the original broke here.
        paths = list(map(self._normalize_path, commit.changed))
        if paths:
            pre = posixpath.commonprefix(paths)
            if pre == "/websites/":
                # special case for svnmucc "dynamic content" buildbot commits
                # just take the first production path to avoid updating all cms working copies
                for p in paths:
                    m = PRODUCTION_RE_FILTER.match(p)
                    if m:
                        pre = m.group(0)
                        break

            #print "Common Prefix: %s" % (pre)
            wcs = [wc for wc in self.watch if wc.update_applies(commit.repository, pre)]
            logging.info("Updating %d WC for r%d" % (len(wcs), commit.id))
            for wc in wcs:
                self.worker.add_work(OP_UPDATE, wc)
+
+
+# Start logging warnings if the work backlog reaches this many items
+BACKLOG_TOO_HIGH = 20
+OP_BOOT = 'boot'
+OP_UPDATE = 'update'
+OP_CLEANUP = 'cleanup'
+
class BackgroundWorker(threading.Thread):
    """Single background thread that serializes all svn operations.

    Work items are (operation, working_copy) tuples pulled off a Queue,
    so at most one svn subprocess runs at a time.
    """

    def __init__(self, svnbin, env, hook):
        threading.Thread.__init__(self)

        # The main thread/process should not wait for this thread to exit.
        ### compat with Python 2.5
        self.setDaemon(True)

        self.svnbin = svnbin    # path to the svn binary
        self.env = env          # environment for spawned svn/hook processes
        self.hook = hook        # optional hook script path, or None
        self.q = Queue.Queue()  # pending (operation, wc) work items

        # Thread start is deferred until the first work item; see add_work().
        self.has_started = False

    def run(self):
        """Worker loop: process queued operations until the process exits."""
        while True:
            # This will block until something arrives
            operation, wc = self.q.get()

            # Warn if the queue is too long.
            # (Note: the other thread might have added entries to self.q
            # after the .get() and before the .qsize().)
            qsize = self.q.qsize()+1
            if operation != OP_BOOT and qsize > BACKLOG_TOO_HIGH:
                logging.warn('worker backlog is at %d', qsize)

            try:
                if operation == OP_UPDATE:
                    self._update(wc)
                elif operation == OP_BOOT:
                    self._update(wc, boot=True)
                elif operation == OP_CLEANUP:
                    self._cleanup(wc)
                else:
                    logging.critical('unknown operation: %s', operation)
            except:
                # Keep the worker alive no matter what one item does.
                logging.exception('exception in worker')

            # In case we ever want to .join() against the work queue
            self.q.task_done()

    def add_work(self, operation, wc):
        """Queue (operation, wc); lazily start the thread on first use."""
        # Start the thread when work first arrives. Thread-start needs to
        # be delayed in case the process forks itself to become a daemon.
        if not self.has_started:
            self.start()
            self.has_started = True

        self.q.put((operation, wc))

    def _update(self, wc, boot=False):
        "Update the specified working copy."

        # For giggles, let's clean up the working copy in case something
        # happened earlier.
        self._cleanup(wc)

        logging.info("updating: %s", wc.path)

        ## Run the hook
        # Pin the revision now so the hook and the switch below agree on it.
        HEAD = svn_info(self.svnbin, self.env, wc.url)['Revision']
        if self.hook:
            # boot=False -> 'pre-update', boot=True -> 'pre-boot'
            hook_mode = ['pre-update', 'pre-boot'][boot]
            logging.info('running hook: %s at %s',
                         wc.path, hook_mode)
            args = [self.hook, hook_mode, wc.path, HEAD, wc.url]
            # Hook exit code 1 vetoes the update; anything else is an error.
            rc = check_call(args, env=self.env, __okayexits=[0, 1])
            if rc == 1:
                # TODO: log stderr
                logging.warn('hook denied update of %s at %s',
                             wc.path, hook_mode)
                return
            del rc

        ### we need to move some of these args into the config. these are
        ### still specific to the ASF setup.
        args = [self.svnbin, 'switch',
                '--quiet',
                '--non-interactive',
                '--trust-server-cert',
                '--ignore-externals',
                '--config-option',
                'config:miscellany:use-commit-times=on',
                '--',
                wc.url + '@' + HEAD,
                wc.path]
        check_call(args, env=self.env)

        ### check the loglevel before running 'svn info'?
        info = svn_info(self.svnbin, self.env, wc.path)
        assert info['Revision'] == HEAD
        logging.info("updated: %s now at r%s", wc.path, info['Revision'])

        ## Run the hook
        if self.hook:
            hook_mode = ['post-update', 'boot'][boot]
            logging.info('running hook: %s at revision %s due to %s',
                         wc.path, info['Revision'], hook_mode)
            args = [self.hook, hook_mode,
                    wc.path, info['Revision'], wc.url]
            check_call(args, env=self.env)

    def _cleanup(self, wc):
        "Run a cleanup on the specified working copy."

        ### we need to move some of these args into the config. these are
        ### still specific to the ASF setup.
        args = [self.svnbin, 'cleanup',
                '--non-interactive',
                '--trust-server-cert',
                '--config-option',
                'config:miscellany:use-commit-times=on',
                wc.path]
        check_call(args, env=self.env)
+
+
class ReloadableConfig(ConfigParser.SafeConfigParser):
    """ConfigParser subclass that remembers its file name so the
    configuration can be re-read in place."""

    def __init__(self, fname):
        ConfigParser.SafeConfigParser.__init__(self)

        self.fname = fname
        self.read(fname)

        ### install a signal handler to set SHOULD_RELOAD. BDEC should
        ### poll this flag, and then adjust its internal structures after
        ### the reload.
        self.should_reload = False

    def reload(self):
        """Re-read the configuration file from disk."""
        # Delete everything. Just re-reading would overlay, and would not
        # remove sections/options. Note that [DEFAULT] will not be removed.
        for section in self.sections():
            self.remove_section(section)

        # Now re-read the configuration file.  (The original referenced
        # the undefined bare name 'fname' here, so reload() always died
        # with NameError.)
        self.read(self.fname)

    def get_value(self, which):
        """Return option *which* from the [DEFAULT] section."""
        return self.get(ConfigParser.DEFAULTSECT, which)

    def get_optional_value(self, which, default=None):
        """Return option *which* from [DEFAULT], or *default* when absent."""
        if self.has_option(ConfigParser.DEFAULTSECT, which):
            return self.get(ConfigParser.DEFAULTSECT, which)
        else:
            return default

    def get_env(self):
        """Return os.environ overlaid with the [env] section's options."""
        env = os.environ.copy()
        default_options = self.defaults().keys()
        for name, value in self.items('env'):
            # items('env') also yields [DEFAULT] entries; skip those.
            if name not in default_options:
                env[name] = value
        return env

    def get_track(self):
        "Return the {PATH: URL} dictionary of working copies to track."
        track = dict(self.items('track'))
        # items('track') also includes the [DEFAULT] options; drop them.
        for name in self.defaults().keys():
            del track[name]
        return track

    def optionxform(self, option):
        # Do not lowercase the option name.
        return str(option)
+
+
class Daemon(daemonize.Daemon):
    """Daemon wrapper that runs the BDEC and the svnpubsub MultiClient."""

    def __init__(self, logfile, pidfile, umask, bdec):
        daemonize.Daemon.__init__(self, logfile, pidfile)

        # umask: octal string (e.g. '022'), or None to inherit the parent's.
        self.umask = umask
        self.bdec = bdec

    def setup(self):
        # There is no setup which the parent needs to wait for.
        pass

    def run(self):
        """Daemon body: start the BDEC and loop on the pubsub streams forever."""
        logging.info('svnwcsub started, pid=%d', os.getpid())

        # Set the umask in the daemon process. Defaults to 000 for
        # daemonized processes. Foreground processes simply inherit
        # the value from the parent process.
        if self.umask is not None:
            umask = int(self.umask, 8)
            os.umask(umask)
            logging.info('umask set to %03o', umask)

        # Start the BDEC (on the main thread), then start the client
        self.bdec.start()

        mc = svnpubsub.client.MultiClient(self.bdec.streams,
                                          self.bdec.commit,
                                          self._event)
        mc.run_forever()

    def _event(self, url, event_name, event_arg):
        """Log a MultiClient event ('error', 'ping', 'stale', ...)."""
        if event_name == 'error':
            # NOTE(review): logging.exception() only produces a traceback
            # when called from an exception context -- confirm MultiClient
            # invokes this callback from inside its except block.
            logging.exception('from %s', url)
        elif event_name == 'ping':
            logging.debug('ping from %s', url)
        else:
            logging.info('"%s" from %s', event_name, url)
+
+
def prepare_logging(logfile):
    "Log to the specified file, or to stdout if None."

    if logfile:
        # Rotate logs daily, keeping 7 days worth.
        handler = logging.handlers.TimedRotatingFileHandler(
            logfile, when='midnight', backupCount=7)
    else:
        handler = logging.StreamHandler(sys.stdout)

    # Timestamped records: "YYYY-MM-DD HH:MM:SS [LEVEL] message"
    handler.setFormatter(
        logging.Formatter('%(asctime)s [%(levelname)s] %(message)s',
                          '%Y-%m-%d %H:%M:%S'))

    # Apply the handler to the root logger
    root = logging.getLogger()
    root.addHandler(handler)

    ### use logging.INFO for now. switch to cmdline option or a config?
    root.setLevel(logging.INFO)
+
+
def handle_options(options):
    """Apply process-wide options: logging, pidfile, and gid/uid switching."""
    # Set up the logging, then process the rest of the options.
    prepare_logging(options.logfile)

    # In daemon mode, we let the daemonize module handle the pidfile.
    # Otherwise, we should write this (foreground) PID into the file.
    if options.pidfile and not options.daemon:
        pid = os.getpid()
        # Be wary of symlink attacks
        try:
            os.remove(options.pidfile)
        except OSError:
            pass
        fd = os.open(options.pidfile, os.O_WRONLY | os.O_CREAT | os.O_EXCL,
                     stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
        # os.write() requires bytes on Python 3; on Python 2, encode()
        # returns str, so this stays compatible.
        os.write(fd, ('%d\n' % pid).encode('ascii'))
        os.close(fd)
        logging.info('pid %d written to %s', pid, options.pidfile)

    # Drop group privileges before user privileges: setgid() would no
    # longer be permitted once we run under an unprivileged uid.
    if options.gid:
        try:
            gid = int(options.gid)
        except ValueError:
            import grp
            gid = grp.getgrnam(options.gid)[2]
        logging.info('setting gid %d', gid)
        os.setgid(gid)

    if options.uid:
        try:
            uid = int(options.uid)
        except ValueError:
            import pwd
            uid = pwd.getpwnam(options.uid)[2]
        logging.info('setting uid %d', uid)
        os.setuid(uid)
+
+
def main(args):
    """Parse the command line, configure the process, and run svnwcsub."""
    parser = optparse.OptionParser(
        description='An SvnPubSub client to keep working copies synchronized '
                    'with a repository.',
        usage='Usage: %prog [options] CONFIG_FILE',
        )
    parser.add_option('--logfile',
                      help='filename for logging')
    parser.add_option('--pidfile',
                      help="the process' PID will be written to this file")
    parser.add_option('--uid',
                      help='switch to this UID before running')
    parser.add_option('--gid',
                      help='switch to this GID before running')
    parser.add_option('--umask',
                      help='set this (octal) umask before running')
    parser.add_option('--daemon', action='store_true',
                      help='run as a background daemon')

    options, extra = parser.parse_args(args)

    if len(extra) != 1:
        parser.error('CONFIG_FILE is required')
    config_file = extra[0]

    if options.daemon and not options.logfile:
        parser.error('LOGFILE is required when running as a daemon')
    if options.daemon and not options.pidfile:
        parser.error('PIDFILE is required when running as a daemon')

    # Process any provided options.
    handle_options(options)

    c = ReloadableConfig(config_file)
    bdec = BigDoEverythingClasss(c)

    # We manage the logfile ourselves (along with possible rotation). The
    # daemon process can just drop stdout/stderr into /dev/null.
    # Guard the abspath() call: --pidfile is optional in foreground mode,
    # and os.path.abspath(None) raises.
    pidfile = os.path.abspath(options.pidfile) if options.pidfile else None
    d = Daemon('/dev/null', pidfile, options.umask, bdec)
    if options.daemon:
        # Daemonize the process and call sys.exit() with appropriate code
        d.daemonize_exit()
    else:
        # Just run in the foreground (the default)
        d.foreground()
+
+if __name__ == "__main__":
+ main(sys.argv[1:])
diff --git a/tools/server-side/svnpubsub/testserver.py b/tools/server-side/svnpubsub/testserver.py
new file mode 100755
index 0000000..8966a95
--- /dev/null
+++ b/tools/server-side/svnpubsub/testserver.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# A simple test server for responding in different ways to SvnPubSub clients.
+# This avoids the complexity of the Twisted framework in order to direct
+# various (abnormal) conditions at the client.
+#
+# ### usage...
+#
+
+import sys
+import BaseHTTPServer
+
+
+PORT = 2069
+
+TEST_BODY = '{"svnpubsub": {"version": 1}}\n\0{"commit": {"type": "svn", "format": 1, "repository": "12345678-1234-1234-1234-123456789012", "id": "1234", "committer": "johndoe", "date": "2012-01-01 01:01:01 +0000 (Sun, 01 Jan 2012)", "log": "Frob the ganoozle with the snookish", "changed": {"one/path/alpha": {"flags": "U "}, "some/other/directory/": {"flags": "_U "}}}}\n\0'
+
+SEND_KEEPALIVE = True
+
+
+class TestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+ def do_GET(self):
+ self.send_response(200)
+ self.send_header('Content-Length', str(len(TEST_BODY)))
+ self.send_header('Connection', 'keep-alive')
+ self.end_headers()
+ self.wfile.write(TEST_BODY)
+
+
+if __name__ == '__main__':
+ server = BaseHTTPServer.HTTPServer(('', PORT), TestHandler)
+ sys.stderr.write('Now listening on port %d...\n' % (PORT,))
+ server.serve_forever()
diff --git a/tools/server-side/svnpubsub/watcher.py b/tools/server-side/svnpubsub/watcher.py
new file mode 100755
index 0000000..11bf066
--- /dev/null
+++ b/tools/server-side/svnpubsub/watcher.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# Watch for events from SvnPubSub and print them to stdout
+#
+#
+
+import sys
+import pprint
+try:
+ import urlparse
+except ImportError:
+ import urllib.parse as urlparse
+
+import svnpubsub.client
+
+
+def _commit(url, commit):
+ print('COMMIT: from %s' % url)
+ pprint.pprint(vars(commit), indent=2)
+
+def _metadata(url, metadata):
+ print('METADATA: from %s' % url)
+ pprint.pprint(vars(metadata), indent=2)
+
+def _event(url, event_name, event_arg):
+ if event_arg:
+ print('EVENT: from %s "%s" "%s"' % (url, event_name, event_arg))
+ else:
+ print('EVENT: from %s "%s"' % (url, event_name))
+
+
+def main(urls):
+ mc = svnpubsub.client.MultiClient(urls, _commit, _event, _metadata)
+ mc.run_forever()
+
+
+if __name__ == "__main__":
+ if len(sys.argv) < 2:
+ print("usage: watcher.py URL [URL...]")
+ sys.exit(0)
+ main(sys.argv[1:])
diff --git a/tools/server-side/test_svn_server_log_parse.py b/tools/server-side/test_svn_server_log_parse.py
new file mode 100755
index 0000000..efc642c
--- /dev/null
+++ b/tools/server-side/test_svn_server_log_parse.py
@@ -0,0 +1,611 @@
+#!/usr/bin/python
+
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+
+# Run this without arguments to run unit tests.
+# Run with a path to a davautocheck ops log to test that it can parse that.
+
+import os
+import re
+import sys
+import tempfile
+try:
+ # Python >=3.0
+ from urllib.parse import quote as urllib_parse_quote
+except ImportError:
+ # Python <3.0
+ from urllib import quote as urllib_parse_quote
+import unittest
+
+import svn.core
+
+import svn_server_log_parse
+
+class TestCase(unittest.TestCase):
+ def setUp(self):
+ # Define a class to stuff everything passed to any handle_
+ # method into self.result.
+ class cls(svn_server_log_parse.Parser):
+ def __getattr__(cls_self, attr):
+ if attr.startswith('handle_'):
+ return lambda *a: setattr(self, 'result', a)
+ raise AttributeError
+ self.parse = cls().parse
+
+ def test_unknown(self):
+ line = 'unknown log line'
+ self.parse(line)
+ self.assertEqual(self.result, (line,))
+
+ def test_open(self):
+ self.assertRaises(svn_server_log_parse.Error, self.parse, 'open')
+ self.assertRaises(svn_server_log_parse.Error, self.parse, 'open 2 cap / SVN/1.60. fooclient')
+ self.assertRaises(svn_server_log_parse.Error, self.parse, 'open a cap=() / SVN/1.60. fooclient')
+ self.assertEqual(self.parse('open 2 cap=() / SVN fooclient'), '')
+ self.assertEqual(self.result, (2, [], '/', 'SVN', 'fooclient'))
+ # TODO: Teach it about the capabilities, rather than allowing
+ # any words at all.
+ self.assertEqual(self.parse('open 2 cap=(foo) / SVN foo%20client'), '')
+ self.assertEqual(self.result, (2, ['foo'], '/', 'SVN', 'foo client'))
+
+ def test_reparent(self):
+ self.assertRaises(svn_server_log_parse.Error, self.parse, 'reparent')
+ self.assertEqual(self.parse('reparent /'), '')
+ self.assertEqual(self.result, ('/',))
+
+ def test_get_latest_rev(self):
+ self.assertEqual(self.parse('get-latest-rev'), '')
+ self.assertEqual(self.result, ())
+ self.assertEqual(self.parse('get-latest-rev r3'), 'r3')
+ self.assertEqual(self.result, ())
+
+ def test_get_dated_rev(self):
+ self.assertRaises(svn_server_log_parse.Error, self.parse,
+ 'get-dated-rev')
+ self.assertEqual(self.parse('get-dated-rev 2008-04-15T20:41:24.000000Z'), '')
+ self.assertEqual(self.result, ('2008-04-15T20:41:24.000000Z',))
+
+ def test_commit(self):
+ self.assertRaises(svn_server_log_parse.Error, self.parse, 'commit')
+ self.assertRaises(svn_server_log_parse.Error, self.parse, 'commit 3')
+ self.assertEqual(self.parse('commit r3'), '')
+ self.assertEqual(self.result, (3,))
+ self.assertEqual(self.parse('commit r3 leftover'), ' leftover')
+ self.assertEqual(self.result, (3,))
+
+ def test_get_dir(self):
+ self.get_dir_or_file('get-dir')
+
+ def test_get_file(self):
+ self.get_dir_or_file('get-file')
+
+ def get_dir_or_file(self, c):
+ self.assertRaises(svn_server_log_parse.Error, self.parse, c)
+ self.assertRaises(svn_server_log_parse.Error, self.parse, c + ' foo')
+ self.assertRaises(svn_server_log_parse.Error, self.parse, c + ' foo 3')
+ self.assertEqual(self.parse(c + ' /a/b/c r3 ...'), ' ...')
+ self.assertEqual(self.result, ('/a/b/c', 3, False, False))
+ self.assertEqual(self.parse(c + ' / r3'), '')
+ self.assertEqual(self.result, ('/', 3, False, False))
+ # path must be absolute
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, c + ' a/b/c r3')
+ self.assertEqual(self.parse(c + ' /k r27 text'), '')
+ self.assertEqual(self.result, ('/k', 27, True, False))
+ self.assertEqual(self.parse(c + ' /k r27 props'), '')
+ self.assertEqual(self.result, ('/k', 27, False, True))
+ self.assertEqual(self.parse(c + ' /k r27 text props'), '')
+ self.assertEqual(self.result, ('/k', 27, True, True))
+ # out of order not accepted
+ self.assertEqual(self.parse(c + ' /k r27 props text'), ' text')
+ self.assertEqual(self.result, ('/k', 27, False, True))
+
+ def test_lock(self):
+ self.assertRaises(svn_server_log_parse.Error, self.parse, 'lock')
+ self.parse('lock (/foo)')
+ self.assertEqual(self.result, (['/foo'], False))
+ self.assertEqual(self.parse('lock (/foo) steal ...'), ' ...')
+ self.assertEqual(self.result, (['/foo'], True))
+ self.assertEqual(self.parse('lock (/foo) stear'), ' stear')
+
+ def test_change_rev_prop(self):
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, 'change-rev-prop r3')
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, 'change-rev-prop r svn:log')
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, 'change-rev-prop rX svn:log')
+ self.assertEqual(self.parse('change-rev-prop r3 svn:log ...'), ' ...')
+ self.assertEqual(self.result, (3, 'svn:log'))
+
+ def test_rev_proplist(self):
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, 'rev-proplist')
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, 'rev-proplist r')
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, 'rev-proplist rX')
+ self.assertEqual(self.parse('rev-proplist r3 ...'), ' ...')
+ self.assertEqual(self.result, (3,))
+
+ def test_rev_prop(self):
+ self.assertRaises(svn_server_log_parse.Error, self.parse, 'rev-prop')
+ self.assertRaises(svn_server_log_parse.Error, self.parse, 'rev-prop r')
+ self.assertRaises(svn_server_log_parse.Error, self.parse, 'rev-prop rX')
+ self.assertEqual(self.parse('rev-prop r3 foo ...'), ' ...')
+ self.assertEqual(self.result, (3, 'foo'))
+
+ def test_unlock(self):
+ self.assertRaises(svn_server_log_parse.Error, self.parse, 'unlock')
+ self.parse('unlock (/foo)')
+ self.assertEqual(self.result, (['/foo'], False))
+ self.assertEqual(self.parse('unlock (/foo) break ...'), ' ...')
+ self.assertEqual(self.result, (['/foo'], True))
+ self.assertEqual(self.parse('unlock (/foo) bear'), ' bear')
+
+ def test_get_lock(self):
+ self.assertRaises(svn_server_log_parse.Error, self.parse, 'get-lock')
+ self.parse('get-lock /foo')
+ self.assertEqual(self.result, ('/foo',))
+
+ def test_get_locks(self):
+ self.assertRaises(svn_server_log_parse.Error, self.parse, 'get-locks')
+ self.parse('get-locks /foo')
+ self.assertEqual(self.result, ('/foo',))
+
+ def test_get_locations(self):
+ self.assertRaises(svn_server_log_parse.Error, self.parse,
+ 'get-locations')
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, 'get-locations /foo 3')
+ self.assertEqual(self.parse('get-locations /foo (3 4) ...'), ' ...')
+ self.assertEqual(self.result, ('/foo', [3, 4]))
+ self.assertEqual(self.parse('get-locations /foo (3)'), '')
+ self.assertEqual(self.result, ('/foo', [3]))
+
+ def test_get_location_segments(self):
+ self.assertRaises(svn_server_log_parse.Error, self.parse,
+ 'get-location-segments')
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, 'get-location-segments /foo 3')
+ self.assertEqual(self.parse('get-location-segments /foo@2 r3:4'), '')
+ self.assertEqual(self.result, ('/foo', 2, 3, 4))
+
+ def test_get_file_revs(self):
+ self.assertRaises(svn_server_log_parse.Error, self.parse, 'get-file-revs')
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, 'get-file-revs /foo 3')
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, 'get-file-revs /foo 3:a')
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, 'get-file-revs /foo r3:a')
+ self.assertEqual(self.parse('get-file-revs /foo r3:4 ...'), ' ...')
+ self.assertEqual(self.result, ('/foo', 3, 4, False))
+ self.assertEqual(self.parse('get-file-revs /foo r3:4'
+ ' include-merged-revisions ...'), ' ...')
+ self.assertEqual(self.result, ('/foo', 3, 4, True))
+
+ def test_get_mergeinfo(self):
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, 'get-mergeinfo')
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, 'get-mergeinfo /foo')
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, 'get-mergeinfo (/foo')
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, 'get-mergeinfo (/foo /bar')
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, 'get-mergeinfo (/foo)')
+ self.assertRaises(svn_server_log_parse.BadMergeinfoInheritanceError,
+ self.parse, 'get-mergeinfo (/foo) bork')
+ self.assertEqual(self.parse('get-mergeinfo (/foo) explicit'), '')
+ self.assertEqual(self.result, (['/foo'],
+ svn.core.svn_mergeinfo_explicit, False))
+ self.assertEqual(self.parse('get-mergeinfo (/foo /bar) inherited ...'),
+ ' ...')
+ self.assertEqual(self.result, (['/foo', '/bar'],
+ svn.core.svn_mergeinfo_inherited, False))
+ self.assertEqual(self.result, (['/foo', '/bar'],
+ svn.core.svn_mergeinfo_inherited, False))
+
+ def test_log(self):
+ self.assertRaises(svn_server_log_parse.Error, self.parse, 'log')
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, 'log /foo')
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, 'log (/foo)')
+ self.assertEqual(self.parse('log (/foo) r3:4'
+ ' include-merged-revisions'), '')
+ self.assertEqual(self.result,
+ (['/foo'], 3, 4, 0, False, False, True, []))
+ self.assertEqual(self.parse('log (/foo /bar) r3:4 revprops=all ...'),
+ ' ...')
+ self.assertEqual(self.result,
+ (['/foo', '/bar'], 3, 4, 0, False, False, False, None))
+ self.assertEqual(self.parse('log (/foo) r3:4 revprops=(a b) ...'),
+ ' ...')
+ self.assertEqual(self.result,
+ (['/foo'], 3, 4, 0, False, False, False, ['a', 'b']))
+ self.assertEqual(self.parse('log (/foo) r8:1 limit=3'), '')
+ self.assertEqual(self.result,
+ (['/foo'], 8, 1, 3, False, False, False, []))
+
+ def test_check_path(self):
+ self.assertRaises(svn_server_log_parse.Error, self.parse, 'check-path')
+ self.assertEqual(self.parse('check-path /foo@9'), '')
+ self.assertEqual(self.result, ('/foo', 9))
+
+ def test_stat(self):
+ self.assertRaises(svn_server_log_parse.Error, self.parse, 'stat')
+ self.assertEqual(self.parse('stat /foo@9'), '')
+ self.assertEqual(self.result, ('/foo', 9))
+
+ def test_replay(self):
+ self.assertRaises(svn_server_log_parse.Error, self.parse, 'replay')
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, 'replay /foo')
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, 'replay (/foo) r9')
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, 'replay (/foo) r9:10')
+ self.assertEqual(self.parse('replay /foo r9'), '')
+ self.assertEqual(self.result, ('/foo', 9))
+
+ def test_checkout_or_export(self):
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, 'checkout-or-export')
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, 'checkout-or-export /foo')
+ self.assertEqual(self.parse('checkout-or-export /foo r9'), '')
+ self.assertEqual(self.result, ('/foo', 9, svn.core.svn_depth_unknown))
+ self.assertRaises(svn_server_log_parse.BadDepthError, self.parse,
+ 'checkout-or-export /foo r9 depth=INVALID-DEPTH')
+ self.assertRaises(svn_server_log_parse.BadDepthError, self.parse,
+ 'checkout-or-export /foo r9 depth=bork')
+ self.assertEqual(self.parse('checkout-or-export /foo r9 depth=files .'),
+ ' .')
+ self.assertEqual(self.result, ('/foo', 9, svn.core.svn_depth_files))
+
+ def test_diff_1path(self):
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, 'diff')
+ self.assertEqual(self.parse('diff /foo r9:10'), '')
+ self.assertEqual(self.result, ('/foo', 9, 10,
+ svn.core.svn_depth_unknown, False))
+ self.assertEqual(self.parse('diff /foo r9:10'
+ ' ignore-ancestry ...'), ' ...')
+ self.assertEqual(self.result, ('/foo', 9, 10,
+ svn.core.svn_depth_unknown, True))
+ self.assertEqual(self.parse('diff /foo r9:10 depth=files'), '')
+ self.assertEqual(self.result, ('/foo', 9, 10,
+ svn.core.svn_depth_files, False))
+
+ def test_diff_2paths(self):
+ self.assertEqual(self.parse('diff /foo@9 /bar@10'), '')
+ self.assertEqual(self.result, ('/foo', 9, '/bar', 10,
+ svn.core.svn_depth_unknown, False))
+ self.assertEqual(self.parse('diff /foo@9 /bar@10'
+ ' ignore-ancestry ...'), ' ...')
+ self.assertEqual(self.result, ('/foo', 9, '/bar', 10,
+ svn.core.svn_depth_unknown, True))
+ self.assertEqual(self.parse('diff /foo@9 /bar@10'
+ ' depth=files ignore-ancestry'), '')
+ self.assertEqual(self.result, ('/foo', 9, '/bar', 10,
+ svn.core.svn_depth_files, True))
+
+ def test_status(self):
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, 'status')
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, 'status /foo')
+ self.assertEqual(self.parse('status /foo r9'), '')
+ self.assertEqual(self.result, ('/foo', 9, svn.core.svn_depth_unknown))
+ self.assertRaises(svn_server_log_parse.BadDepthError, self.parse,
+ 'status /foo r9 depth=INVALID-DEPTH')
+ self.assertRaises(svn_server_log_parse.BadDepthError, self.parse,
+ 'status /foo r9 depth=bork')
+ self.assertEqual(self.parse('status /foo r9 depth=files .'),
+ ' .')
+ self.assertEqual(self.result, ('/foo', 9, svn.core.svn_depth_files))
+
+ def test_switch(self):
+ self.assertEqual(self.parse('switch /foo /bar@10 ...'), ' ...')
+ self.assertEqual(self.result, ('/foo', '/bar', 10,
+ svn.core.svn_depth_unknown))
+ self.assertEqual(self.parse('switch /foo /bar@10'
+ ' depth=files'), '')
+ self.assertEqual(self.result, ('/foo', '/bar', 10,
+ svn.core.svn_depth_files))
+
+ def test_update(self):
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, 'update')
+ self.assertRaises(svn_server_log_parse.Error,
+ self.parse, 'update /foo')
+ self.assertEqual(self.parse('update /foo r9'), '')
+ self.assertEqual(self.result, ('/foo', 9, svn.core.svn_depth_unknown,
+ False))
+ self.assertRaises(svn_server_log_parse.BadDepthError, self.parse,
+ 'update /foo r9 depth=INVALID-DEPTH')
+ self.assertRaises(svn_server_log_parse.BadDepthError, self.parse,
+ 'update /foo r9 depth=bork')
+ self.assertEqual(self.parse('update /foo r9 depth=files .'), ' .')
+ self.assertEqual(self.result, ('/foo', 9, svn.core.svn_depth_files,
+ False))
+ self.assertEqual(self.parse('update /foo r9 send-copyfrom-args .'),
+ ' .')
+ self.assertEqual(self.result, ('/foo', 9, svn.core.svn_depth_unknown,
+ True))
+
+if __name__ == '__main__':
+ if len(sys.argv) == 1:
+ # No arguments so run the unit tests.
+ unittest.main()
+ sys.stderr.write('unittest.main failed to exit\n')
+ sys.exit(2)
+
+ # Use the argument as the path to a log file to test against.
+
+ def uri_encode(s):
+ # urllib.parse.quote encodes :&@ characters, svn does not.
+ return urllib_parse_quote(s, safe='/:&@')
+
+ # Define a class to reconstruct the SVN-ACTION string.
+ class Test(svn_server_log_parse.Parser):
+ def handle_unknown(self, line):
+ sys.stderr.write('unknown log line at %d:\n%s\n' % (self.linenum,
+ line))
+ sys.exit(2)
+
+ def handle_open(self, protocol, capabilities, path, ra_client, client):
+ capabilities = ' '.join(capabilities)
+ if ra_client is None:
+ ra_client = '-'
+ if client is None:
+ client = '-'
+ path = uri_encode(path)
+ self.action = ('open %d cap=(%s) %s %s %s'
+ % (protocol, capabilities, path, ra_client, client))
+
+ def handle_reparent(self, path):
+ path = uri_encode(path)
+ self.action = 'reparent ' + path
+
+ def handle_get_latest_rev(self):
+ self.action = 'get-latest-rev'
+
+ def handle_get_dated_rev(self, date):
+ self.action = 'get-dated-rev ' + date
+
+ def handle_commit(self, revision):
+ self.action = 'commit r%d' % (revision,)
+
+ def handle_get_dir(self, path, revision, text, props):
+ path = uri_encode(path)
+ self.action = 'get-dir %s r%d' % (path, revision)
+ if text:
+ self.action += ' text'
+ if props:
+ self.action += ' props'
+
+ def handle_get_file(self, path, revision, text, props):
+ path = uri_encode(path)
+ self.action = 'get-file %s r%d' % (path, revision)
+ if text:
+ self.action += ' text'
+ if props:
+ self.action += ' props'
+
+ def handle_lock(self, paths, steal):
+ paths = [uri_encode(x) for x in paths]
+ self.action = 'lock (%s)' % (' '.join(paths),)
+ if steal:
+ self.action += ' steal'
+
+ def handle_change_rev_prop(self, revision, revprop):
+ revprop = uri_encode(revprop)
+ self.action = 'change-rev-prop r%d %s' % (revision, revprop)
+
+ def handle_rev_prop(self, revision, revprop):
+ revprop = uri_encode(revprop)
+ self.action = 'rev-prop r%d %s' % (revision, revprop)
+
+ def handle_rev_proplist(self, revision):
+ self.action = 'rev-proplist r%d' % (revision,)
+
+ def handle_unlock(self, paths, break_lock):
+ paths = [uri_encode(x) for x in paths]
+ self.action = 'unlock (%s)' % (' '.join(paths),)
+ if break_lock:
+ self.action += ' break'
+
+ def handle_get_lock(self, path):
+ path = uri_encode(path)
+ self.action = 'get-lock ' + path
+
+        def handle_get_locks(self, path):
+            path = uri_encode(path)
+            self.action = 'get-locks ' + path
+
+ def handle_get_locations(self, path, revisions):
+ path = uri_encode(path)
+ self.action = ('get-locations %s (%s)'
+ % (path, ' '.join([str(x) for x in revisions])))
+
+ def handle_get_location_segments(self, path, peg, left, right):
+ path = uri_encode(path)
+ self.action = 'get-location-segments %s@%d r%d:%d' % (path, peg,
+ left, right)
+
+ def handle_get_file_revs(self, path, left, right,
+ include_merged_revisions):
+ path = uri_encode(path)
+ self.action = 'get-file-revs %s r%d:%d' % (path, left, right)
+ if include_merged_revisions:
+ self.action += ' include-merged-revisions'
+
+ def handle_get_mergeinfo(self, paths, inheritance, include_descendants):
+ paths = [uri_encode(x) for x in paths]
+ self.action = ('get-mergeinfo (%s) %s'
+ % (' '.join(paths),
+ svn.core.svn_inheritance_to_word(inheritance)))
+ if include_descendants:
+ self.action += ' include-descendants'
+
+ def handle_log(self, paths, left, right, limit, discover_changed_paths,
+ strict, include_merged_revisions, revprops):
+ paths = [uri_encode(x) for x in paths]
+ self.action = 'log (%s) r%d:%d' % (' '.join(paths),
+ left, right)
+ if limit != 0:
+ self.action += ' limit=%d' % (limit,)
+ if discover_changed_paths:
+ self.action += ' discover-changed-paths'
+ if strict:
+ self.action += ' strict'
+ if include_merged_revisions:
+ self.action += ' include-merged-revisions'
+ if revprops is None:
+ self.action += ' revprops=all'
+ elif len(revprops) > 0:
+ revprops = [uri_encode(x) for x in revprops]
+ self.action += ' revprops=(%s)' % (' '.join(revprops),)
+
+ def handle_check_path(self, path, revision):
+ path = uri_encode(path)
+ self.action = 'check-path %s@%d' % (path, revision)
+
+ def handle_stat(self, path, revision):
+ path = uri_encode(path)
+ self.action = 'stat %s@%d' % (path, revision)
+
+ def handle_replay(self, path, revision):
+ path = uri_encode(path)
+ self.action = 'replay %s r%d' % (path, revision)
+
+ def maybe_depth(self, depth):
+ if depth != svn.core.svn_depth_unknown:
+ self.action += ' depth=%s' % (
+ svn.core.svn_depth_to_word(depth),)
+
+ def handle_checkout_or_export(self, path, revision, depth):
+ path = uri_encode(path)
+ self.action = 'checkout-or-export %s r%d' % (path, revision)
+ self.maybe_depth(depth)
+
+ def handle_diff_1path(self, path, left, right,
+ depth, ignore_ancestry):
+ path = uri_encode(path)
+ self.action = 'diff %s r%d:%d' % (path, left, right)
+ self.maybe_depth(depth)
+ if ignore_ancestry:
+ self.action += ' ignore-ancestry'
+
+ def handle_diff_2paths(self, from_path, from_rev,
+ to_path, to_rev,
+ depth, ignore_ancestry):
+ from_path = uri_encode(from_path)
+ to_path = uri_encode(to_path)
+ self.action = ('diff %s@%d %s@%d'
+ % (from_path, from_rev, to_path, to_rev))
+ self.maybe_depth(depth)
+ if ignore_ancestry:
+ self.action += ' ignore-ancestry'
+
+ def handle_status(self, path, revision, depth):
+ path = uri_encode(path)
+ self.action = 'status %s r%d' % (path, revision)
+ self.maybe_depth(depth)
+
+ def handle_switch(self, from_path, to_path, to_rev, depth):
+ from_path = uri_encode(from_path)
+ to_path = uri_encode(to_path)
+ self.action = ('switch %s %s@%d'
+ % (from_path, to_path, to_rev))
+ self.maybe_depth(depth)
+
+ def handle_update(self, path, revision, depth, send_copyfrom_args):
+ path = uri_encode(path)
+ self.action = 'update %s r%d' % (path, revision)
+ self.maybe_depth(depth)
+ if send_copyfrom_args:
+ self.action += ' send-copyfrom-args'
+
+    fd, tmp = tempfile.mkstemp(); os.close(fd)
+ try:
+ fp = open(tmp, 'w')
+ parser = Test()
+ parser.linenum = 0
+ log_file = sys.argv[1]
+ log_type = None
+ for line in open(log_file):
+ if log_type is None:
+ # Figure out which log type we have.
+ if re.match(r'\d+ \d\d\d\d-', line):
+ log_type = 'svnserve'
+ elif re.match(r'\[\d\d/', line):
+ log_type = 'mod_dav_svn'
+ else:
+ sys.stderr.write("unknown log format in '%s'"
+ % (log_file,))
+ sys.exit(3)
+ sys.stderr.write('parsing %s log...\n' % (log_type,))
+ sys.stderr.flush()
+
+ words = line.split()
+ if log_type == 'svnserve':
+ # Skip over PID, date, client address, username, and repos.
+ if words[5].startswith('ERR'):
+ # Skip error lines.
+ fp.write(line)
+ continue
+ leading = ' '.join(words[:5])
+ action = ' '.join(words[5:])
+ else:
+ # Find the SVN-ACTION string from the CustomLog format
+ # davautocheck.sh uses. If that changes, this will need
+ # to as well. Currently it's
+ # %t %u %{SVN-REPOS-NAME}e %{SVN-ACTION}e
+ leading = ' '.join(words[:4])
+ action = ' '.join(words[4:])
+
+ # Parse the action and write the reconstructed action to
+ # the temporary file. Ignore the returned trailing text,
+ # as we have none in the davautocheck ops log.
+ parser.linenum += 1
+ try:
+ parser.parse(action)
+ except svn_server_log_parse.Error:
+ sys.stderr.write('error at line %d: %s\n'
+ % (parser.linenum, action))
+ raise
+ fp.write(leading + ' ' + parser.action + '\n')
+ fp.close()
+ # Check differences between original and reconstructed files
+ # (should be identical).
+ result = os.spawnlp(os.P_WAIT, 'diff', 'diff', '-u', log_file, tmp)
+ if result == 0:
+ sys.stderr.write('OK\n')
+ sys.exit(result)
+ finally:
+ try:
+ os.unlink(tmp)
+ except Exception as e:
+ sys.stderr.write('os.unlink(tmp): %s\n' % (e,))
diff --git a/tools/xslt/svnindex.css b/tools/xslt/svnindex.css
new file mode 100644
index 0000000..09752c9
--- /dev/null
+++ b/tools/xslt/svnindex.css
@@ -0,0 +1,108 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+
+/* A sample style sheet for displaying the Subversion directory listing
+ that is generated by mod_dav_svn and "svnindex.xsl". */
+
+body{
+ margin: 0;
+ padding: 0;
+}
+
+a {
+ color: navy;
+}
+
+.footer {
+ margin-top: 8em;
+ padding: 0.5em 1em 0.5em;
+ border: 1px solid;
+ border-width: 1px 0;
+ clear: both;
+ border-color: rgb(30%,30%,50%) navy rgb(75%,80%,85%) navy;
+ background: rgb(88%,90%,92%);
+ font-size: 80%;
+}
+
+.svn {
+ margin: 3em;
+}
+
+.rev {
+ margin-right: 3px;
+ padding-left: 3px;
+ text-align: left;
+ font-size: 120%;
+}
+
+.dir a {
+ text-decoration: none;
+ color: black;
+}
+
+.file a {
+ text-decoration: none;
+ color: black;
+}
+
+.path {
+ margin: 3px;
+ padding: 3px;
+ background: #FFCC66;
+ font-size: 120%;
+}
+
+.updir {
+ margin: 3px;
+ padding: 3px;
+ margin-left: 3em;
+ background: #FFEEAA;
+}
+
+.file {
+ margin: 3px;
+ padding: 3px;
+ margin-left: 3em;
+ background: rgb(95%,95%,95%);
+}
+
+.file:hover {
+ margin: 3px;
+ padding: 3px;
+ margin-left: 3em;
+ background: rgb(100%,100%,90%);
+/* border: 1px black solid; */
+}
+
+.dir {
+ margin: 3px;
+ padding: 3px;
+ margin-left: 3em;
+ background: rgb(90%,90%,90%);
+}
+
+.dir:hover {
+ margin: 3px;
+ padding: 3px;
+ margin-left: 3em;
+ background: rgb(100%,100%,80%);
+/* border: 1px black solid; */
+}
diff --git a/tools/xslt/svnindex.xsl b/tools/xslt/svnindex.xsl
new file mode 100644
index 0000000..83454f7
--- /dev/null
+++ b/tools/xslt/svnindex.xsl
@@ -0,0 +1,123 @@
+<?xml version="1.0"?>
+<!--
+
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+
+-->
+
+
+<!-- A sample XML transformation style sheet for displaying the Subversion
+ directory listing that is generated by mod_dav_svn when the "SVNIndexXSLT"
+ directive is used. -->
+<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0">
+
+ <xsl:output method="html"/>
+
+ <xsl:template match="*"/>
+
+ <xsl:template match="svn">
+ <html>
+ <head>
+ <title>
+ <xsl:if test="string-length(index/@name) != 0">
+ <xsl:value-of select="index/@name"/>
+ <xsl:text>: </xsl:text>
+ </xsl:if>
+ <xsl:value-of select="index/@path"/>
+ </title>
+ <link rel="stylesheet" type="text/css" href="/svnindex.css"/>
+ </head>
+ <body>
+ <div class="svn">
+ <xsl:apply-templates/>
+ </div>
+ <div class="footer">
+ <xsl:text>Powered by </xsl:text>
+ <xsl:element name="a">
+ <xsl:attribute name="href">
+ <xsl:value-of select="@href"/>
+ </xsl:attribute>
+ <xsl:text>Subversion</xsl:text>
+ </xsl:element>
+ <xsl:text> </xsl:text>
+ <xsl:value-of select="@version"/>
+ </div>
+ </body>
+ </html>
+ </xsl:template>
+
+ <xsl:template match="index">
+ <div class="rev">
+ <xsl:value-of select="@name"/>
+ <xsl:if test="@base">
+ <xsl:if test="@name">
+ <xsl:text>:&#xA0; </xsl:text>
+ </xsl:if>
+ <xsl:value-of select="@base" />
+ </xsl:if>
+ <xsl:if test="@rev">
+ <xsl:if test="@base | @name">
+ <xsl:text> &#x2014; </xsl:text>
+ </xsl:if>
+ <xsl:text>Revision </xsl:text>
+ <xsl:value-of select="@rev"/>
+ </xsl:if>
+ </div>
+ <div class="path">
+ <xsl:value-of select="@path"/>
+ </div>
+ <xsl:apply-templates select="updir"/>
+ <xsl:apply-templates select="dir"/>
+ <xsl:apply-templates select="file"/>
+ </xsl:template>
+
+ <xsl:template match="updir">
+ <div class="updir">
+ <xsl:text>[</xsl:text>
+ <xsl:element name="a">
+ <xsl:attribute name="href">..</xsl:attribute>
+ <xsl:text>Parent Directory</xsl:text>
+ </xsl:element>
+ <xsl:text>]</xsl:text>
+ </div>
+ </xsl:template>
+
+ <xsl:template match="dir">
+ <div class="dir">
+ <xsl:element name="a">
+ <xsl:attribute name="href">
+ <xsl:value-of select="@href"/>
+ </xsl:attribute>
+ <xsl:value-of select="@name"/>
+ <xsl:text>/</xsl:text>
+ </xsl:element>
+ </div>
+ </xsl:template>
+
+ <xsl:template match="file">
+ <div class="file">
+ <xsl:element name="a">
+ <xsl:attribute name="href">
+ <xsl:value-of select="@href"/>
+ </xsl:attribute>
+ <xsl:value-of select="@name"/>
+ </xsl:element>
+ </div>
+ </xsl:template>
+
+</xsl:stylesheet>